author     Nikita Kostovsky <luntik2012@gmail.com>  2024-11-09 17:26:11 +0100
committer  Nikita Kostovsky <luntik2012@gmail.com>  2024-11-09 17:26:11 +0100
commit     053237b9c91f1b5892782c1c4b2eb50cc8aeadfd
tree       ab3d3d2e896c7c71228bdb62c4fd4364a1364347
Initial commit
-rw-r--r--  .gitignore           2
-rw-r--r--  CMakeLists.txt      83
-rw-r--r--  LibCamera.cpp      267
-rw-r--r--  LibCamera.h         86
-rw-r--r--  genetic_algos.cpp    1
-rw-r--r--  genetic_algos.h    174
-rw-r--r--  httpservice.cpp      1
-rw-r--r--  httpservice.h      203
-rw-r--r--  imagealgos.cpp     246
-rw-r--r--  imagealgos.h        29
-rw-r--r--  macro.h             11
-rw-r--r--  main.cpp           783
12 files changed, 1886 insertions(+), 0 deletions(-)
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..6f6596f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+*.user*
+build*
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..a1d1b47
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,83 @@
+cmake_minimum_required(VERSION 3.18)
+
+set(CMAKE_CXX_STANDARD 23)
+
+include_guard(GLOBAL)
+
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_PROCESSOR arm)
+
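+# Cross-compilation against a Raspberry Pi sysroot; TARGET_SYSROOT is a
+# machine-specific path and has to be adjusted for other build hosts.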
+set(TARGET_SYSROOT /home/nikita/rpi/rpi-sysroot)
+set(CMAKE_SYSROOT ${TARGET_SYSROOT})
+set(CMAKE_LIBRARY_PATH ${CMAKE_SYSROOT}/usr/lib/aarch64-linux-gnu)
+
+set(ENV{PKG_CONFIG_PATH} $ENV{PKG_CONFIG_PATH}:/usr/lib/aarch64-linux-gnu/pkgconfig)
+set(ENV{PKG_CONFIG_LIBDIR} /usr/lib/pkgconfig:/usr/share/pkgconfig/:${TARGET_SYSROOT}/usr/lib/aarch64-linux-gnu/pkgconfig:${TARGET_SYSROOT}/usr/lib/pkgconfig)
+set(ENV{PKG_CONFIG_SYSROOT_DIR} ${CMAKE_SYSROOT})
+
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -I${TARGET_SYSROOT}/usr/include")
+set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS}")
+
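+# Search programs on the host, but restrict libraries, headers and CMake
+# packages to the sysroot.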
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
+set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
+set(CMAKE_BUILD_RPATH ${TARGET_SYSROOT})
+
+find_package(PkgConfig REQUIRED)
+pkg_check_modules(LIBCAMERA REQUIRED IMPORTED_TARGET libcamera)
+find_library(LIBCAMERA_LIBRARY libcamera.so REQUIRED)
+find_library(LIBCAMERA_BASE_LIBRARY libcamera-base.so REQUIRED)
+pkg_check_modules(CAMERA REQUIRED libcamera)
+message(STATUS ${CAMERA_INCLUDE_DIRS})
+message(STATUS ${LIBCAMERA_LIBRARY})
+message(STATUS ${LIBCAMERA_BASE_LIBRARY})
+set(LIBCAMERA_LIBRARIES "${LIBCAMERA_LIBRARY}" "${LIBCAMERA_BASE_LIBRARY}")
+
+project(orpheus LANGUAGES CXX)
+
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
+find_package(Qt6 6.4 REQUIRED COMPONENTS Quick HttpServer)
+
+qt_standard_project_setup(REQUIRES 6.4)
+
+include_directories(. "${CAMERA_INCLUDE_DIRS}")
+
+# libpistache
+pkg_check_modules(Pistache REQUIRED IMPORTED_TARGET libpistache)
+
+# add_executable(${PROJECT_NAME}
+qt_add_executable(apporpheus
+ httpservice.h
+ httpservice.cpp
+ imagealgos.h
+ imagealgos.cpp
+ macro.h
+ main.cpp
+ LibCamera.h
+ LibCamera.cpp
+ genetic_algos.h
+ genetic_algos.cpp
+)
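+# NOTE: "apporpheus" and app${PROJECT_NAME} name the same target
+# (PROJECT_NAME is "orpheus").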
+target_link_libraries(app${PROJECT_NAME}
+ PRIVATE
+ "${LIBCAMERA_LIBRARIES}"
+ PkgConfig::Pistache
+ Qt6::HttpServer
+)
+
+include(GNUInstallDirs)
+install(TARGETS app${PROJECT_NAME}
+ BUNDLE DESTINATION .
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+)
+
+if ("${CMAKE_BUILD_TYPE}" MATCHES "Release")
+ # message("it's Debug")
+else()
+ #message("it's not Debug")
+endif()
diff --git a/LibCamera.cpp b/LibCamera.cpp
new file mode 100644
index 0000000..945a8ae
--- /dev/null
+++ b/LibCamera.cpp
@@ -0,0 +1,267 @@
+#include "LibCamera.h"
+
+using namespace std::placeholders;
+
+int LibCamera::initCamera() {
+ int ret;
+ cm = std::make_unique<CameraManager>();
+ ret = cm->start();
+ if (ret){
+ std::cout << "Failed to start camera manager: "
+ << ret << std::endl;
+ return ret;
+ }
+ cameraId = cm->cameras()[0]->id();
+ camera_ = cm->get(cameraId);
+ if (!camera_) {
+ std::cerr << "Camera " << cameraId << " not found" << std::endl;
+ return 1;
+ }
+
+ if (camera_->acquire()) {
+ std::cerr << "Failed to acquire camera " << cameraId
+ << std::endl;
+ return 1;
+ }
+ camera_acquired_ = true;
+ return 0;
+}
+
+char * LibCamera::getCameraId(){
+ return cameraId.data();
+}
+
+void LibCamera::configureStill(int width, int height, PixelFormat format, int buffercount, int rotation) {
+ printf("Configuring still capture...\n");
+ config_ = camera_->generateConfiguration({ StreamRole::StillCapture });
+ if (width && height) {
+ libcamera::Size size(width, height);
+ config_->at(0).size = size;
+ }
+ config_->at(0).pixelFormat = format;
+ if (buffercount)
+ config_->at(0).bufferCount = buffercount;
+ Transform transform = Transform::Identity;
+ bool ok;
+ Transform rot = transformFromRotation(rotation, &ok);
+ if (!ok)
+ throw std::runtime_error("illegal rotation value, Please use 0 or 180");
+ transform = rot * transform;
+ if (!!(transform & Transform::Transpose))
+ throw std::runtime_error("transforms requiring transpose not supported");
+ // FIXME: update
+ // config_->transform = transform;
+
+ CameraConfiguration::Status validation = config_->validate();
+ if (validation == CameraConfiguration::Invalid)
+ throw std::runtime_error("failed to valid stream configurations");
+ else if (validation == CameraConfiguration::Adjusted)
+ std::cout << "Stream configuration adjusted" << std::endl;
+
+ printf("Still capture setup complete\n");
+}
+
+int LibCamera::startCamera() {
+ int ret;
+ ret = camera_->configure(config_.get());
+ if (ret < 0) {
+ std::cout << "Failed to configure camera" << std::endl;
+ return ret;
+ }
+
+ camera_->requestCompleted.connect(this, &LibCamera::requestComplete);
+
+ allocator_ = std::make_unique<FrameBufferAllocator>(camera_);
+
+ return startCapture();
+}
+
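+// Allocates framebuffers for every configured stream, wraps them in capture
+// requests, mmap()s each plane for later CPU access and queues the initial
+// requests.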
+int LibCamera::startCapture() {
+ int ret;
+ unsigned int nbuffers = UINT_MAX;
+ for (StreamConfiguration &cfg : *config_) {
+ ret = allocator_->allocate(cfg.stream());
+ if (ret < 0) {
+ std::cerr << "Can't allocate buffers" << std::endl;
+ return -ENOMEM;
+ }
+
+ unsigned int allocated = allocator_->buffers(cfg.stream()).size();
+ nbuffers = std::min(nbuffers, allocated);
+ }
+
+ for (unsigned int i = 0; i < nbuffers; i++) {
+ std::unique_ptr<Request> request = camera_->createRequest();
+ if (!request) {
+ std::cerr << "Can't create request" << std::endl;
+ return -ENOMEM;
+ }
+
+ for (StreamConfiguration &cfg : *config_) {
+ Stream *stream = cfg.stream();
+ const std::vector<std::unique_ptr<FrameBuffer>> &buffers =
+ allocator_->buffers(stream);
+ const std::unique_ptr<FrameBuffer> &buffer = buffers[i];
+
+ ret = request->addBuffer(stream, buffer.get());
+ if (ret < 0) {
+ std::cerr << "Can't set buffer for request"
+ << std::endl;
+ return ret;
+ }
+ for (const FrameBuffer::Plane &plane : buffer->planes()) {
+ void *memory = mmap(NULL, plane.length, PROT_READ, MAP_SHARED,
+ plane.fd.get(), 0);
+ mappedBuffers_[plane.fd.get()] =
+ std::make_pair(memory, plane.length);
+ }
+ }
+
+ requests_.push_back(std::move(request));
+ }
+
+ ret = camera_->start(&this->controls_);
+ // ret = camera_->start();
+ if (ret) {
+ std::cout << "Failed to start capture" << std::endl;
+ return ret;
+ }
+ controls_.clear();
+ camera_started_ = true;
+ for (std::unique_ptr<Request> &request : requests_) {
+ ret = queueRequest(request.get());
+ if (ret < 0) {
+ std::cerr << "Can't queue request" << std::endl;
+ camera_->stop();
+ return ret;
+ }
+ }
+ viewfinder_stream_ = config_->at(0).stream();
+ return 0;
+}
+
+void LibCamera::StreamDimensions(Stream const *stream, uint32_t *w, uint32_t *h, uint32_t *stride) const
+{
+ StreamConfiguration const &cfg = stream->configuration();
+ if (w)
+ *w = cfg.size.width;
+ if (h)
+ *h = cfg.size.height;
+ if (stride)
+ *stride = cfg.stride;
+}
+
+Stream *LibCamera::VideoStream(uint32_t *w, uint32_t *h, uint32_t *stride) const
+{
+ StreamDimensions(viewfinder_stream_, w, h, stride);
+ return viewfinder_stream_;
+}
+
+int LibCamera::queueRequest(Request *request) {
+ std::lock_guard<std::mutex> stop_lock(camera_stop_mutex_);
+ if (!camera_started_)
+ return -1;
+ {
+ std::lock_guard<std::mutex> lock(control_mutex_);
+ request->controls() = std::move(controls_);
+ }
+ return camera_->queueRequest(request);
+}
+
+void LibCamera::requestComplete(Request *request) {
+ if (request->status() == Request::RequestCancelled)
+ return;
+ processRequest(request);
+}
+
+void LibCamera::processRequest(Request *request) {
+ requestQueue.push(request);
+}
+
+void LibCamera::returnFrameBuffer(LibcameraOutData frameData) {
+ uint64_t request = frameData.request;
+ Request * req = (Request *)request;
+ req->reuse(Request::ReuseBuffers);
+ queueRequest(req);
+}
+
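+// Non-blocking: returns false when no completed request is waiting. On success
+// the caller must hand the frame back via returnFrameBuffer() so the request
+// can be reused and re-queued.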
+bool LibCamera::readFrame(LibcameraOutData *frameData){
+ std::lock_guard<std::mutex> lock(free_requests_mutex_);
+ // int w, h, stride;
+ if (!requestQueue.empty()){
+ Request *request = this->requestQueue.front();
+
+ const Request::BufferMap &buffers = request->buffers();
+ for (auto it = buffers.begin(); it != buffers.end(); ++it) {
+ FrameBuffer *buffer = it->second;
+ for (unsigned int i = 0; i < buffer->planes().size(); ++i) {
+ const FrameBuffer::Plane &plane = buffer->planes()[i];
+ const FrameMetadata::Plane &meta = buffer->metadata().planes()[i];
+
+ void *data = mappedBuffers_[plane.fd.get()].first;
+ int length = std::min(meta.bytesused, plane.length);
+
+ frameData->size = length;
+ frameData->imageData = (uint8_t *)data;
+ }
+ }
+ this->requestQueue.pop();
+ frameData->request = (uint64_t)request;
+ return true;
+ } else {
+ Request *request = nullptr;
+ frameData->request = (uint64_t)request;
+ return false;
+ }
+}
+
+void LibCamera::set(ControlList controls){
+ std::lock_guard<std::mutex> lock(control_mutex_);
+ this->controls_ = std::move(controls);
+}
+
+int LibCamera::resetCamera(int width, int height, PixelFormat format, int buffercount, int rotation) {
+ stopCamera();
+ configureStill(width, height, format, buffercount, rotation);
+ return startCamera();
+}
+
+void LibCamera::stopCamera() {
+ if (camera_){
+ {
+ std::lock_guard<std::mutex> lock(camera_stop_mutex_);
+ if (camera_started_){
+ if (camera_->stop())
+ throw std::runtime_error("failed to stop camera");
+ camera_started_ = false;
+ }
+ }
+ camera_->requestCompleted.disconnect(this, &LibCamera::requestComplete);
+ }
+ while (!requestQueue.empty())
+ requestQueue.pop();
+
+ for (auto &iter : mappedBuffers_)
+ {
+ std::pair<void *, unsigned int> pair_ = iter.second;
+ munmap(std::get<0>(pair_), std::get<1>(pair_));
+ }
+
+ mappedBuffers_.clear();
+
+ requests_.clear();
+
+ allocator_.reset();
+
+ controls_.clear();
+}
+
+void LibCamera::closeCamera(){
+ if (camera_acquired_)
+ camera_->release();
+ camera_acquired_ = false;
+
+ camera_.reset();
+
+ cm.reset();
+}
diff --git a/LibCamera.h b/LibCamera.h
new file mode 100644
index 0000000..2262ab2
--- /dev/null
+++ b/LibCamera.h
@@ -0,0 +1,86 @@
+#include <atomic>
+#include <iomanip>
+#include <iostream>
+#include <signal.h>
+#include <limits.h>
+#include <memory>
+#include <stdint.h>
+#include <string>
+#include <vector>
+#include <map>
+#include <unordered_map>
+#include <queue>
+#include <sstream>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <time.h>
+#include <mutex>
+
+#include <libcamera/controls.h>
+#include <libcamera/control_ids.h>
+#include <libcamera/property_ids.h>
+#include <libcamera/libcamera.h>
+#include <libcamera/camera.h>
+#include <libcamera/camera_manager.h>
+#include <libcamera/framebuffer_allocator.h>
+#include <libcamera/request.h>
+#include <libcamera/stream.h>
+#include <libcamera/formats.h>
+#include <libcamera/transform.h>
+
+using namespace libcamera;
+
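+// Frame handed out by readFrame(): imageData points into an mmap()ed plane,
+// size is the number of bytes used, and request stores the originating
+// Request* as an integer so it can be given back via returnFrameBuffer().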
+typedef struct {
+ uint8_t *imageData;
+ uint32_t size;
+ uint64_t request;
+} LibcameraOutData;
+
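+// Minimal usage sketch (assumes at least one camera is attached; the size,
+// format, buffer count and rotation below are examples only):
+//
+//     LibCamera cam;
+//     if (cam.initCamera() == 0) {
+//         cam.configureStill(1280, 800, formats::R8, 2, 0);
+//         cam.startCamera();
+//         LibcameraOutData frame;
+//         if (cam.readFrame(&frame)) {
+//             // process frame.imageData / frame.size ...
+//             cam.returnFrameBuffer(frame);
+//         }
+//         cam.stopCamera();
+//         cam.closeCamera();
+//     }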
+class LibCamera {
+ public:
+ LibCamera(){};
+ ~LibCamera(){};
+
+ int initCamera();
+ void configureStill(int width, int height, PixelFormat format, int buffercount, int rotation);
+ int startCamera();
+ int resetCamera(int width, int height, PixelFormat format, int buffercount, int rotation);
+ bool readFrame(LibcameraOutData *frameData);
+ void returnFrameBuffer(LibcameraOutData frameData);
+
+ void set(ControlList controls);
+ void stopCamera();
+ void closeCamera();
+
+ Stream *VideoStream(uint32_t *w, uint32_t *h, uint32_t *stride) const;
+ char * getCameraId();
+
+ private:
+ int startCapture();
+ int queueRequest(Request *request);
+ void requestComplete(Request *request);
+ void processRequest(Request *request);
+
+ void StreamDimensions(Stream const *stream, uint32_t *w, uint32_t *h, uint32_t *stride) const;
+
+ unsigned int cameraIndex_;
+ uint64_t last_;
+ std::unique_ptr<CameraManager> cm;
+ std::shared_ptr<Camera> camera_;
+ bool camera_acquired_ = false;
+ bool camera_started_ = false;
+ std::unique_ptr<CameraConfiguration> config_;
+ std::unique_ptr<FrameBufferAllocator> allocator_;
+ std::vector<std::unique_ptr<Request>> requests_;
+ // std::map<std::string, Stream *> stream_;
+ std::map<int, std::pair<void *, unsigned int>> mappedBuffers_;
+
+ std::queue<Request *> requestQueue;
+
+ ControlList controls_;
+ std::mutex control_mutex_;
+ std::mutex camera_stop_mutex_;
+ std::mutex free_requests_mutex_;
+
+ Stream *viewfinder_stream_ = nullptr;
+ std::string cameraId;
+};
\ No newline at end of file
diff --git a/genetic_algos.cpp b/genetic_algos.cpp
new file mode 100644
index 0000000..1962768
--- /dev/null
+++ b/genetic_algos.cpp
@@ -0,0 +1 @@
+#include "genetic_algos.h"
diff --git a/genetic_algos.h b/genetic_algos.h
new file mode 100644
index 0000000..862cd34
--- /dev/null
+++ b/genetic_algos.h
@@ -0,0 +1,174 @@
+#pragma once
+
+// #define _USE_MATH_DEFINES
+#include <algorithm>
+#include <cmath>
+#include <cstdint>
+#include <cstdlib>
+// #include <execution>
+#include <numbers>
+#include <random>
+#include <vector>
+
+/*
+ * Fits a Gaussian (mean a, standard deviation sigma) to a single image
+ * column with a small genetic algorithm (see Algo below).
+ */
+
+// #define POP_SIZE 30
+// #define NUM_IT 25
+// #define COL_SIZE 16
+
+// using Column = double;
+
+static std::random_device random_device;
+static std::mt19937 gen(random_device());
+static std::uniform_real_distribution<> dis01(0., 1.);
+static std::uniform_real_distribution<> disDelta2(-2., 2.);
+
+template<typename T, size_t COL_SIZE = 16>
+struct Item {
+ double a, sigma; // E, sigma = sqrt(D)
+ T* column;
+
+ double W;
+
+ Item() {}
+
+ Item(double a_, double s, T * column_) : a(a_), sigma(s), column(column_) {
+ update();
+ }
+
+ // Gaussian density with mean a and standard deviation sigma
+ double gauss(double t) {
+ return std::exp(-std::pow((t - a) / sigma, 2) / 2) /
+ (sigma * std::sqrt(2 * std::numbers::pi));
+ }
+
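+ // Sum of squared differences between the fitted Gaussian and the column
+ // samples over [a - 3*sigma, a + 3*sigma], clamped to the column bounds;
+ // lower is better.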
+ double F() { // objective function
+ double s = 0;
+ int x0 = std::max(0, int(std::floor(.5 + a - 3 * sigma)));
+ int x1 = std::min(int(COL_SIZE) - 1, int(std::round(a + 3 * sigma)));
+
+ for (int j = x0; j <= x1; j++)
+ s += std::pow(gauss(j) - column[j], 2);
+
+ return s;
+ }
+
+ void update() {
+ W = F();
+ }
+
+ // action
+
+ Item move() {
+ double a1 = a + disDelta2(gen);
+ double sigma1 = sigma * (1 + disDelta2(gen)); // a: ~ +- 2 pixel
+
+ return Item(a1, sigma1, column);
+ }
+
+ // a = q * a1 + (1 - q) * a2, sigma = q * s1 + (1 - q) * s2
+ Item crossover(const Item& other) {
+ double q = dis01(gen);
+ double a_ = q * a + (1 - q) * other.a;
+ double sigma_ = q * sigma + (1 - q) * other.sigma;
+
+ return Item(a_, sigma_, column);
+ }
+};
+
+template <typename T = uint16_t,
+ size_t POP_SIZE = 30,
+ size_t COL_SIZE = 16,
+ size_t NUM_IT = 25,
+ double maxW = .01>
+struct Algo {
+ T * column;
+ using I = Item<T, COL_SIZE>;
+ std::vector<I> population;
+ std::uniform_real_distribution<double> disA { 0., double(1.) };
+
+ double targetW;
+
+ Algo(T * column_): column(column_) {
+ init();
+ }
+
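+ // Random restart: a is drawn from [0, COL_SIZE), sigma from [0, COL_SIZE / 10).
+ // (Uses rand() rather than the mt19937 engine defined above.)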
+ I getNewItem() {
+ double a = rand() % (COL_SIZE * 1000) / 1000.;
+ double sigma = rand() % (COL_SIZE * 100) / 1000.;
+
+ return I(a, sigma, column);
+ }
+
+ void init() {
+ for (size_t i = 0; i < POP_SIZE; i++) {
+ population.push_back(getNewItem());
+ }
+ }
+
+ bool stopCondition() {
+ // return population[0].W <= targetW;
+ return population[0].W <= maxW;
+ }
+
+ I run() {
+ for (int it = 0; it < NUM_IT; it++) {
+ work();
+
+ // if (stopCondition())
+ // break;
+ }
+
+ return population[0];
+ }
+
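+ // One generation: append a mutated copy of every member, cross pairs among
+ // the current best few members, add a few random newcomers, then sort by
+ // fitness (ascending W) and truncate the population back to POP_SIZE.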
+ void work() {
+ move();
+ crossover();
+ addNew();
+
+ sort();
+
+ remove();
+ }
+
+ void sort() {
+ // sort vector ASC
+ std::sort(population.begin(), population.end(),
+ [](const auto & a, const auto & b)
+ {
+ return a.W < b.W;
+ });
+ }
+
+ void remove() {
+ population.erase(population.begin() + POP_SIZE, population.end());
+ }
+
+ void move() {
+ // iterate over a fixed count: push_back() may reallocate and would
+ // invalidate range-for iterators over the vector being grown
+ const size_t n = population.size();
+ for (size_t i = 0; i < n; i++) {
+ population.push_back(population[i].move());
+ }
+ }
+
+ void addNew() {
+ for (int i = 0; i < 5; i++) {
+ population.push_back(getNewItem());
+ }
+ }
+
+ void crossover() {
+ for (int i1 = 0; i1 < 4; i1++) {
+
+ for (int i2 = i1 + 1; i2 < 4; i2++) {
+ I& x1 = population[i1];
+ I& x2 = population[i2];
+ // a = q * a1 + (1 - q) * a2, sigma = q * s1 + (1 - q) * s2
+ population.push_back(x1.crossover(x2));
+ }
+ }
+ }
+};
diff --git a/httpservice.cpp b/httpservice.cpp
new file mode 100644
index 0000000..7fa72fb
--- /dev/null
+++ b/httpservice.cpp
@@ -0,0 +1 @@
+#include "httpservice.h"
diff --git a/httpservice.h b/httpservice.h
new file mode 100644
index 0000000..80b8e4a
--- /dev/null
+++ b/httpservice.h
@@ -0,0 +1,203 @@
+#pragma once
+
+#include <iostream>
+#include <mutex>
+
+#include <pistache/description.h>
+#include <pistache/endpoint.h>
+#include <pistache/http.h>
+
+#include <pistache/serializer/rapidjson.h>
+
+#include "imagealgos.h"
+
+using namespace Pistache;
+
+extern uint8_t pgm_image[64 + img_width * img_height * sizeof(uint16_t)];
+extern size_t pgm_image_size;
+extern std::mutex pgm_image_mtx;
+
+class HttpService
+{
+public:
+ HttpService(Address addr)
+ : httpEndpoint(std::make_shared<Http::Endpoint>(addr))
+ , desc("Banking API", "0.1")
+ { }
+
+ void init(size_t thr = 2)
+ {
+ auto opts = Http::Endpoint::options()
+ .threads(static_cast<int>(thr));
+ httpEndpoint->init(opts);
+ createDescription();
+ }
+
+ void start()
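+ // NOTE: uiDirectory below points at a machine-specific path; the "Banking
+ // API" title and the /accounts routes look carried over from the Pistache
+ // REST description example.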
+ {
+ router.initFromDescription(desc);
+
+ Rest::Swagger swagger(desc);
+ swagger
+ .uiPath("/doc")
+ .uiDirectory("/home/user/swagger-ui/dist")
+ .apiPath("/banker-api.json")
+ .serializer(&Rest::Serializer::rapidJson)
+ .install(router);
+
+ httpEndpoint->setHandler(router.handler());
+ httpEndpoint->serve();
+ }
+
+private:
+ void createDescription()
+ {
+ desc
+ .info()
+ .license("Apache", "http://www.apache.org/licenses/LICENSE-2.0");
+
+ auto backendErrorResponse = desc.response(Http::Code::Internal_Server_Error, "An error occurred with the backend");
+
+ desc
+ .schemes(Rest::Scheme::Http)
+ .basePath("/v1")
+ .produces(MIME(Application, Json))
+ .consumes(MIME(Application, Json));
+
+ // desc
+ // .route(desc.get("/ready"))
+ // .bind(&Generic::handleReady)
+ // .response(Http::Code::Ok, "Response to the /ready call")
+ // .hide();
+
+ auto versionPath = desc.path("/v1");
+
+ auto sensorPath = versionPath.path("/sensor");
+
+ sensorPath
+ .route(desc.get("/image"))
+ .bind(&HttpService::image, this)
+ .produces(MIME(Image, Png))
+ .response(Http::Code::Ok, "Image from sensor");
+
+ // tmp
+ sensorPath
+ .route(desc.get("/image2"))
+ .bind(&HttpService::image, this)
+ .produces(MIME(Image, Png))
+ .response(Http::Code::Ok, "Image from sensor");
+
+ sensorPath
+ .route(desc.get("/params"), "Retrive sensor parameters")
+ .bind(&HttpService::get_sensorParams, this)
+ .produces(MIME(Application, Plain))
+ .response(Http::Code::Ok, "Parameter value")
+ .response(backendErrorResponse);;
+
+ sensorPath
+ .route(desc.get("/:param"), "Retrive sensor parameter")
+ .bind(&HttpService::get_sensorParam, this)
+ .produces(MIME(Application, Json))
+ .parameter<Rest::Type::String>("param", "The name of the parameter to retrieve")
+ .response(Http::Code::Ok, "Parameter value")
+ .response(backendErrorResponse);;
+
+ sensorPath
+ .route(desc.post("/:param"), "Set sensor parameter")
+ .bind(&HttpService::set_sensorParam, this)
+ .produces(MIME(Application, Plain))
+ .consumes(MIME(Application, Plain))
+ .response(Http::Code::Ok, "Setting parameter result");
+
+
+ auto accountsPath = versionPath.path("/accounts");
+
+ accountsPath
+ .route(desc.get("/all"))
+ .bind(&HttpService::retrieveAllAccounts, this)
+ .produces(MIME(Application, Json), MIME(Application, Xml))
+ .response(Http::Code::Ok, "The list of all account");
+
+ accountsPath
+ .route(desc.get("/:name"), "Retrieve an account")
+ .bind(&HttpService::retrieveAccount, this)
+ .produces(MIME(Application, Json))
+ .parameter<Rest::Type::String>("name", "The name of the account to retrieve")
+ .response(Http::Code::Ok, "The requested account")
+ .response(backendErrorResponse);
+
+ accountsPath
+ .route(desc.post("/:name"), "Create an account")
+ .bind(&HttpService::createAccount, this)
+ .produces(MIME(Application, Json))
+ .consumes(MIME(Application, Json))
+ .parameter<Rest::Type::String>("name", "The name of the account to create")
+ .response(Http::Code::Ok, "The initial state of the account")
+ .response(backendErrorResponse);
+ auto accountPath = accountsPath.path("/:name");
+ accountPath.parameter<Rest::Type::String>("name", "The name of the account to operate on");
+
+ accountPath
+ .route(desc.post("/budget"), "Add budget to the account")
+ .bind(&HttpService::creditAccount, this)
+ .produces(MIME(Application, Json))
+ .response(Http::Code::Ok, "Budget has been added to the account")
+ .response(backendErrorResponse);
+ }
+
+ void retrieveAllAccounts(const Rest::Request&, Http::ResponseWriter response)
+ {
+ response.send(Http::Code::Ok,
+ "No Account",
+ { Http::Mime::Type::Text, Http::Mime::Subtype::Plain} );
+ }
+
+ void retrieveAccount(const Rest::Request&, Http::ResponseWriter response)
+ {
+ response.send(Http::Code::Ok, "The bank is closed, come back later");
+ }
+
+ void createAccount(const Rest::Request&, Http::ResponseWriter response)
+ {
+ response.send(Http::Code::Ok, "The bank is closed, come back later");
+ }
+
+ void creditAccount(const Rest::Request&, Http::ResponseWriter response)
+ {
+ response.send(Http::Code::Ok, "The bank is closed, come back later");
+ }
+
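+ // Streams the most recent frame as a binary PGM; pgm_image/pgm_image_size
+ // are filled by pgm_save() and guarded by pgm_image_mtx.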
+ void image(const Rest::Request&, Http::ResponseWriter response)
+ {
+ // FIXME: image should be valid
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ // char data[pgm_image_size];
+ // memcpy(data, pgm_image, pgm_image_size);
+ std::cout << "send bytes: " << pgm_image_size << std::endl;
+
+ auto res = response.send(Http::Code::Ok,
+ (const char*)pgm_image, pgm_image_size,
+ Http::Mime::MediaType { "image/pgm" });
+ // { Http::Mime::Type::Image, Http::Mime::Subtype::Png });
+
+ res.then([](ssize_t bytes)
+ { std::cout << bytes << " bytes have been sent\n"; },
+ Async::NoExcept);
+ }
+
+ void get_sensorParam(const Rest::Request&, Http::ResponseWriter response)
+ {
+ response.send(Http::Code::Ok, std::to_string(123));
+ }
+
+ void get_sensorParams(const Rest::Request&, Http::ResponseWriter response)
+ {
+ response.send(Http::Code::Ok, std::to_string(123));
+ }
+
+ void set_sensorParam(const Rest::Request&, Http::ResponseWriter response)
+ {
+ response.send(Http::Code::Ok, std::to_string(123));
+ }
+
+ std::shared_ptr<Http::Endpoint> httpEndpoint;
+ Rest::Description desc;
+ Rest::Router router;
+};
diff --git a/imagealgos.cpp b/imagealgos.cpp
new file mode 100644
index 0000000..23902d1
--- /dev/null
+++ b/imagealgos.cpp
@@ -0,0 +1,246 @@
+#include "imagealgos.h"
+
+#include <cassert>
+#include <cstdint>
+#include <cstring>
+
+#include <algorithm>
+#include <chrono>
+#include <iostream>
+#include <limits>
+#include <mutex>
+#include <typeinfo>
+#include <utility>
+
+// #include <arm_neon.h>
+
+#include "genetic_algos.h"
+#include "macro.h"
+
+uint8_t pgm_image[64 + img_width * img_height * sizeof(uint16_t)];
+size_t pgm_image_size = 0;
+std::mutex pgm_image_mtx;
+
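+// Serializes img into the shared pgm_image buffer as an 8-bit binary PGM (P5),
+// keeping only the high byte of every 16-bit pixel; optionally also writes the
+// result to outfile. Returns the number of bytes produced.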
+size_t pgm_save(Image *img, FILE *outfile, bool really_save) {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+
+ size_t n = 0;
+
+ // n += fprintf(outfile, "P5\n%d %d\n%d\n",
+ // img->width, img->height, 0xFF);
+ n += sprintf((char*)pgm_image, "P5\n%d %d\n%d\n",
+ img->width, img->height, 0xFF);
+
+ for (size_t i = 0; i < img->width * img->height; ++i)
+ {
+ uint16_t *pixels = (uint16_t*)img->data;
+ const auto p = pixels[i];
+ uint8_t value = (pixels[i] & 0xFF00) >> 8;
+
+ // n += fwrite(&value, 1, 1, outfile);
+ memcpy((void*)(pgm_image + n), &value, sizeof(value));
+ n += sizeof(value);
+ }
+
+ pgm_image_size = n;
+
+ // std::cout << "size is " << n << std::endl;
+
+ if (really_save)
+ {
+ fwrite(pgm_image, 1, pgm_image_size, outfile);
+ fflush(outfile);
+ }
+
+ return n;
+}
+
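+// Unpacks 10-bit packed raw data: every 5 bytes hold four pixels, the first
+// four bytes carrying the high 8 bits and the fifth byte the four 2-bit
+// remainders.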
+void unpack_10bit(uint8_t const *src, Image const &image, uint16_t *dest)
+{
+ unsigned int w_align = image.width & ~3;
+ for (unsigned int y = 0; y < image.height; y++, src += image.stride)
+ {
+ uint8_t const *ptr = src;
+ unsigned int x;
+ for (x = 0; x < w_align; x += 4, ptr += 5)
+ {
+ *dest++ = (ptr[0] << 2) | ((ptr[4] >> 0) & 3);
+ *dest++ = (ptr[1] << 2) | ((ptr[4] >> 2) & 3);
+ *dest++ = (ptr[2] << 2) | ((ptr[4] >> 4) & 3);
+ *dest++ = (ptr[3] << 2) | ((ptr[4] >> 6) & 3);
+ }
+ for (; x < image.width; x++)
+ *dest++ = (ptr[x & 3] << 2) | ((ptr[4] >> ((x & 3) << 1)) & 3);
+ }
+}
+
+void unpack_16bit(uint8_t const *src, Image const &image, uint16_t *dest)
+{
+ start_timer(unpack_16bit);
+ /* Assume the pixels in memory are already in native byte order */
+ unsigned int w = image.width;
+
+ for (unsigned int y = 0; y < image.height; y++)
+ {
+ memcpy(dest, src, 2 * w);
+ dest += w;
+ src += image.stride;
+ }
+ stop_timer(unpack_16bit);
+}
+
+void rotate(Image &image)
+{
+ start_timer(rotate);
+
+ using namespace std;
+
+ for (size_t i = 0; i < image.height; ++i)
+ {
+ for (size_t j = 0; j < image.width; ++j)
+ {
+ image.rotated_cw[j][i] = image.data[image.height - 1 - i][j];
+ }
+ }
+
+ stop_timer(rotate);
+}
+
+template<class T, size_t N>
+constexpr size_t mysize(T (&)[N]) { return N; }
+
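+// Estimates the sub-pixel position of the dominant peak in a column: builds a
+// running (integral) sum, correlates each sample with the windowed sum around
+// it, detects the correlation maximum via the sign change of the rioux0/rioux1
+// differences and interpolates between the two samples. The result is measured
+// from the bottom of the image (img_height - x); NaN is returned when no peak
+// exceeds the threshold.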
+float process_column(uint16_t (&column)[])
+{
+ float result = std::numeric_limits<float>::quiet_NaN();
+
+ constexpr uint32_t patternSize = 16; // good
+ constexpr uint32_t signalThreshold = 450; // = SKO * sqrt(patternSize)
+ static constexpr uint32_t patternOffset = patternSize - ((patternSize % 2 == 1) ? 1 : 0);
+ const uint32_t correlationSize = img_height - patternSize +
+ ((patternSize % 2 == 1) ? 1 : 0);
+ uint32_t correlation[img_height];
+ uint32_t integralSum[img_height];
+ uint32_t maxSum = signalThreshold * 50;
+ uint32_t x1 = 0;
+ int32_t y1 = 0;
+ int32_t y2 = 0;
+
+ memset(correlation, 0, img_height * sizeof(uint32_t));
+ integralSum[0] = 0;
+
+ for(uint32_t i = 1; i < img_height; ++i) {
+ if (column[i] < 100) {
+ column[i] = 1;
+ }
+ integralSum[i] = column[i] / 256 + integralSum[i - 1];
+ }
+
+ // maxSum = 0 ;
+ // size_t maxIdx { 0 };
+
+ // for (size_t i = 0; i < img_height - patternSize; ++i) {
+ // const auto sum = integralSum[i + patternSize] - integralSum[i];
+ // if (sum > maxSum) {
+ // maxSum = sum;
+ // maxIdx = i;
+ // }
+ // }
+
+ // Algo genetic(column + maxIdx);
+ // // std::cout << "maxIdx " << maxIdx << std::endl;
+
+ // // return maxIdx + genetic.run().a;
+ // return 500;
+ // return img_height - maxIdx - genetic.run().a;
+
+
+ for(uint32_t i = 0; i < correlationSize; ++i)
+ correlation[i + patternSize / 2] =
+ column[i + patternSize / 2] / 256 *
+ (integralSum[i + patternOffset] - integralSum[i]);
+
+ for(uint32_t i = 3; i < img_height - 2; ++i)
+ {
+ const auto sum = correlation[i - 1] +
+ correlation[i] +
+ correlation[i + 1];
+
+ if(sum > maxSum)
+ {
+ const int32_t rioux0 = int32_t(correlation[i - 2 - 1] + correlation[i - 1 - 1]) -
+ int32_t(correlation[i + 1 - 1] + correlation[i + 2 - 1]);
+
+ if(rioux0 < 0)
+ {
+ const int32_t rioux1 = int32_t(correlation[i - 2] + correlation[i - 1]) -
+ int32_t(correlation[i + 1] + correlation[i + 2]);
+
+ if(rioux1 >= 0)
+ {
+ x1 = i - 1;
+ y1 = rioux0;
+ y2 = rioux1;
+ maxSum = sum;
+ }
+ }
+ }
+ }
+
+ result = (y2 != y1) ?
+ (img_height - (float(x1) - (float(y1) / (y2 - y1)))) :
+ std::numeric_limits<float>::quiet_NaN();
+
+
+ static bool result_done = false;
+ if (!result_done) {
+ std::cout << "result " << result << std::endl;
+ result_done = true;
+ }
+ // std::cout << "result is '" << result << "'\n";
+
+ return result;
+
+// center of mass
+#if 0
+ auto max_el = std::max_element(std::begin(accumulated_sum),
+ std::end(accumulated_sum) - window_size);
+
+ size_t max_sum_idx = max_el - std::begin(accumulated_sum) + window_size;
+
+ double sum_w = 0;
+ double prod_wx = 0;
+ double wmc = 0;
+
+ for(int i = max_sum_idx - window_size; i < max_sum_idx; ++i)
+ {
+ prod_wx += column[i] * i;
+ sum_w += column[i];
+ }
+
+ wmc = float(prod_wx) / float(sum_w);
+
+ result = img_height - wmc;
+
+ return result;
+#endif
+}
+
+void process_columns(Image &image)
+{
+ std::cout << "here\n";
+ start_timer(process_columns);
+
+ for (size_t i = 0; i < image.width; i++)
+ {
+ image.pixels[i] = process_column(image.rotated_cw[i]);
+ // Algo genetic(image.rotated_cw[i]);
+
+ // image.pixels[i] = genetic.run().a;
+
+ // if (i == 0) {
+ // std::cout << "pixel: " << image.pixels[i] << std::endl;
+ // }
+ }
+
+ stop_timer(process_columns);
+}
diff --git a/imagealgos.h b/imagealgos.h
new file mode 100644
index 0000000..b1efc87
--- /dev/null
+++ b/imagealgos.h
@@ -0,0 +1,29 @@
+#pragma once
+
+#include <cstddef>
+#include <cstdint>
+#include <cstdio>
+
+#include <libcamera/pixel_format.h>
+
+constexpr size_t img_width = 1280;
+constexpr size_t img_height = 800;
+
+struct Image
+{
+ int width;
+ int height;
+ uint16_t data[img_height][img_width];
+ uint16_t rotated_cw[img_width][img_height];
+ size_t dataSize;
+ unsigned int stride;
+ libcamera::PixelFormat pixelFormat;
+ float pixels[img_width];
+};
+
+size_t pgm_save(Image *img, FILE *outfile, bool really_save = true);
+
+void unpack_10bit(uint8_t const *src, Image const &image, uint16_t *dest);
+void unpack_16bit(uint8_t const *src, Image const &image, uint16_t *dest);
+
+void rotate(Image & image);
+void process_columns(Image & image);
diff --git a/macro.h b/macro.h
new file mode 100644
index 0000000..80998e5
--- /dev/null
+++ b/macro.h
@@ -0,0 +1,11 @@
+#pragma once
+
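+// NOTE: these timers currently only capture the begin_/end_ time points;
+// no elapsed time is computed or printed yet.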
+#define start_timer(name) \
+ std::chrono::steady_clock::time_point begin_ ## name = \
+ std::chrono::steady_clock::now();
+
+#define stop_timer(name) \
+ std::chrono::steady_clock::time_point end_ ## name = \
+ std::chrono::steady_clock::now();
+
diff --git a/main.cpp b/main.cpp
new file mode 100644
index 0000000..0aff149
--- /dev/null
+++ b/main.cpp
@@ -0,0 +1,783 @@
+#define QT_NO_KEYWORDS
+#include <QCoreApplication>
+#include <QDebug>
+#include <QFile>
+#include <QHttpServer>
+#include <QJsonArray>
+#include <QJsonDocument>
+#include <QJsonObject>
+#include <QtConcurrent/QtConcurrentRun>
+#undef QT_NO_KEYWORDS
+
+#include <chrono>
+#include <errno.h>
+#include <iostream>
+#include <fstream>
+#include <string.h>
+#include <thread>
+
+#include "httpservice.h"
+#include "genetic_algos.h"
+#include "imagealgos.h"
+#include "LibCamera.h"
+
+#define try_apply_config() \
+if(!applyConfig(config)) \
+ { \
+ camera->release(); \
+ cm->stop(); \
+ \
+ return EXIT_FAILURE;\
+ }
+
+
+const QString exposureTimeKey = "exposureTime";
+const QString laserLevelKey = "laserLevel";
+
+struct requested_params_t {
+ int32_t exposureTime = { 200 };
+ int32_t laserLevel = { 7000 };
+} requested_params;
+Image img;
+
+using namespace std::chrono_literals;
+
+static std::shared_ptr<Camera> camera;
+std::unique_ptr<CameraConfiguration> config;
+static std::map<int, std::pair<void *, unsigned int>> mappedBuffers_;
+std::vector<std::unique_ptr<Request>> requests;
+ControlList lastControls;
+
+static bool applyConfig(const std::unique_ptr<CameraConfiguration> & config);
+static void onRequestCompleted(Request *completed_request);
+static void printControls();
+
+int main(int argc, char *argv[]) {
+ QCoreApplication app(argc, argv);
+ qDebug() << "Hello qt";
+ // FIXME: don't use one var for everything
+ int ret;
+ std::unique_ptr<CameraManager> cm = std::make_unique<CameraManager>();
+ cm->start();
+
+ const auto cameras = cm->cameras();
+
+ if (cameras.empty())
+ {
+ std::cout << "No cameras were identified on the system." << std::endl;
+ cm->stop();
+
+ return EXIT_FAILURE;
+ }
+
+ std::string cameraId = cameras[0]->id();
+
+ std::cout << "using " << cameraId << std::endl;
+
+ /*
+ * Note that `camera` may not compare equal to `cameras[0]`.
+ * In fact, it might simply be a `nullptr`, as the particular
+ * device might have disappeared (and reappeared) in the meantime.
+ */
+ // std::shared_ptr<Camera> camera = cm->get(cameraId);
+ camera = cm->get(cameraId);
+
+ if (camera->acquire() != EXIT_SUCCESS)
+ {
+ std::cout << "Cannot acquire camera." << std::endl;
+ cm->stop();
+
+ return EXIT_FAILURE;
+ }
+
+ // FIXME: nullptr
+ // std::unique_ptr<CameraConfiguration> config = camera->generateConfiguration( { StreamRole::Viewfinder } );
+ /*std::unique_ptr<CameraConfiguration> */config = camera->generateConfiguration( { StreamRole::Raw } );
+
+ if (config->empty())
+ {
+ std::cerr << "No configurations generated." << std::endl;
+ cm->stop();
+
+ return EXIT_FAILURE;
+ }
+
+ config->orientation = libcamera::Orientation::Rotate90;
+
+ // if (config->validate() != EXIT_SUCCESS)
+
+ // if (camera->configure(config.get()) != EXIT_SUCCESS)
+ // {
+ // std::cerr << "cannot configure camera" << std::endl << std::flush;
+ // cm->stop();
+
+ // return EXIT_FAILURE;
+ // }
+
+ // FIXME: nullptr
+ StreamConfiguration &streamConfig = config->at(0);
+ std::cout << "Default viewfinder configuration is: " << streamConfig.toString() << std::endl;
+ std::cout << "Pixel format is: " << streamConfig.pixelFormat.toString() << std::endl;
+ std::cout << "Buffer count is: " << streamConfig.bufferCount << std::endl;
+ // FIXME: empty variant
+ std::cout << "Color space is: " << streamConfig.colorSpace.value().toString() << std::endl;
+ std::cout << "Orientation is: " << config->orientation << std::endl;
+ // formats::R8,
+ // formats::R10,
+ // formats::R12,
+ // formats::R16,
+ // formats::R10_CSI2P, // camera->configure failure
+ // formats::R12_CSI2P, // camera->configure failure
+ // streamConfig.pixelFormat = PixelFormat::fromString("R8");
+ // streamConfig.pixelFormat = PixelFormat::fromString("Y8_1X8");
+
+ // streamConfig.pixelFormat = formats::R8;
+ streamConfig.pixelFormat = formats::R16;
+ streamConfig.bufferCount = 2;
+ // what is default R10_CSI2P? MONO_PISP_COMP1?
+ // MONO_PISP_COMP1 - check rpicam-apps sources for decoding algos
+ // streamConfig.pixelFormat = formats::R10_CSI2P;
+ // streamConfig.bufferCount = 16;
+ try_apply_config()
+
+ // #define doit(rotation) \
+ // std::cout << "set rotation to: " << libcamera::Orientation:: rotation \
+ // << std::endl; \
+ // config->orientation = libcamera::Orientation:: rotation; \
+ // try_apply_config()
+
+ // doit(Rotate0Mirror);
+ // doit(Rotate180);
+ // doit(Rotate180Mirror);
+ // doit(Rotate90Mirror);
+ // doit(Rotate270);
+ // doit(Rotate270Mirror);
+ // doit(Rotate90);
+
+ std::cout << "new config " << streamConfig.toString() << std::endl;
+
+ // FIXME: may crash even on success (e.g. by setting pixelFormat to "8")
+ if (camera->configure(config.get()) != EXIT_SUCCESS)
+ {
+ std::cout << "cannot apply config, quit." << std::endl;
+ camera->release();
+ cm->stop();
+
+ return EXIT_FAILURE;
+ }
+
+ // TODO: try custom FrameBufferAllocator and compare performance
+
+ auto allocator = std::make_shared<FrameBufferAllocator>(camera);
+
+ auto stream = streamConfig.stream();
+
+ ret = allocator->allocate(stream);
+
+ // TODO: check if zero
+ if (ret < 0)
+ {
+ std::cerr << "Can't allocate buffers" << std::endl;
+ // return -ENOMEM;
+ return ret;
+ }
+
+ size_t allocated = size_t(ret);
+ std::cout << "Allocated " << allocated << " buffers for stream" << std::endl;
+
+ const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator->buffers(stream);
+
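+ // Create one capture Request per allocated buffer and mmap() every plane
+ // read-only so completed frames can be inspected in onRequestCompleted().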
+ // for (size_t i = 0; i < buffers.size(); ++i)
+ static int expOffset = 0;
+ for (const auto & buffer : buffers)
+ {
+ std::unique_ptr<Request> request = camera->createRequest();
+
+ if (!request)
+ {
+ std::cerr << "Can't create request" << std::endl;
+ return -ENOMEM;
+ }
+
+ // TODO: try multiple buffers per request and compare performance
+ int ret = request->addBuffer(stream, buffer.get());
+
+ if (ret < 0)
+ {
+ std::cerr << "Can't set buffer for request" << std::endl;
+
+ return ret;
+ }
+
+ for (const auto & plane : buffer->planes())
+ {
+ void *memory = mmap(NULL, plane.length, PROT_READ, MAP_SHARED,
+ plane.fd.get(), 0);
+ mappedBuffers_[plane.fd.get()] =
+ std::make_pair(memory, plane.length);
+ }
+
+ // size_t desiredFPS = 200;
+
+ // std::int64_t lowerUS = 1 * 1000 * 1000 / desiredFPS;
+ // std::int64_t higherUS = lowerUS;
+ // std::int64_t value_pair[2] = { higherUS / 2, higherUS };
+ request->controls().set(libcamera::controls::AnalogueGain, 1.0);
+ request->controls().set(libcamera::controls::ExposureTime, 4321 + expOffset++ * 100);
+ // request->controls().set(
+ // libcamera::controls::FrameDurationLimits,
+ // libcamera::Span<const std::int64_t, 2>(value_pair));
+
+ requests.push_back(std::move(request));
+ }
+
+ camera->requestCompleted.connect(onRequestCompleted);
+
+ std::unique_ptr<libcamera::ControlList> camcontrols { new libcamera::ControlList() };
+ // camcontrols->set(controls::FrameDurationLimits, libcamera::Span<const std::int64_t, 2>({8702, 10718903}));
+ // camcontrols->set(controls::ExposureTime, 100);
+ // camcontrols->set(controls::AnalogueGain, 0.1);
+
+ std::this_thread::sleep_for(500ms);
+
+ if (camera->start(camcontrols.get()))
+ {
+ qDebug() << "failed to start camera";
+ return EXIT_FAILURE;
+ }
+
+ // camera->start();
+
+ for (auto & request : requests)
+ {
+ camera->queueRequest(request.get());
+ }
+
+ printControls();
+
+ // std::this_thread::sleep_for(2s);
+ // TODO: move to thread
+ // Http::listenAndServe<HttpHandler>(Pistache::Address("*:8080"));
+
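+ // HTTP endpoints: the Qt HTTP server (port 8081) serves the latest PGM
+ // frame, the extracted per-column pixel positions and the sensor parameters;
+ // a separate Pistache/Swagger service is started on port 8080 further below.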
+ QHttpServer qHttpServer;
+ qHttpServer.route("/v1/sensor/image", [&]() {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ // qDebug() << "image";
+ return QByteArray((const char*)pgm_image, pgm_image_size);
+ });
+ qHttpServer.route("/v1/sensor/image2", [&]() {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ // qDebug() << "image";
+ return QByteArray((const char*)pgm_image, pgm_image_size);
+ });
+ qHttpServer.route("/v1/sensor/exposureTimeUs", [&]() {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ return "123";
+ });
+ qHttpServer.route("/v1/pixels", [&]() {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+
+ QJsonArray pixels;
+
+ for (size_t i = 0; i < img_width; ++i) {
+ pixels << img_height - img.pixels[i];
+ }
+
+ QJsonObject json;
+ json["pixels"] = pixels;
+
+ return QHttpServerResponse(QJsonDocument(json).toJson());
+ });
+
+ qHttpServer.route("/v1/sensor/params", [&](const QHttpServerRequest &request) -> QHttpServerResponse {
+
+ switch (request.method()) {
+ case QHttpServerRequest::Method::Get:
+ {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ QJsonObject json;
+
+ const ControlIdMap & ctrlIdMap = camera->controls().idmap();
+
+ qDebug() << "readParams:" << lastControls.size();
+ qDebug() << request.method();
+
+ for (const auto & [id, value]: lastControls)
+ {
+ const ControlId * controlId = ctrlIdMap.at(id);
+ auto name = QString::fromStdString(controlId->name());
+ const auto valueStr = QString::fromStdString(value.toString());
+ qDebug() << "\t param:"
+ << controlId->id()
+ << name
+ << valueStr
+ ;
+
+ name[0] = name[0].toLower();
+ json[name] = valueStr;
+ }
+
+ json[laserLevelKey] = requested_params.laserLevel;
+
+ qDebug() << "response body:" << json;
+
+ // QHttpServerResponse
+ return QHttpServerResponse(QJsonDocument(json).toJson());
+ }
+
+ case QHttpServerRequest::Method::Post:
+ {
+ qDebug() << "request body:" << request.body();
+
+ auto json = QJsonDocument::fromJson(request.body()).object();
+
+ if (json.contains(exposureTimeKey)) {
+ const int32_t value { json[exposureTimeKey].toInt() };
+
+ if (value == 0) {
+ return QHttpServerResponse::StatusCode::NotFound;
+ }
+
+ qDebug() << "set new exposure time:" << value;
+ requested_params.exposureTime = value;
+ }
+
+ if (json.contains(laserLevelKey)) {
+ const int32_t value { json[laserLevelKey].toInt() };
+
+ if (value == 0) {
+ return QHttpServerResponse::StatusCode::NotFound;
+ }
+
+ qDebug() << "set new laserLevel:" << value;
+ requested_params.laserLevel = value;
+
+ const QString laserLevelFile { "/sys/class/pwm/pwmchip2/pwm1/duty_cycle"};
+ QFile f { laserLevelFile };
+
+ if (!f.open(QFile::ReadWrite)) {
+ qDebug() << "cannot open laser level file:" << f.errorString();
+ qDebug() << "file path is" << f.fileName();
+ return QHttpServerResponse::StatusCode::InternalServerError;
+ }
+
+ QTextStream s { &f };
+
+ s << value;
+
+ s >> requested_params.laserLevel;
+ }
+
+ return QHttpServerResponse(request.body());
+ }
+ default:
+ {
+ return QHttpServerResponse(QByteArray("unsupported http method"));
+ }
+ }
+ });
+
+ qDebug() << "listen: " << qHttpServer.listen(QHostAddress::Any, 8081);
+
+ QFuture<void> future = QtConcurrent::run([](){
+
+ Port port(8080);
+ Address addr(Ipv4::any(), port);
+
+ HttpService httpService(addr);
+
+ size_t threads_count = 1;
+ httpService.init(threads_count);
+ httpService.start();
+ });
+
+ ////////////////////////////////////////////////////////////////////////////
+ std::clog << std::flush;
+ std::cerr << std::flush;
+ std::cout << "ok for now" << std::endl << std::flush;
+
+ // camera->stop();
+ // camera->release();
+ // cm->stop();
+
+ auto result = app.exec();
+
+ future.cancel();
+ future.waitForFinished();
+
+ for (auto & [fd, mem] : mappedBuffers_)
+ {
+ munmap(mem.first, mem.second);
+ }
+
+ // FIXME: crash somewhere here. proper libcamera finishing needed
+ requests.clear();
+ mappedBuffers_.clear();
+
+ camera->stop();
+ config.reset();
+ allocator->free(stream);
+ allocator.reset();
+ camera->release();
+ camera.reset();
+ cm->stop();
+
+ return result;
+
+ // time_t start_time = time(0);
+ // int frame_count = 0;
+
+ // LibCamera cam;
+ // uint32_t width = 1280;
+ // uint32_t height = 800;
+ // uint32_t stride;
+ // char key;
+
+ // ret = cam.initCamera();
+
+ // if (ret != EXIT_SUCCESS)
+ // {
+ // std::cerr << "cannot open camera" << std::endl;
+
+ // return EXIT_FAILURE;
+ // }
+
+ // cam.configureStill(width, height, formats::R8, 1, 0);
+ // // ControlList controls_;
+ // int64_t frame_time = 1000000 / 10;
+ // // Set frame rate
+ // // controls_.set( controls::FrameDurationLimits, libcamera::Span<const int64_t, 2>(
+ // // { frame_time, frame_time } ));
+ // // Adjust the brightness of the output images, in the range -1.0 to 1.0
+ // // controls_.set(controls::Brightness, 0.5);
+ // // Adjust the contrast of the output image, where 1.0 = normal contrast
+ // // controls_.set(controls::Contrast, 1.5);
+ // // Set the exposure time
+ // // controls_.set(controls::ExposureTime, 20000);
+ // // cam.set(controls_);
+
+ // std::cout << std::flush;
+
+ // // NOTE: already checked
+ // if (ret == EXIT_SUCCESS) {
+ // bool flag;
+ // LibcameraOutData frameData;
+ // cam.startCamera();
+ // cam.VideoStream(&width, &height, &stride);
+
+ // while (true) {
+ // flag = cam.readFrame(&frameData);
+ // if (!flag)
+ // continue;
+
+ // // key = waitKey(1);
+ // // if (key == 'q') {
+ // // break;
+ // // } else if (key == 'f') {
+ // // ControlList controls;
+ // // controls.set(controls::AfMode, controls::AfModeAuto);
+ // // controls.set(controls::AfTrigger, 0);
+ // // cam.set(controls);
+ // // }
+
+
+ // frame_count++;
+ // if ((time(0) - start_time) >= 1){
+ // printf("fps: %d\n", frame_count);
+ // frame_count = 0;
+ // start_time = time(0);
+ // }
+ // cam.returnFrameBuffer(frameData);
+ // }
+
+ // cam.stopCamera();
+ // }
+
+ // cam.closeCamera();
+
+ // return EXIT_SUCCESS;
+}
+
+/*
+ * Signals operate in the libcamera CameraManager thread context, so it is
+ * important not to block the thread for a long time, as this blocks internal
+ * processing of the camera pipelines, and can affect realtime performance.
+*/
+void onRequestCompleted(Request *completed_request)
+{
+ bool verbose = false;
+
+ if (completed_request->status() == Request::RequestCancelled)
+ {
+ std::cerr << "request canceled" << std::endl;
+
+ return;
+ }
+
+ const std::map<const Stream *, FrameBuffer *> &buffers = completed_request->buffers();
+
+ // std::cout << "request completed, buffers count is " << buffers.size();
+
+ // // TODO: rewrite this shit
+ for (auto [stream, buffer] : buffers)
+ {
+ const auto & streamConfig = stream->configuration();
+ const auto & imageSize = streamConfig.size;
+ const auto & pixelFormat = streamConfig.pixelFormat;
+ const auto & stride = streamConfig.stride;
+
+ const FrameMetadata &metadata = buffer->metadata();
+
+ if (verbose)
+ {
+ std::cout << " seq: " << std::setw(6) << std::setfill('0')
+ << metadata.sequence
+ << " bytesused: ";
+ }
+
+ for (size_t i = 0; i < buffer->planes().size(); ++i)
+ {
+ const FrameBuffer::Plane & plane = buffer->planes()[i];
+ const FrameMetadata::Plane & metaplane = buffer->metadata().planes()[i];
+
+ size_t size = std::min(metaplane.bytesused, plane.length);
+ void * data = mappedBuffers_[plane.fd.get()].first;
+
+ // std::cout << metaplane.bytesused << "/" << plane.length;
+
+ // std::cout << " stride " << stride;
+ // std::cout << " planes count: " << buffer->planes().size() << " ";
+ // std::cout << std::endl;
+
+ // if (metadata.sequence == 20)
+ {
+ // FIXME: remove hardcode
+ img.width = imageSize.width;
+ img.height = imageSize.height;
+ // img.data = data;
+ memcpy(img.data, data, size);
+ img.dataSize = size;
+ img.stride = stride;
+ img.pixelFormat = pixelFormat;
+
+ // uint16_t unpacked[img.width * img.height] = { 0 };
+ // unpack_16bit((uint8_t*)img.data, img, (uint16_t*)&unpacked);
+ // img.data = unpacked;
+ // img.dataSize = img.width * img.height * sizeof(uint16_t);
+ rotate(img);
+ process_columns(img);
+
+ static bool done = false;
+ // mark pixels and max region
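+ // NOTE: assumes process_column() returned a finite value and that the
+ // detected row is at least 6 pixels away from the top/bottom borders;
+ // otherwise the writes below go out of range.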
+ for (size_t i = 0; i < img_width; ++i)
+ {
+ // std::cout << "\t" << img.pixels[i] << std::endl;
+ // uint
+ // const auto & p = img.pixels[i];
+ // const auto int_p = int(p);
+ // const auto fract = p - int_p;
+
+
+ // img.data[int_p][i] = 256 * 256 * fract;
+ // img.data[int_p + 1][i] = 256 * 256 * (1.0 - fract);
+
+
+ // if (!done) {
+ // std::cout << fract << " ";
+ // }
+
+ img.data[size_t(img.pixels[i])][i] = 0;
+ img.data[size_t(img.pixels[i]) - 6][i] = 0xffff;
+ img.data[size_t(img.pixels[i]) + 6][i] = 0xffff;
+ }
+ done = true;
+
+ // // FILE * f = fopen("/tmp/R16.pgm", "w");
+ FILE * f = fopen("/tmp/img.pgm", "w");
+ // // FILE * f = fopen("/tmp/MONO_PISP_COMP1.pgm", "w");
+
+ if (f == NULL)
+ {
+ std::cerr << "cannot open output file: "
+ << strerror(errno)
+ << std::endl;
+ }
+ else
+ {
+ // pgm_save(&img, f);
+ pgm_save(&img, f);
+ fclose(f);
+ // std::cout << "file written" << std::endl;
+ }
+ }
+ }
+ }
+
+ const libcamera::ControlList &metadata = completed_request->metadata();
+ const ControlInfoMap & control_map = camera->controls();
+ const ControlIdMap & ctrlIdMap = control_map.idmap();
+
+ auto frameDurationCtrl = control_map.find(&controls::FrameDurationLimits);
+ auto expTimeCtrl = control_map.find(&controls::ExposureTime);
+ double fps = frameDurationCtrl == control_map.end() ?
+ std::numeric_limits<double>::quiet_NaN() :
+ (1e6 / frameDurationCtrl->second.min().get<int64_t>());
+
+ auto exp = metadata.get(controls::ExposureTime);
+ auto ag = metadata.get(controls::AnalogueGain);
+ auto ae = metadata.get(controls::AeEnable);
+ // auto br= metadata.get(controls::Brightness);
+ lastControls = completed_request->controls();
+
+ if (verbose)
+ {
+ std::cout << "fps: " << fps
+ << " exp: " << *exp
+ << " ag: " << *ag
+ // << " br: " << *br
+ << " ae: " << *ae
+ << " aa: " << *completed_request->controls()
+ .get(libcamera::controls::ExposureTime)
+ << std::endl;
+ }
+
+ for (const auto & [id, value] : metadata)
+ {
+
+ }
+
+ // metadata.set(controls::ExposureTime, 300);
+
+ // exp->set(*exp + 1);
+ // expTimeCtrl->second().set(*exp + 1);
+ // auto expTimeCtrlId= expTimeCtrl->id();
+
+
+ // properties.set(controls::ExposureTime, 1000);
+
+ // std::optional<uint32_t> expTimeOptional = properties.get(controls::ExposureTime);
+
+ // if (expTimeOptional.has_value())
+ // {
+ // // uint32_t value = expTimeOptional.value();
+
+ // auto frameDurationLimits = controls.find(&controls::FrameDurationLimits)->second;
+ // auto min = frameDurationLimits.min().get<int64_t>();
+ // auto max = frameDurationLimits.max().get<int64_t>();
+ // // auto val = properties.find(controls::FrameDurationLimits)->value();//.second().min().get<int64_t>()
+ // // auto second = val.second();
+ // auto framerate = 1.0e6 / min;
+ // auto rAG = request->controls().get<float>(libcamera::controls::AnalogueGain);
+ // auto rET = request->controls().get<int32_t>(libcamera::controls::ExposureTime);
+ // int32_t randET = rand() % 9000 + 1000;
+ // request->controls().set(libcamera::controls::ExposureTime, 100);
+ // std::cout << "exposure time (us): "
+ // << properties.get(controls::ExposureTime).value()
+ // << " frame duration limits (ns): " << min << "/" << max
+ // << " framerate: " << framerate
+ // << " " << rAG.value_or(321)
+ // << " " << rET.value_or(321)
+ // << std::endl;
+ // // std::cout << "noise reduction mode: "
+ // // << properties.get(controls::AwbMode).value()
+ // // << std::endl;
+ // // NoiseReductionMode
+ // }
+
+ // completed_request->reuse(Request::ReuseBuffers);
+ // camera->queueRequest(completed_request);
+
+
+ completed_request->reuse(Request::ReuseBuffers);
+
+ completed_request->controls().set(libcamera::controls::AeEnable, false);
+ completed_request->controls().set(libcamera::controls::draft::NoiseReductionMode,
+ libcamera::controls::draft::NoiseReductionModeEnum::NoiseReductionModeHighQuality);
+ // completed_request->controls().set(libcamera::controls::ExposureTime, rand() % 1000 + 100);
+
+ static bool done0 = false;
+ static bool done1 = false;
+
+ if (!done0 || !done1)
+ {
+
+ if (completed_request->sequence() % 2 == 0)
+ {
+ // qDebug() << "set 0" << completed_request->sequence();
+ completed_request->controls().set(
+ libcamera::controls::ExposureTime,
+ requested_params.exposureTime);
+ done0 = true;
+ }
+ else
+ {
+ // qDebug() << "set 1" << completed_request->sequence();
+ completed_request->controls().set(
+ libcamera::controls::ExposureTime,
+ requested_params.exposureTime);
+ done1 = true;
+ }
+ }
+
+ camera->queueRequest(completed_request);
+}
+
+static bool applyConfig(const std::unique_ptr<CameraConfiguration> & config)
+{
+ auto status = config->validate();
+
+ // WARNING: unsafe
+ StreamConfiguration &streamConfig = config->at(0);
+
+ switch (status) {
+ case CameraConfiguration::Status::Valid:
+ std::cout << "config is valid" << std::endl;
+ break;
+ case CameraConfiguration::Status::Adjusted:
+ std::cout << "\tpixelFormat: "
+ << streamConfig.pixelFormat.toString() << std::endl;
+ std::cout << "\tbufferCount: "
+ << streamConfig.bufferCount << std::endl;
+ std::cout << "\torientation: "
+ << config->orientation << std::endl;
+ break;
+ case CameraConfiguration::Status::Invalid:
+ std::cout << "config is invalid, quit." << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+static void printControls()
+{
+ const ControlInfoMap & control_map = camera->controls();
+
+ // for (const auto & [id, info]: control_map)
+ for (const std::pair<const ControlId *, ControlInfo> & pair : control_map)
+ {
+ const ControlId * const & id = pair.first;
+ const ControlInfo & info = pair.second;
+
+ std::cout << "\tc " << id->name()
+ << " (" << id->id()
+ << "): " << info.toString()
+ << (info.def().isNone() ? "" : " (dflt:" + info.def().toString() + ")");
+
+ if (!info.values().size())
+ {
+ std::cout << std::endl;
+ continue;
+ }
+
+ std::cout << " - [";
+
+ for (const auto & v : info.values())
+ {
+ std::cout << " " << v.toString();
+ }
+
+ std::cout << " ]\n";
+ }
+}