summaryrefslogtreecommitdiff
path: root/main.cpp
diff options
context:
space:
mode:
authorNikita Kostovsky <luntik2012@gmail.com>2024-11-09 17:26:11 +0100
committerNikita Kostovsky <luntik2012@gmail.com>2024-11-09 17:26:11 +0100
commit053237b9c91f1b5892782c1c4b2eb50cc8aeadfd (patch)
treeab3d3d2e896c7c71228bdb62c4fd4364a1364347 /main.cpp
Initial commit
Diffstat (limited to 'main.cpp')
-rw-r--r--main.cpp783
1 files changed, 783 insertions, 0 deletions
diff --git a/main.cpp b/main.cpp
new file mode 100644
index 0000000..0aff149
--- /dev/null
+++ b/main.cpp
@@ -0,0 +1,783 @@
+#define QT_NO_KEYWORDS
+#include <QCoreApplication>
+#include <QDebug>
+#include <QFile>
+#include <QHttpServer>
+#include <QJsonArray>
+#include <QJsonDocument>
+#include <QJsonObject>
+#include <QtConcurrent/QtConcurrentRun>
+#undef QT_NO_KEYWORDS
+
+#include <chrono>
+#include <errno.h>
+#include <iostream>
+#include <fstream>
+#include <string.h>
+#include <thread>
+
+#include "httpservice.h"
+#include "genetic_algos.h"
+#include "imagealgos.h"
+#include "LibCamera.h"
+
// Validates and applies the global `config` via applyConfig(); on failure,
// releases the camera, stops the camera manager and *returns* EXIT_FAILURE
// from the enclosing function.
// NOTE: expands to a bare `return` and touches the globals `camera`/`cm`,
// so it is only usable inside main(). Comments cannot go inside the macro
// body (a `//` would swallow the line-continuation backslash).
#define try_apply_config() \
if(!applyConfig(config)) \
    { \
        camera->release(); \
        cm->stop(); \
        \
        return EXIT_FAILURE;\
    }
+
+
// JSON keys accepted/produced by the /v1/sensor/params HTTP endpoint.
const QString exposureTimeKey = "exposureTime";
const QString laserLevelKey = "laserLevel";

// Parameters last requested over HTTP. `exposureTime` is applied to requests
// in onRequestCompleted(); `laserLevel` is written to the sysfs PWM
// duty-cycle file by the POST handler.
struct requested_params_t {
    int32_t exposureTime = { 200 };  // microseconds — assumed; TODO confirm units
    int32_t laserLevel = { 7000 };   // PWM duty_cycle value written to sysfs
} requested_params;
// Most recent captured frame (project type from imagealgos.h). Written by
// onRequestCompleted() and read by the HTTP routes under pgm_image_mtx.
Image img;

using namespace std::chrono_literals;

// State shared between main() and the libcamera requestCompleted callback.
// NOTE(review): accessed from both the Qt/HTTP threads and the libcamera
// manager thread without a dedicated lock — verify thread-safety.
static std::shared_ptr<Camera> camera;
std::unique_ptr<CameraConfiguration> config;
// Plane fd -> (mmap'ed address, mapping length) for every allocated buffer.
static std::map<int, std::pair<void *, unsigned int>> mappedBuffers_;
std::vector<std::unique_ptr<Request>> requests;
// Controls of the most recently completed request; served by GET /v1/sensor/params.
ControlList lastControls;

static bool applyConfig(const std::unique_ptr<CameraConfiguration> & config);
static void onRequestCompleted(Request *completed_request);
static void printControls();
+
+int main(int argc, char *argv[]) {
+ QCoreApplication app(argc, argv);
+ qDebug() << "Hello qt";
+ // FIXME: don't use one var for everything
+ int ret;
+ std::unique_ptr<CameraManager> cm = std::make_unique<CameraManager>();
+ cm->start();
+
+ const auto cameras = cm->cameras();
+
+ if (cameras.empty())
+ {
+ std::cout << "No cameras were identified on the system." << std::endl;
+ cm->stop();
+
+ return EXIT_FAILURE;
+ }
+
+ std::string cameraId = cameras[0]->id();
+
+ std::cout << "using " << cameraId << std::endl;
+
+ /*
+ * Note that `camera` may not compare equal to `cameras[0]`.
+ * In fact, it might simply be a `nullptr`, as the particular
+ * device might have disappeared (and reappeared) in the meantime.
+ */
+ // std::shared_ptr<Camera> camera = cm->get(cameraId);
+ camera = cm->get(cameraId);
+
+ if (camera->acquire() != EXIT_SUCCESS)
+ {
+ std::cout << "Cannot acquire camera." << std::endl;
+ cm->stop();
+
+ return EXIT_FAILURE;
+ }
+
+ // FIXME: nullptr
+ // std::unique_ptr<CameraConfiguration> config = camera->generateConfiguration( { StreamRole::Viewfinder } );
+ /*std::unique_ptr<CameraConfiguration> */config = camera->generateConfiguration( { StreamRole::Raw } );
+
+ if (config->empty())
+ {
+ std::cerr << "No configurations generated." << std::endl;
+ cm->stop();
+
+ return EXIT_FAILURE;
+ }
+
+ config->orientation = libcamera::Orientation::Rotate90;
+
+ // if (config->validate() != EXIT_SUCCESS)
+
+ // if (camera->configure(config.get()) != EXIT_SUCCESS)
+ // {
+ // std::cerr << "cannot configure camera" << std::endl << std::flush;
+ // cm->stop();
+
+ // return EXIT_FAILURE;
+ // }
+
+ // FIXME: nullptr
+ StreamConfiguration &streamConfig = config->at(0);
+ std::cout << "Default viewfinder configuration is: " << streamConfig.toString() << std::endl;
+ std::cout << "Pixel format is: " << streamConfig.pixelFormat.toString() << std::endl;
+ std::cout << "Buffer count is: " << streamConfig.bufferCount << std::endl;
+ // FIXME: empty variant
+ std::cout << "Color space is: " << streamConfig.colorSpace.value().toString() << std::endl;
+ std::cout << "Orientation is: " << config->orientation << std::endl;
+ // formats::R8,
+ // formats::R10,
+ // formats::R12,
+ // formats::R16,
+ // formats::R10_CSI2P, // camera->configure failure
+ // formats::R12_CSI2P, // camera->configure failure
+ // streamConfig.pixelFormat = PixelFormat::fromString("R8");
+ // streamConfig.pixelFormat = PixelFormat::fromString("Y8_1X8");
+
+ // streamConfig.pixelFormat = formats::R8;
+ streamConfig.pixelFormat = formats::R16;
+ streamConfig.bufferCount = 2;
+ // what is default R10_CSI2P? MONO_PISP_COMP1?
+ // MONO_PISP_COMP1 - check rpicam-apps sources for decoding algos
+ // streamConfig.pixelFormat = formats::R10_CSI2P;
+ // streamConfig.bufferCount = 16;
+ try_apply_config()
+
+ // #define doit(rotation) \
+ // std::cout << "set rotation to: " << libcamera::Orientation:: rotation \
+ // << std::endl; \
+ // config->orientation = libcamera::Orientation:: rotation; \
+ // try_apply_config()
+
+ // doit(Rotate0Mirror);
+ // doit(Rotate180);
+ // doit(Rotate180Mirror);
+ // doit(Rotate90Mirror);
+ // doit(Rotate270);
+ // doit(Rotate270Mirror);
+ // doit(Rotate90);
+
+ std::cout << "new config " << streamConfig.toString() << std::endl;
+
+ // FIXME: may crassh even on success (e.g. by setting pixelFormat to "8")
+ if (camera->configure(config.get()) != EXIT_SUCCESS)
+ {
+ std::cout << "cannot apply config, quit." << std::endl;
+ camera->release();
+ cm->stop();
+
+ return EXIT_FAILURE;
+ }
+
+ // TODO: try custom FrameBufferAllocator and compare performance
+
+ auto allocator = std::make_shared<FrameBufferAllocator>(camera);
+
+ auto stream = streamConfig.stream();
+
+ ret = allocator->allocate(stream);
+
+ // TODO: check if zero
+ if (ret < 0)
+ {
+ std::cerr << "Can't allocate buffers" << std::endl;
+ // return -ENOMEM;
+ return ret;
+ }
+
+ size_t allocated = size_t(ret);
+ std::cout << "Allocated " << allocated << " buffers for stream" << std::endl;
+
+ const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator->buffers(stream);
+
+ // for (size_t i = 0; i < buffers.size(); ++i)
+ static int expOffset = 0;
+ for (const auto & buffer : buffers)
+ {
+ std::unique_ptr<Request> request = camera->createRequest();
+
+ if (!request)
+ {
+ std::cerr << "Can't create request" << std::endl;
+ return -ENOMEM;
+ }
+
+ // TODO: try multiple buffers per request and compare performance
+ int ret = request->addBuffer(stream, buffer.get());
+
+ if (ret < 0)
+ {
+ std::cerr << "Can't set buffer for request" << std::endl;
+
+ return ret;
+ }
+
+ for (const auto & plane : buffer->planes())
+ {
+ void *memory = mmap(NULL, plane.length, PROT_READ, MAP_SHARED,
+ plane.fd.get(), 0);
+ mappedBuffers_[plane.fd.get()] =
+ std::make_pair(memory, plane.length);
+ }
+
+ // size_t desiredFPS = 200;
+
+ // std::int64_t lowerUS = 1 * 1000 * 1000 / desiredFPS;
+ // std::int64_t higherUS = lowerUS;
+ // std::int64_t value_pair[2] = { higherUS / 2, higherUS };
+ request->controls().set(libcamera::controls::AnalogueGain, 1.0);
+ request->controls().set(libcamera::controls::ExposureTime, 4321 + expOffset++ * 100);
+ // request->controls().set(
+ // libcamera::controls::FrameDurationLimits,
+ // libcamera::Span<const std::int64_t, 2>(value_pair));
+
+ requests.push_back(std::move(request));
+ }
+
+ camera->requestCompleted.connect(onRequestCompleted);
+
+ std::unique_ptr<libcamera::ControlList> camcontrols { new libcamera::ControlList() };
+ // camcontrols->set(controls::FrameDurationLimits, libcamera::Span<const std::int64_t, 2>({8702, 10718903}));
+ // camcontrols->set(controls::ExposureTime, 100);
+ // camcontrols->set(controls::AnalogueGain, 0.1);
+
+ std::this_thread::sleep_for(500ms);
+
+ if (camera->start(camcontrols.get()))
+ {
+ qDebug() << "failed to start camera";
+ return EXIT_FAILURE;
+ }
+
+ // camera->start();
+
+ for (auto & request : requests)
+ {
+ camera->queueRequest(request.get());
+ }
+
+ printControls();
+
+ // std::this_thread::sleep_for(2s);
+ // TODO: move to thread
+ // Http::listenAndServe<HttpHandler>(Pistache::Address("*:8080"));
+
+ QHttpServer qHttpServer;
+ qHttpServer.route("/v1/sensor/image", [&]() {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ // qDebug() << "image";
+ return QByteArray((const char*)pgm_image, pgm_image_size);
+ });
+ qHttpServer.route("/v1/sensor/image2", [&]() {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ // qDebug() << "image";
+ return QByteArray((const char*)pgm_image, pgm_image_size);
+ });
+ qHttpServer.route("/v1/sensor/exposureTimeUs", [&]() {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ return "123";
+ });
+ qHttpServer.route("/v1/pixels", [&]() {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+
+ QJsonArray pixels;
+
+ for (size_t i = 0; i < img_width; ++i) {
+ pixels << img_height - img.pixels[i];
+ }
+
+ QJsonObject json;
+ json["pixels"] = pixels;
+
+ return QHttpServerResponse(QJsonDocument(json).toJson());
+ });
+
+ qHttpServer.route("/v1/sensor/params", [&](const QHttpServerRequest &request) -> QHttpServerResponse {
+
+ switch (request.method()) {
+ case QHttpServerRequest::Method::Get:
+ {
+ std::lock_guard<std::mutex> lg(pgm_image_mtx);
+ QJsonObject json;
+
+ const ControlIdMap & ctrlIdMap = camera->controls().idmap();
+
+ qDebug() << "readParams:" << lastControls.size();
+ qDebug() << request.method();
+
+ for (const auto & [id, value]: lastControls)
+ {
+ const ControlId * controlId = ctrlIdMap.at(id);
+ auto name = QString::fromStdString(controlId->name());
+ const auto valueStr = QString::fromStdString(value.toString());
+ qDebug() << "\t param:"
+ << controlId->id()
+ << name
+ << valueStr
+ ;
+
+ name[0] = name[0].toLower();
+ json[name] = valueStr;
+ }
+
+ json[laserLevelKey] = requested_params.laserLevel;
+
+ qDebug() << "response body:" << json;
+
+ // QHttpServerResponse
+ return QHttpServerResponse(QJsonDocument(json).toJson());
+ }
+
+ case QHttpServerRequest::Method::Post:
+ {
+ qDebug() << "request body:" << request.body();
+
+ auto json = QJsonDocument::fromJson(request.body()).object();
+
+ if (json.contains(exposureTimeKey)) {
+ const int32_t value { json[exposureTimeKey].toInt() };
+
+ if (value == 0) {
+ return QHttpServerResponse::StatusCode::NotFound;
+ }
+
+ qDebug() << "set new exposure time:" << value;
+ requested_params.exposureTime = value;
+ }
+
+ if (json.contains(laserLevelKey)) {
+ const int32_t value { json[laserLevelKey].toInt() };
+
+ if (value == 0) {
+ return QHttpServerResponse::StatusCode::NotFound;
+ }
+
+ qDebug() << "set new laserLevel:" << value;
+ requested_params.laserLevel = value;
+
+ const QString laserLevelFile { "/sys/class/pwm/pwmchip2/pwm1/duty_cycle"};
+ QFile f { laserLevelFile };
+
+ if (!f.open(QFile::ReadWrite)) {
+ qDebug() << "cannot open laser level file:" << f.errorString();
+ qDebug() << "file path is" << f.fileName();
+ return QHttpServerResponse::StatusCode::InternalServerError;
+ }
+
+ QTextStream s { &f };
+
+ s << value;
+
+ s >> requested_params.laserLevel;
+ }
+
+ return QHttpServerResponse(request.body());
+ }
+ default:
+ {
+ return QHttpServerResponse(QByteArray("unsupported http method"));
+ }
+ }
+ });
+
+ qDebug() << "listen: " << qHttpServer.listen(QHostAddress::Any, 8081);
+
+ QFuture<void> future = QtConcurrent::run([](){
+
+ Port port(8080);
+ Address addr(Ipv4::any(), port);
+
+ HttpService httpService(addr);
+
+ size_t threads_count = 1;
+ httpService.init(threads_count);
+ httpService.start();
+ });
+
+ ////////////////////////////////////////////////////////////////////////////
+ std::clog << std::flush;
+ std::cerr << std::flush;
+ std::cout << "ok for now" << std::endl << std::flush;
+
+ // camera->stop();
+ // camera->release();
+ // cm->stop();
+
+ auto result = app.exec();
+
+ future.cancel();
+ future.waitForFinished();
+
+ for (auto & [fd, mem] : mappedBuffers_)
+ {
+ munmap(mem.first, mem.second);
+ }
+
+ // FIXME: crash somewhere here. proper libcamera finishing needed
+ requests.clear();
+ mappedBuffers_.clear();
+
+ camera->stop();
+ config.reset();
+ allocator->free(stream);
+ allocator.reset();
+ camera->release();
+ camera.reset();
+ cm->stop();
+
+ return result;
+
+ // time_t start_time = time(0);
+ // int frame_count = 0;
+
+ // LibCamera cam;
+ // uint32_t width = 1280;
+ // uint32_t height = 800;
+ // uint32_t stride;
+ // char key;
+
+ // ret = cam.initCamera();
+
+ // if (ret != EXIT_SUCCESS)
+ // {
+ // std::cerr << "cannot open camera" << std::endl;
+
+ // return EXIT_FAILURE;
+ // }
+
+ // cam.configureStill(width, height, formats::R8, 1, 0);
+ // // ControlList controls_;
+ // int64_t frame_time = 1000000 / 10;
+ // // Set frame rate
+ // // controls_.set( controls::FrameDurationLimits, libcamera::Span<const int64_t, 2>(
+ // // { frame_time, frame_time } ));
+ // // Adjust the brightness of the output images, in the range -1.0 to 1.0
+ // // controls_.set(controls::Brightness, 0.5);
+ // // Adjust the contrast of the output image, where 1.0 = normal contrast
+ // // controls_.set(controls::Contrast, 1.5);
+ // // Set the exposure time
+ // // controls_.set(controls::ExposureTime, 20000);
+ // // cam.set(controls_);
+
+ // std::cout << std::flush;
+
+ // // NOTE: already checked
+ // if (ret == EXIT_SUCCESS) {
+ // bool flag;
+ // LibcameraOutData frameData;
+ // cam.startCamera();
+ // cam.VideoStream(&width, &height, &stride);
+
+ // while (true) {
+ // flag = cam.readFrame(&frameData);
+ // if (!flag)
+ // continue;
+
+ // // key = waitKey(1);
+ // // if (key == 'q') {
+ // // break;
+ // // } else if (key == 'f') {
+ // // ControlList controls;
+ // // controls.set(controls::AfMode, controls::AfModeAuto);
+ // // controls.set(controls::AfTrigger, 0);
+ // // cam.set(controls);
+ // // }
+
+
+ // frame_count++;
+ // if ((time(0) - start_time) >= 1){
+ // printf("fps: %d\n", frame_count);
+ // frame_count = 0;
+ // start_time = time(0);
+ // }
+ // cam.returnFrameBuffer(frameData);
+ // }
+
+ // cam.stopCamera();
+ // }
+
+ // cam.closeCamera();
+
+ // return EXIT_SUCCESS;
+}
+
+/*
+ * Signals operate in the libcamera CameraManager thread context, so it is
+ * important not to block the thread for a long time, as this blocks internal
+ * processing of the camera pipelines, and can affect realtime performance.
+*/
+void onRequestCompleted(Request *completed_request)
+{
+ bool verbose = false;
+
+ if (completed_request->status() == Request::RequestCancelled)
+ {
+ std::cerr << "request canceled" << std::endl;
+
+ return;
+ }
+
+ const std::map<const Stream *, FrameBuffer *> &buffers = completed_request->buffers();
+
+ // std::cout << "request completed, buffers count is " << buffers.size();
+
+ // // TODO: rewrite this shit
+ for (auto [stream, buffer] : buffers)
+ {
+ const auto & streamConfig = stream->configuration();
+ const auto & imageSize = streamConfig.size;
+ const auto & pixelFormat = streamConfig.pixelFormat;
+ const auto & stride = streamConfig.stride;
+
+ const FrameMetadata &metadata = buffer->metadata();
+
+ if (verbose)
+ {
+ std::cout << " seq: " << std::setw(6) << std::setfill('0')
+ << metadata.sequence
+ << " bytesused: ";
+ }
+
+ for (size_t i = 0; i < buffer->planes().size(); ++i)
+ {
+ const FrameBuffer::Plane & plane = buffer->planes()[i];
+ const FrameMetadata::Plane & metaplane = buffer->metadata().planes()[i];
+
+ size_t size = std::min(metaplane.bytesused, plane.length);
+ void * data = mappedBuffers_[plane.fd.get()].first;
+
+ // std::cout << metaplane.bytesused << "/" << plane.length;
+
+ // std::cout << " stride " << stride;
+ // std::cout << " planes count: " << buffer->planes().size() << " ";
+ // std::cout << std::endl;
+
+ // if (metadata.sequence == 20)
+ {
+ // FIXME: remove hardcode
+ img.width = imageSize.width;
+ img.height = imageSize.height;
+ // img.data = data;
+ memcpy(img.data, data, size);
+ img.dataSize = size;
+ img.stride = stride;
+ img.pixelFormat = pixelFormat;
+
+ // uint16_t unpacked[img.width * img.height] = { 0 };
+ // unpack_16bit((uint8_t*)img.data, img, (uint16_t*)&unpacked);
+ // img.data = unpacked;
+ // img.dataSize = img.width * img.height * sizeof(uint16_t);
+ rotate(img);
+ process_columns(img);
+
+ static bool done = false;
+ // mark pixels and max region
+ for (size_t i = 0; i < img_width; ++i)
+ {
+ // std::cout << "\t" << img.pixels[i] << std::endl;
+ // uint
+ // const auto & p = img.pixels[i];
+ // const auto int_p = int(p);
+ // const auto fract = p - int_p;
+
+
+ // img.data[int_p][i] = 256 * 256 * fract;
+ // img.data[int_p + 1][i] = 256 * 256 * (1.0 - fract);
+
+
+ // if (!done) {
+ // std::cout << fract << " ";
+ // }
+
+ img.data[size_t(img.pixels[i])][i] = 0;
+ img.data[size_t(img.pixels[i]) - 6][i] = 0xffff;
+ img.data[size_t(img.pixels[i]) + 6][i] = 0xffff;
+ }
+ done = true;
+
+ // // FILE * f = fopen("/tmp/R16.pgm", "w");
+ FILE * f = fopen("/tmp/img.pgm", "w");
+ // // FILE * f = fopen("/tmp/MONO_PISP_COMP1.pgm", "w");
+
+ if (f == NULL)
+ {
+ std::cerr << "cannot open output file: "
+ << strerror(errno)
+ << std::endl;
+ }
+ else
+ {
+ // pgm_save(&img, f);
+ pgm_save(&img, f);
+ fclose(f);
+ // std::cout << "file written" << std::endl;
+ }
+ }
+ }
+ }
+
+ const libcamera::ControlList &metadata = completed_request->metadata();
+ const ControlInfoMap & control_map = camera->controls();
+ const ControlIdMap & ctrlIdMap = control_map.idmap();
+
+ auto frameDurationCtrl = control_map.find(&controls::FrameDurationLimits);
+ auto expTimeCtrl = control_map.find(&controls::ExposureTime);
+ double fps = frameDurationCtrl == control_map.end() ?
+ std::numeric_limits<double>::quiet_NaN() :
+ (1e6 / frameDurationCtrl->second.min().get<int64_t>());
+
+ auto exp = metadata.get(controls::ExposureTime);
+ auto ag = metadata.get(controls::AnalogueGain);
+ auto ae = metadata.get(controls::AeEnable);
+ // auto br= metadata.get(controls::Brightness);
+ lastControls = completed_request->controls();
+
+ if (verbose)
+ {
+ std::cout << "fps: " << fps
+ << " exp: " << *exp
+ << " ag: " << *ag
+ // << " br: " << *br
+ << " ae: " << *ae
+ << " aa: " << *completed_request->controls()
+ .get(libcamera::controls::ExposureTime)
+ << std::endl;
+ }
+
+ for (const auto & [id, value] : metadata)
+ {
+
+ }
+
+ // metadata.set(controls::ExposureTime, 300);
+
+ // exp->set(*exp + 1);
+ // expTimeCtrl->second().set(*exp + 1);
+ // auto expTimeCtrlId= expTimeCtrl->id();
+
+
+ // properties.set(controls::ExposureTime, 1000);
+
+ // std::optional<uint32_t> expTimeOptional = properties.get(controls::ExposureTime);
+
+ // if (expTimeOptional.has_value())
+ // {
+ // // uint32_t value = expTimeOptional.value();
+
+ // auto frameDurationLimits = controls.find(&controls::FrameDurationLimits)->second;
+ // auto min = frameDurationLimits.min().get<int64_t>();
+ // auto max = frameDurationLimits.max().get<int64_t>();
+ // // auto val = properties.find(controls::FrameDurationLimits)->value();//.second().min().get<int64_t>()
+ // // auto second = val.second();
+ // auto framerate = 1.0e6 / min;
+ // auto rAG = request->controls().get<float>(libcamera::controls::AnalogueGain);
+ // auto rET = request->controls().get<int32_t>(libcamera::controls::ExposureTime);
+ // int32_t randET = rand() % 9000 + 1000;
+ // request->controls().set(libcamera::controls::ExposureTime, 100);
+ // std::cout << "exposure time (us): "
+ // << properties.get(controls::ExposureTime).value()
+ // << " frame duration limits (ns): " << min << "/" << max
+ // << " framerate: " << framerate
+ // << " " << rAG.value_or(321)
+ // << " " << rET.value_or(321)
+ // << std::endl;
+ // // std::cout << "noise reduction mode: "
+ // // << properties.get(controls::AwbMode).value()
+ // // << std::endl;
+ // // NoiseReductionMode
+ // }
+
+ // completed_request->reuse(Request::ReuseBuffers);
+ // camera->queueRequest(completed_request);
+
+
+ completed_request->reuse(Request::ReuseBuffers);
+
+ completed_request->controls().set(libcamera::controls::AeEnable, false);
+ completed_request->controls().set(libcamera::controls::draft::NoiseReductionMode,
+ libcamera::controls::draft::NoiseReductionModeEnum::NoiseReductionModeHighQuality);
+ // completed_request->controls().set(libcamera::controls::ExposureTime, rand() % 1000 + 100);
+
+ static bool done0 = false;
+ static bool done1 = false;
+
+ if (!done0 || !done1)
+ {
+
+ if (completed_request->sequence() % 2 == 0)
+ {
+ // qDebug() << "set 0" << completed_request->sequence();
+ completed_request->controls().set(
+ libcamera::controls::ExposureTime,
+ requested_params.exposureTime);
+ done0 == true;
+ }
+ else
+ {
+ // qDebug() << "set 1" << completed_request->sequence();
+ completed_request->controls().set(
+ libcamera::controls::ExposureTime,
+ requested_params.exposureTime);
+ done1 = true;
+ }
+ }
+
+ camera->queueRequest(completed_request);
+}
+
+static bool applyConfig(const std::unique_ptr<CameraConfiguration> & config)
+{
+ auto status = config->validate();
+
+ // WARNING: unsafe
+ StreamConfiguration &streamConfig = config->at(0);
+
+ switch (status) {
+ case CameraConfiguration::Status::Valid:
+ std::cout << "config is valid" << std::endl;
+ break;
+ case CameraConfiguration::Status::Adjusted:
+ std::cout << "\tpixelFormat: "
+ << streamConfig.pixelFormat.toString() << std::endl;
+ std::cout << "\tbufferCount: "
+ << streamConfig.bufferCount << std::endl;
+ std::cout << "\torientation: "
+ << config->orientation << std::endl;
+ break;
+ case CameraConfiguration::Status::Invalid:
+ std::cout << "config is invalid, quit." << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+static void printControls()
+{
+ const ControlInfoMap & control_map = camera->controls();
+
+ // for (const auto & [id, info]: control_map)
+ for (const std::pair<const ControlId *, ControlInfo> & pair : control_map)
+ {
+ const ControlId * const & id = pair.first;
+ const ControlInfo & info = pair.second;
+
+ std::cout << "\tc " << id->name()
+ << " (" << id->id()
+ << "): " << info.toString()
+ << (info.def().isNone() ? "" : " (dflt:" + info.def().toString() + ")");
+
+ if (!info.values().size())
+ {
+ std::cout << std::endl;
+ continue;
+ }
+
+ std::cout << " - [";
+
+ for (const auto & v : info.values())
+ {
+ std::cout << " " << v.toString();
+ }
+
+ std::cout << " ]\n";
+ }
+}