// NOTE: the angle-bracket header names were lost in this copy of the file;
// the system and Qt includes below are reconstructed from what the code uses.
#include <chrono>
#include <cstring>
#include <iostream>
#include <limits>
#include <map>
#include <memory>
#include <mutex>
#include <thread>
#include <vector>

#include <libcamera/libcamera.h>
#include <sys/mman.h>

#include "LibCamera.h"
#include "calibration.h"
#include "camera/ov9281.h"
#include "dumps.h"
#include "fuck_intel.h"
#include "genetic_algos.h"
#include "httpservice.h"
#include "imagealgos.h"
#include "pigpio.h"
#include "printerclient.h"
#include "profile.h"
#include "rotaryencoder.h"

#include <QCoreApplication>
#include <QDebug>
#include <QElapsedTimer>
#include <QFile>
#include <QFuture>
#include <QHostAddress>
#include <QHttpServer>
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QMutex>
#include <QTextStream>
#include <QtConcurrent>

#define try_apply_config()        \
    if (!applyConfig(config))     \
    {                             \
        camera->release();        \
        cm->stop();               \
                                  \
        return EXIT_FAILURE;      \
    }

ScanningModeFlags scanningModeFlags{ScanningModeFlags::None};
QElapsedTimer calibrationTimer;
extern volatile int32_t positionSteps;
requested_params_t requested_params;

namespace
{
Image img;
Pixels pixels;
std::vector<Pixels> calibrationPixels;
QMutex calibrationPixelsMutex;
} // namespace

using namespace std::chrono_literals;

// static std::shared_ptr<libcamera::Camera> camera;
// std::unique_ptr<libcamera::CameraConfiguration> config;
// static std::map<int, std::pair<void*, unsigned int>> mappedBuffers_;
// std::vector<std::unique_ptr<libcamera::Request>> requests;
libcamera::ControlList lastControls;

namespace
{
CalibrationTablePtr calibrationTableZ;
CalibrationTablePtr calibrationTableX;
} // namespace

// static bool applyConfig(
//     const std::unique_ptr<libcamera::CameraConfiguration>& config
// );
// static void onRequestCompleted(libcamera::Request* completed_request);
// static void printControls();
// static QList<Pixels> filter(const QList<Pixels>& rawProfiles);

auto printPixels = [](const auto& pixels) {
    for (size_t i = (img_width - 10) / 2;
         i < img_width - ((img_width - 10) / 2); ++i)
    {
        std::cout << pixels[i] << " ";
    }
    std::cout << std::endl;
};

void onNewPixels(std::shared_ptr<Pixels> pixels)
{
    if (!*pixels)
    {
        qDebug() << "got empty pixels";
    }
    ::pixels = *pixels;
    if (!::pixels)
    {
        qDebug() << "empty pixels after copy";
    }
}

bool initLaser();

int main(int argc, char* argv[])
{
    QCoreApplication app(argc, argv);

    // if (false)
    qDebug() << "size of raw profile" << sizeof(Pixels);

    if (true)
    {
        if (true)
        {
            // z
            // if (!openCalibrationTable(
            //         "/home/user/dumps/binz.calibration_table",
            //         ::calibrationTableZ
            //     ))
            // {
            //     exit(EXIT_FAILURE);
            // }
            // if (!calibrationTableToImage(::calibrationTableZ)
            //          .save("/home/user/dumps/imageZ.png"))
            // {
            //     qDebug() << "cannot save imageZ.png";
            //     exit(EXIT_FAILURE);
            // }
            // interpolate(::calibrationTableZ);
            // exit(EXIT_SUCCESS);
            // calibrationTableToImage(::calibrationTableZ)
            //     .save("/home/user/dumps/imageZ_interpolated.png");

            auto rawProfiles = openDump("/home/user/dumps/binx");
            qDebug() << "raw x-profiles count is" << rawProfiles.size();
            // qDebug() << "height" << calibrationColumnHeight;
            auto filteredRawProfiles = filter(std::move(rawProfiles));
            qDebug() << "filtered x-profiles count is"
                     << filteredRawProfiles.count();

            ::calibrationTableX = calibrateX(std::move(filteredRawProfiles));

            for (size_t i = 9471; i < 9472; i++)
            {
                std::cout << "row #" << i << ": ";
                for (size_t j = 0; j < 1280; ++j)
                {
                    const auto& p = ::calibrationTableX->at(j).at(i);
                    std::cout << p << ' ';
                }
                std::cout << std::endl;
            }

            // x
            qDebug() << "open x table";
            if (!openCalibrationTable("/home/user/dumps/binx.calibration_table",
                                      ::calibrationTableX))
            {
                exit(EXIT_FAILURE);
            }
            // if (!calibrationTableToImage(::calibrationTableX)
            //          .save("/home/user/dumps/imageX.png"))
            // {
            //     qDebug() << "cannot save imageX.png";
            //     exit(EXIT_FAILURE);
            // }

            for (size_t i = 9471; i < 9472; i++)
            {
                std::cout << "row #" << i << ": ";
                for (size_t j = 0; j < 1280; ++j)
                {
                    const auto& p = ::calibrationTableX->at(j).at(i);
                    std::cout << p << ' ';
                }
                std::cout << std::endl;
            }
            // exit(EXIT_SUCCESS);
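            // Presumably fills the table cells that no filtered profile
            // covered, so the lookups below never hit an empty entry.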
            interpolate(::calibrationTableX);
            // calibrationTableToImage(::calibrationTableX)
            //     .save("/home/user/dumps/imageX_interpolated.png");
        }

        if (false)
        {
            auto rawProfiles = openDump("/home/user/dumps/binz");
            // auto rawProfiles = openDump("/home/user/dumps/z");
            qDebug() << "raw z-profiles count is" << rawProfiles.size();
            // qDebug() << "height" << calibrationColumnHeight;
            auto filteredRawProfiles = filter(std::move(rawProfiles));
            qDebug() << "filtered z-profiles count is"
                     << filteredRawProfiles.count();

            ::calibrationTableZ = calibrateZ(std::move(filteredRawProfiles),
                                             requested_params.stepsPerMm);

            // if (!dump(
            //         ::calibrationTableZ,
            //         "/home/user/dumps/binz.calibration_table"
            //     ))
            // {
            //     qApp->exit(EXIT_FAILURE);
            // }
            // bool ok = calibrationTableToImage(::calibrationTableZ)
            //               .save("/home/user/dumps/z/imageZ.png");
            // if (!ok)
            // {
            //     qDebug() << "cannot save imageZ.png";
            //     exit(EXIT_FAILURE);
            // }

            interpolate(::calibrationTableZ);
            // calibrationTableToImage(::calibrationTableZ)
            //     .save("/home/user/dumps/z/imageZ_interpolated.png");
            // exit(EXIT_SUCCESS);
        }

        qDebug() << "--------------------------------------------------------";

        if (false)
        {
            auto rawProfiles = openDump("/home/user/dumps/binx");
            qDebug() << "raw x-profiles count is" << rawProfiles.size();
            // qDebug() << "height" << calibrationColumnHeight;
            auto filteredRawProfiles = filter(std::move(rawProfiles));
            qDebug() << "filtered x-profiles count is"
                     << filteredRawProfiles.count();

            ::calibrationTableX = calibrateX(std::move(filteredRawProfiles));

            // if (!dump(
            //         ::calibrationTableZ,
            //         "/home/user/dumps/binx.calibration_table"
            //     ))
            // {
            //     qApp->exit(EXIT_FAILURE);
            // }
            // bool ok = calibrationTableToImage(::calibrationTableX)
            //               .save("/home/user/dumps/z/imageX.png");
            // if (!ok)
            // {
            //     qDebug() << "cannot save imageX.png";
            //     exit(EXIT_FAILURE);
            // }

            interpolate(::calibrationTableX);
            // calibrationTableToImage(::calibrationTableX)
            //     .save("/home/user/dumps/z/imageX_interpolated.png");
        }
    }

    // exit(EXIT_SUCCESS);

    if (!initLaser())
    {
        return EXIT_FAILURE;
    }

    // PrinterClient printerClient;

    QElapsedTimer t;
    t.start();
    qDebug() << "msecs before encoder:" << t.elapsed();
    RotaryEncoder encoder;
    qDebug() << "msecs before camera:" << t.elapsed();

    // FIXME: don't use one var for everything
    int ret;

    std::unique_ptr<libcamera::CameraManager> cm =
        std::make_unique<libcamera::CameraManager>();
    cm->start();

    // const auto cameras = cm->cameras();
    const auto cameras = OV9281::search(cm);
    if (cameras.empty())
    {
        std::cerr << "No cameras were identified on the system." << std::endl;
        cm->stop();
        return EXIT_FAILURE;
    }

    auto camera = cameras.at(0);
    camera->printControls();
    camera->newPixels.connect(&onNewPixels);
    if (!camera->startStream())
    {
        cm->stop();
        return EXIT_FAILURE;
    }

    /*
    std::string cameraId = cameras[0]->id();
    std::cout << "using " << cameraId << std::endl;
    //
    // Note that `camera` may not compare equal to `cameras[0]`.
    // In fact, it might simply be a `nullptr`, as the particular
    // device might have disappeared (and reappeared) in the meantime.
    //
    // std::shared_ptr<libcamera::Camera> camera = cm->get(cameraId);
    camera = cm->get(cameraId);
    if (camera->acquire() != EXIT_SUCCESS)
    {
        std::cout << "Cannot acquire camera." << std::endl;
        cm->stop();
        return EXIT_FAILURE;
    }

    // FIXME: nullptr
    // std::unique_ptr<libcamera::CameraConfiguration> config =
    //     camera->generateConfiguration( { StreamRole::Viewfinder } );
    config = camera->generateConfiguration({libcamera::StreamRole::Raw});
    if (config->empty())
    {
        std::cerr << "No configurations generated."
                  << std::endl;
        cm->stop();
        return EXIT_FAILURE;
    }

    config->orientation = libcamera::Orientation::Rotate90;

    // if (config->validate() != EXIT_SUCCESS)
    // if (camera->configure(config.get()) != EXIT_SUCCESS)
    // {
    //     std::cerr << "cannot configure camera" << std::endl << std::flush;
    //     cm->stop();
    //     return EXIT_FAILURE;
    // }

    // FIXME: nullptr
    libcamera::StreamConfiguration& streamConfig = config->at(0);
    std::cout << "Default viewfinder configuration is: "
              << streamConfig.toString() << std::endl;
    std::cout << "Pixel format is: " << streamConfig.pixelFormat.toString()
              << std::endl;
    std::cout << "Buffer count is: " << streamConfig.bufferCount << std::endl;
    // FIXME: empty variant
    std::cout << "Color space is: " << streamConfig.colorSpace.value().toString()
              << std::endl;
    std::cout << "Orientation is: " << config->orientation << std::endl;

    // formats::R8,
    // formats::R10,
    // formats::R12,
    // formats::R16,
    // formats::R10_CSI2P, // camera->configure failure
    // formats::R12_CSI2P, // camera->configure failure
    // streamConfig.pixelFormat = PixelFormat::fromString("R8");
    // streamConfig.pixelFormat = PixelFormat::fromString("Y8_1X8");
    // streamConfig.pixelFormat = formats::R8;
    streamConfig.pixelFormat = libcamera::formats::R16;
    streamConfig.bufferCount = 2;
    // what is default R10_CSI2P? MONO_PISP_COMP1?
    // MONO_PISP_COMP1 - check rpicam-apps sources for decoding algos
    // streamConfig.pixelFormat = formats::R10_CSI2P;
    // streamConfig.bufferCount = 16;

    try_apply_config()

    // #define doit(rotation) \
    //     std::cout << "set rotation to: " << libcamera::Orientation:: rotation \
    //               << std::endl; \
    //     config->orientation = libcamera::Orientation:: rotation; \
    //     try_apply_config()

    // doit(Rotate0Mirror);
    // doit(Rotate180);
    // doit(Rotate180Mirror);
    // doit(Rotate90Mirror);
    // doit(Rotate270);
    // doit(Rotate270Mirror);
    // doit(Rotate90);

    std::cout << "new config " << streamConfig.toString() << std::endl;

    // FIXME: may crash even on success (e.g. by setting pixelFormat to "8")
    if (camera->configure(config.get()) != EXIT_SUCCESS)
    {
        std::cout << "cannot apply config, quit."
                  << std::endl;
        camera->release();
        cm->stop();
        return EXIT_FAILURE;
    }

    // TODO: try custom FrameBufferAllocator and compare performance
    auto allocator = std::make_shared<libcamera::FrameBufferAllocator>(camera);
    auto stream = streamConfig.stream();
    ret = allocator->allocate(stream);
    // TODO: check if zero
    if (ret < 0)
    {
        std::cerr << "Can't allocate buffers" << std::endl;
        // return -ENOMEM;
        return ret;
    }

    size_t allocated = size_t(ret);
    std::cout << "Allocated " << allocated << " buffers for stream" << std::endl;

    const std::vector<std::unique_ptr<libcamera::FrameBuffer>>& buffers =
        allocator->buffers(stream);
    // for (size_t i = 0; i < buffers.size(); ++i)
    static int expOffset = 0;
    for (const auto& buffer : buffers)
    {
        std::unique_ptr<libcamera::Request> request = camera->createRequest();
        if (!request)
        {
            std::cerr << "Can't create request" << std::endl;
            return -ENOMEM;
        }

        // TODO: try multiple buffers per request and compare performance
        int ret = request->addBuffer(stream, buffer.get());
        if (ret < 0)
        {
            std::cerr << "Can't set buffer for request" << std::endl;
            return ret;
        }

        for (const auto& plane : buffer->planes())
        {
            void* memory = mmap(
                NULL, plane.length, PROT_READ, MAP_SHARED, plane.fd.get(), 0
            );
            mappedBuffers_[plane.fd.get()] = std::make_pair(memory, plane.length);
        }

        size_t desiredFPS = 144;
        std::int64_t lowerUS = 1 * 1000 * 1000 / desiredFPS;
        std::int64_t higherUS = lowerUS;
        std::int64_t value_pair[2] = {higherUS / 2, higherUS};
        request->controls().set(libcamera::controls::AnalogueGain, 1.0);
        request->controls().set(libcamera::controls::ExposureTime, 100);
        request->controls().set(
            libcamera::controls::FrameDurationLimits,
            libcamera::Span(value_pair)
        );

        requests.push_back(std::move(request));
    }

    camera->requestCompleted.connect(onRequestCompleted);

    std::unique_ptr<libcamera::ControlList> camcontrols{
        new libcamera::ControlList()
    };
    // camcontrols->set(controls::FrameDurationLimits, libcamera::Span({8702, 10718903}));
    // camcontrols->set(controls::ExposureTime, 100);
    // camcontrols->set(controls::AnalogueGain, 0.1);

    std::this_thread::sleep_for(500ms);
    if (camera->start(camcontrols.get()))
    {
        qDebug() << "failed to start camera";
        return EXIT_FAILURE;
    }
    // camera->start();

    for (auto& request : requests)
    {
        camera->queueRequest(request.get());
    }
    */

    // std::this_thread::sleep_for(2s);

    // TODO: move to thread
    // Http::listenAndServe(Pistache::Address("*:8080"));

    QHttpServer qHttpServer;
    qHttpServer.route("/v1/sensor/image", [&]() {
        std::cout << "http: image" << std::endl;
        std::lock_guard lg(pgm_image_mtx);
        // qDebug() << "image";
        return QByteArray((const char*)pgm_image, pgm_image_size);
    });
    qHttpServer.route("/v1/sensor/image2", [&]() {
        std::cout << "http: image2" << std::endl;
        std::lock_guard lg(pgm_image_mtx);
        // qDebug() << "image";
        return QByteArray((const char*)pgm_image, pgm_image_size);
    });
    // qHttpServer.route("/v1/sensor/exposureTimeUs", [&]() {
    //     // std::lock_guard lg(pgm_image_mtx);
    //     return "123";
    // });
    qHttpServer.route("/v1/pixels", [&]() {
        std::cout << "http: pixels" << std::endl;
        std::lock_guard lg(pgm_image_mtx);
        QJsonArray pixels;
        for (size_t i = 0; i < img_width; ++i)
        {
            // pixels << img_height - img.pixels[i];
            pixels << ::pixels.pixels[i];
        }
        QJsonObject json;
        json["pixels"] = pixels;
        json["encoderPosition"] = qint64{encoder.position()};
        json["measurementCounter"] = qint64{img.counters.measurementCounter};
        json["timestampUs"] = qint64(img.counters.timestampUs);

        const auto lines = pixelsToLines(::pixels);
        // qDebug() << "lines count is " << lines.count();
        QJsonArray jsonLines;
        for (const auto& l : lines)
        {
            jsonLines << QJsonArray{
                QJsonArray{l.p1().x(), l.p1().y()},
                QJsonArray{l.p2().x(),
                           l.p2().y()}
            };
        }
        json["lines"] = jsonLines;

        return QHttpServerResponse(QJsonDocument(json).toJson());
    });
    qHttpServer.route("/v1/profile", [&]() {
        std::cout << "http: profile" << std::endl;
        std::lock_guard lg(pgm_image_mtx);
        const Profile profile(::pixels, ::calibrationTableZ, ::calibrationTableX);
        const QJsonObject json{{"profile", QJsonObject(profile)}};
        return QHttpServerResponse(QJsonDocument(json).toJson());
    });
    qHttpServer
        .route("/v1/commands/resetEncoder",
               [&](const QHttpServerRequest& request) -> QHttpServerResponse {
                   std::cout << "http: resetEncoder" << std::endl;
                   if (request.method() != QHttpServerRequest::Method::Post)
                   {
                       return QHttpServerResponse::StatusCode::NotFound;
                   }
                   qDebug() << "reset encoder";
                   positionSteps = 0;
                   return QHttpServerResponse::StatusCode::Ok;
               });
    qHttpServer
        .route("/v1/commands/startCalibration",
               [&](const QHttpServerRequest& request) -> QHttpServerResponse {
                   std::cout << "http: startCalibration" << std::endl;
                   if (request.method() != QHttpServerRequest::Method::Post)
                   {
                       return QHttpServerResponse::StatusCode::NotFound;
                   }
                   qDebug() << "start calibration";
                   // TODO: use flags
                   scanningModeFlags = ScanningModeFlags::Calibration;
                   calibrationTimer.start();
                   return QHttpServerResponse::StatusCode::Ok;
               });
    qHttpServer
        .route("/v1/commands/gCode",
               [&](const QHttpServerRequest& request) -> QHttpServerResponse {
                   std::cout << "http: gCode" << std::endl;
                   if (request.method() != QHttpServerRequest::Method::Post)
                   {
                       return QHttpServerResponse::StatusCode::NotFound;
                   }
                   const auto command = request.body();
                   qDebug() << "send gCode:" << command;
                   // printerClient.sendCommand(command);
                   return QHttpServerResponse::StatusCode::Ok;
               });
    // qHttpServer
    //     .route("/v1/commands/startCalibration",
    //            [&](const QHttpServerRequest& request) -> QHttpServerResponse {
    //                std::cout << "http: startCalibration" << std::endl;
    //                if (request.method() != QHttpServerRequest::Method::Post)
    //                {
    //                    return QHttpServerResponse::StatusCode::NotFound;
    //                }
    //                const auto command = request.body();
    //                qDebug() << "send gCode:" << command;
    //                // printerClient.sendCommand(command);
    //                return QHttpServerResponse::StatusCode::Ok;
    //            });
    qHttpServer.route(
        "/v1/sensor/params",
        [&](const QHttpServerRequest& request) -> QHttpServerResponse {
            std::cout << "http: params" << std::endl;
            switch (request.method())
            {
                case QHttpServerRequest::Method::Get:
                {
                    std::lock_guard lg(pgm_image_mtx);
                    QJsonObject json;
                    // const libcamera::ControlIdMap& ctrlIdMap =
                    //     camera->controls().idmap();
                    // qDebug() << "readParams:" << lastControls.size();
                    // qDebug() << request.method();
                    // for (const auto& [id, value] : lastControls)
                    // {
                    //     const libcamera::ControlId* controlId = ctrlIdMap.at(id);
                    //     auto name = QString::fromStdString(controlId->name());
                    //     const auto valueStr =
                    //         QString::fromStdString(value.toString());
                    //     qDebug()
                    //         << "\t param:" << controlId->id() << name << valueStr;
                    //     name[0] = name[0].toLower();
                    //     json[name] = valueStr;
                    // }
                    // json[laserLevelKey] = requested_params.laserLevel;
                    // qDebug() << "response body:" << json;
                    // QHttpServerResponse
                    return QHttpServerResponse(QJsonDocument(json).toJson());
                }
                case QHttpServerRequest::Method::Post:
                {
                    qDebug() << "request body:" << request.body();
                    auto json = QJsonDocument::fromJson(request.body()).object();
                    if (json.contains(exposureTimeKey))
                    {
                        const int32_t value{json[exposureTimeKey].toInt()};
                        if (value == 0)
                        {
                            return QHttpServerResponse::StatusCode::NotFound;
                        }
                        qDebug() << "set new exposure time:" << value;
                        requested_params.exposureTime = value;
                    }
                    if (json.contains(laserLevelKey))
                    {
                        const int32_t value{json[laserLevelKey].toInt()};
                        if (value == 0)
                        {
                            return QHttpServerResponse::StatusCode::NotFound;
                        }
                        qDebug() << "set new laserLevel:" << value;
                        requested_params.laserLevel = value;

                        const QString laserLevelFile{
                            "/sys/class/pwm/pwmchip2/pwm1/duty_cycle"
                        };
                        QFile f{laserLevelFile};
                        if (!f.open(QFile::ReadWrite))
                        {
                            qDebug() << "cannot open laser level file:"
                                     << f.errorString();
                            qDebug() << "file path is" << f.fileName();
                            return QHttpServerResponse::StatusCode::
                                InternalServerError;
                        }
                        QTextStream s{&f};
                        s << value;
                        s >> requested_params.laserLevel;
                        qDebug() << "done with laser level";
                    }
                    return QHttpServerResponse(request.body());
                }
                default:
                {
                    return QHttpServerResponse(
                        QByteArray("unsupported http method")
                    );
                }
            }
        });

    qDebug() << "listen: " << qHttpServer.listen(QHostAddress::Any, 8081);

    QFuture<void> future = QtConcurrent::run([]() {
        Port port(8080);
        Address addr(Ipv4::any(), port);
        HttpService httpService(addr);
        size_t threads_count = 1;
        httpService.init(threads_count);
        httpService.start();
    });

    ////////////////////////////////////////////////////////////////////////////

    std::clog << std::flush;
    std::cerr << std::flush;
    std::cout << "ok for now" << std::endl << std::flush;

    // camera->stop();
    // camera->release();
    // cm->stop();

    auto result = app.exec();

    future.cancel();
    future.waitForFinished();

    // for (auto& [fd, mem] : mappedBuffers_)
    // {
    //     munmap(mem.first, mem.second);
    // }

    // FIXME: crash somewhere here. proper libcamera finishing needed
    // requests.clear();
    // mappedBuffers_.clear();
    // camera->stop();
    // config.reset();
    // allocator->free(stream);
    // allocator.reset();
    // camera->release();
    // camera.reset();
    cm->stop();

    return result;
}

/*
 * Signals operate in the libcamera CameraManager thread context, so it is
 * important not to block the thread for a long time, as this blocks internal
 * processing of the camera pipelines, and can affect realtime performance.
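 *
 * A possible way to respect that (sketch only: copyOutImage() and
 * processImage() are hypothetical helpers, not part of this project) is to
 * keep the completion handler down to a cheap copy and hand the heavy work
 * to the Qt thread pool:
 *
 *   void onRequestCompleted(libcamera::Request* completed_request)
 *   {
 *       Image copy = copyOutImage(completed_request);  // cheap copy only
 *       (void)QtConcurrent::run(&processImage, copy);  // heavy work off-thread
 *       completed_request->reuse(libcamera::Request::ReuseBuffers);
 *       camera->queueRequest(completed_request);
 *   }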
 */

/*
void onRequestCompleted(libcamera::Request* completed_request)
{
    using namespace libcamera;

    static std::chrono::steady_clock::time_point fpsTimstamp =
        std::chrono::steady_clock::now();

    QElapsedTimer t;
    t.start();
    static uint32_t performanceCounter{0};
    static uint32_t elapsedSum{0};

    bool verbose = false;

    if (completed_request->status() == Request::RequestCancelled)
    {
        std::cerr << "request canceled" << std::endl;
        return;
    }

    const std::map<const Stream*, FrameBuffer*>& buffers =
        completed_request->buffers();
    // std::cout << "request completed, buffers count is " << buffers.size();
    //
    // TODO: rewrite this shit
    for (auto [stream, buffer] : buffers)
    {
        const auto& streamConfig = stream->configuration();
        const auto& imageSize = streamConfig.size;
        const auto& pixelFormat = streamConfig.pixelFormat;
        const auto& stride = streamConfig.stride;
        const FrameMetadata& metadata = buffer->metadata();

        for (size_t i = 0; i < buffer->planes().size(); ++i)
        {
            const FrameBuffer::Plane& plane = buffer->planes()[i];
            const FrameMetadata::Plane& metaplane =
                buffer->metadata().planes()[i];
            size_t size = std::min(metaplane.bytesused, plane.length);
            void* data = mappedBuffers_[plane.fd.get()].first;

            // FIXME: remove hardcode
            img.width = imageSize.width;
            img.height = imageSize.height;
            // img.data = data;
            memcpy(img.data, data, size);
            img.dataSize = size;
            img.stride = stride;
            img.pixelFormat = pixelFormat;
            img.counters.measurementCounter = metadata.sequence;
            img.counters.timestampUs = metadata.timestamp / 1000;
            img.counters.encoderPosition = RotaryEncoder::instance()->position();
            // qDebug() << "pos:" << img.counters.encoderPosition;

            // uint16_t unpacked[img.width * img.height] = { 0 };
            // unpack_16bit((uint8_t*)img.data, img, (uint16_t*)&unpacked);
            // img.data = unpacked;
            // img.dataSize = img.width * img.height * sizeof(uint16_t);

            img.rotate();

            Pixels pixels = process_columns(img);
            ::pixels = pixels;

            // qDebug() << "calibration mode" << scanningModeFlags;
            if (scanningModeFlags == ScanningModeFlags::Calibration)
            {
                const int32_t maxEncoderPosition =
                    int32_t(hardcodedZRangeMm) * requested_params.stepsPerMm;
                // qDebug() << "calibration max range" << maxEncoderPosition;
                // qDebug() << "calibration encoder pos" <<
                //     pixels.counters.encoderPosition;
                if (pixels.counters.encoderPosition >= 0 &&
                    pixels.counters.encoderPosition <= maxEncoderPosition)
                {
                    qDebug() << "calibration save at pos:"
                             << pixels.counters.encoderPosition;
                    QMutexLocker l(&calibrationPixelsMutex);
                    ::calibrationPixels.push_back(std::move(pixels));
                }
                else if (pixels.counters.encoderPosition > maxEncoderPosition)
                {
                    // save to files
                    QMutexLocker l(&calibrationPixelsMutex);
                    qDebug() << "calibration pixels count:"
                             << ::calibrationPixels.size();
                    qDebug() << "calibration elapsed (s):"
                             << calibrationTimer.elapsed() / 1000;
                    // ::calibrationPixels.clear();
                    // TODO: use flags
                    // qDebug() << "stop calibration mode";
                    scanningModeFlags = ScanningModeFlags::None;
                    {
                        // NOTE: calibrationPixelsMutex is already held above;
                        // QMutex is not recursive, so this second locker would
                        // deadlock if this code were re-enabled.
                        QMutexLocker l(&calibrationPixelsMutex);
                        // QFuture dumpCalirationPixelsFuture
                        //     = QtConcurrent::run(&dumpCalibrationPixels,
                        //                         calibrationPixels);
                        auto future = QtConcurrent::run([&]() {
                            dumpCalibrationPixels(std::move(::calibrationPixels));
                        });
                    }
                }
                else
                {
                    // qDebug() << "calibration skip at pos:" <<
                    //     pixels.counters.encoderPosition;
                }
            }

            pgm_save(&img, nullptr);
        }
    }

    const libcamera::ControlList& metadata = completed_request->metadata();
    const ControlInfoMap& control_map = camera->controls();
    // const ControlIdMap& ctrlIdMap = control_map.idmap();
    auto frameDurationCtrl = control_map.find(&controls::FrameDurationLimits);
    // auto expTimeCtrl = control_map.find(&controls::ExposureTime);
    double fps = frameDurationCtrl == control_map.end()
                     ? std::numeric_limits<double>::quiet_NaN()
                     : (1e6 / frameDurationCtrl->second.min().get<std::int64_t>());
    auto exp = metadata.get(controls::ExposureTime);
    auto ag = metadata.get(controls::AnalogueGain);
    auto ae = metadata.get(controls::AeEnable);
    // auto br = metadata.get(controls::Brightness);

    lastControls = completed_request->controls();

    if (verbose)
    {
        std::cout << "fps: " << fps << " exp: " << *exp << " ag: " << *ag
                  // << " br: " << *br
                  << " ae: " << *ae << " aa: "
                  << *completed_request->controls().get(
                         libcamera::controls::ExposureTime
                     )
                  << std::endl;
    }

    completed_request->reuse(Request::ReuseBuffers);
    completed_request->controls().set(libcamera::controls::AeEnable, false);
    completed_request->controls().set(
        libcamera::controls::draft::NoiseReductionMode,
        libcamera::controls::draft::NoiseReductionModeEnum::
            NoiseReductionModeHighQuality
    );
    completed_request->controls().set(
        libcamera::controls::ExposureTime,
        requested_params.exposureTime
    );
    camera->queueRequest(completed_request);

    ++performanceCounter;
    elapsedSum += t.elapsed();
    // if (performanceCounter == 20)
    std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now();
    if ((now - fpsTimstamp) > 1000ms)
    {
        auto msPerFrame{float(elapsedSum / performanceCounter)};
        double configFps =
            frameDurationCtrl == control_map.end()
                ? std::numeric_limits<double>::quiet_NaN()
                : (1e6 / frameDurationCtrl->second.min().get<std::int64_t>());
        auto fps{1000.f / msPerFrame};
        // qDebug() << "fps ideal/real is" << configFps << "/" << fps
        //          << "; ms per frame is" << msPerFrame << "counted fps"
        //          << performanceCounter;
        elapsedSum = 0;
        performanceCounter = 0;
        fpsTimstamp = now;
    }
    // qDebug() << "-------------------------------------------";
}
*/

// static bool applyConfig(
//     const std::unique_ptr<libcamera::CameraConfiguration>& config
// )
// {
//     using namespace libcamera;
//     auto status = config->validate();
//     // WARNING: unsafe
//     libcamera::StreamConfiguration& streamConfig = config->at(0);
//     switch (status)
//     {
//         case CameraConfiguration::Status::Valid:
//             std::cout << "config is valid" << std::endl;
//             break;
//         case CameraConfiguration::Status::Adjusted:
//             std::cout << "\tpixelFormat: " << streamConfig.pixelFormat.toString()
//                       << std::endl;
//             std::cout << "\tbufferCount: " << streamConfig.bufferCount << std::endl;
//             std::cout << "\torientation: " << config->orientation << std::endl;
//             break;
//         case CameraConfiguration::Status::Invalid:
//             std::cout << "config is invalid, quit." << std::endl;
//             return false;
//     }
//     return true;
// }

// static void printControls()
// {
//     using namespace libcamera;
//     const libcamera::ControlInfoMap& control_map = camera->controls();
//     // for (const auto& [id, info] : control_map)
//     for (const std::pair<const ControlId* const, ControlInfo>& pair : control_map)
//     {
//         const ControlId* const& id = pair.first;
//         const ControlInfo& info = pair.second;
//         std::cout << "\tc " << id->name() << " (" << id->id()
//                   << "): " << info.toString()
//                   << (info.def().isNone()
"" // : " (dflt:" + info.def().toString() + ")"); // if (!info.values().size()) // { // std::cout << std::endl; // continue; // } // std::cout << " - ["; // for (const auto& v : info.values()) // { // std::cout << " " << v.toString(); // } // std::cout << " ]\n"; // } // } bool initLaser() { const QLatin1String pwmChip{"pwmchip2"}; const uint16_t pwmChannel{1}; const QLatin1String pwmSystemRoot{"/sys/class/pwm"}; const QString pwmChipRoot{pwmSystemRoot + "/" + pwmChip}; const QString pwmExportFile{pwmChipRoot + "/export"}; QFile f{pwmExportFile}; if (!f.open(QFile::WriteOnly)) { qWarning() << "cannot open" << f.fileName() << "for writing"; qWarning() << "error:" << f.errorString(); return false; } QTextStream s{&f}; s << pwmChannel; const QString pwm{QLatin1String("pwm%1").arg(QString::number(pwmChannel))}; const QString pwmRoot{pwmChipRoot + "/" + pwm}; const QString periodFilename{pwmRoot + "/period"}; f.close(); f.setFileName(periodFilename); if (!f.open(QFile::WriteOnly)) { qWarning() << "cannot open" << f.fileName() << "for writing"; qWarning() << "error:" << f.errorString(); return false; } const unsigned periodHz{50'000}; s << periodHz; const QString dutyCycleFilename{pwmRoot + "/duty_cycle"}; f.close(); f.setFileName(dutyCycleFilename); if (!f.open(QFile::WriteOnly)) { qWarning() << "cannot open" << f.fileName() << "for writing"; qWarning() << "error:" << f.errorString(); return false; } const unsigned dutyCycle{3'000}; s << dutyCycle; const QString enableFilename{pwmRoot + "/enable"}; f.close(); f.setFileName(enableFilename); if (!f.open(QFile::WriteOnly)) { qWarning() << "cannot open" << f.fileName() << "for writing"; qWarning() << "error:" << f.errorString(); return false; } const int enable{1}; s << enable; return true; }