#include "httpservice.h"
#include "genetic_algos.h"
#include "imagealgos.h"
#include "LibCamera.h"
#include "pigpio.h"
#include "printerclient.h"
#include "rotaryencoder.h"

// NOTE(review): the original <...> header names and template arguments were
// stripped when this file was mangled. The system-header list below is
// reconstructed from the identifiers actually used in this translation unit —
// TODO: confirm against the pre-mangled source. Unqualified libcamera /
// Pistache names (Request, StreamRole, Port, ...) presumably come from
// using-directives in the project headers above — verify.
#define QT_NO_KEYWORDS
#include <QCoreApplication>
#include <QDateTime>
#include <QDebug>
#include <QDir>
#include <QElapsedTimer>
#include <QFile>
#include <QFuture>
#include <QHostAddress>
#include <QHttpServer>
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QMutex>
#include <QTextStream>
#include <QtConcurrent>
#undef QT_NO_KEYWORDS

#include <chrono>
#include <iostream>
#include <map>
#include <memory>
#include <thread>
#include <vector>
#include <sys/mman.h>

// Abort main() when the generated camera configuration cannot be applied:
// releases the camera and stops the camera manager before returning.
#define try_apply_config() \
    if(!applyConfig(config)) \
    { \
        camera->release(); \
        cm->stop(); \
        \
        return EXIT_FAILURE;\
    }

// JSON keys understood by the /v1/sensor/params endpoint.
const QString exposureTimeKey = "exposureTime";
const QString laserLevelKey = "laserLevel";

// Scanning state. Declared as flags, but currently only one mode is used.
enum ScanningModeFlags : uint8_t
{
    None = 0,
    Calibration
};

// ScanningModeFlags operator|(ScanningModeFlags lhs, ScanningModeFlags rhs)
// {
//     using T = std::underlying_type<ScanningModeFlags>::type;
//     return static_cast<ScanningModeFlags>(static_cast<T>(lhs) | static_cast<T>(rhs));
// }

ScanningModeFlags scanningModeFlags { ScanningModeFlags::None };
QElapsedTimer calibrationTimer;         // measures how long a calibration run takes

extern volatile int32_t positionSteps;  // raw encoder counter, owned by rotaryencoder

// Parameters requested over HTTP; picked up by the request-completion handler.
struct requested_params_t
{
    int32_t exposureTime = { 1000 };    // sensor exposure, microseconds
    int32_t laserLevel = { 3000 };      // PWM duty cycle written to sysfs
    uint32_t stepsPerMm { 200 };        // encoder steps per millimetre of travel
} requested_params;

namespace
{
    Image img;                              // last captured frame
    Pixels pixels;                          // last extracted profile
    std::vector<Pixels> calibrationPixels;  // profiles collected during calibration
    QMutex calibrationPixelsMutex;          // guards calibrationPixels
}

const QString dumpsRoot { QStringLiteral("/home/user/dumps") };

using namespace std::chrono_literals;

static std::shared_ptr<Camera> camera;
std::unique_ptr<CameraConfiguration> config;
// plane fd -> (mmap'ed address, length) for every allocated frame-buffer plane
static std::map<int, std::pair<void *, size_t>> mappedBuffers_;
std::vector<std::unique_ptr<Request>> requests;
ControlList lastControls;               // control metadata of the last completed request

static bool applyConfig(const std::unique_ptr<CameraConfiguration> & config);
static void onRequestCompleted(Request *completed_request);
static void printControls();
static void dumpCalibrationPixels();
static std::vector<Pixels> openDump(const QString dumpPath = "");

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    QElapsedTimer t;
    t.start();

    qDebug() << "msecs before encoder:" << t.elapsed();
    RotaryEncoder encoder;
    PrinterClient printerClient;
    qDebug() << "msecs before camera:" << t.elapsed();

    // FIXME: don't use one var for everything
    int ret;

    std::unique_ptr<CameraManager> cm = std::make_unique<CameraManager>();
    cm->start();

    const auto cameras = cm->cameras();

    openDump();

    if (cameras.empty())
    {
        std::cout << "No cameras were identified on the system." << std::endl;
        cm->stop();
        return EXIT_FAILURE;
    }

    std::string cameraId = cameras[0]->id();
    std::cout << "using " << cameraId << std::endl;

    /*
     * Note that `camera` may not compare equal to `cameras[0]`.
     * In fact, it might simply be a `nullptr`, as the particular
     * device might have disappeared (and reappeared) in the meantime.
     */
    // std::shared_ptr<Camera> camera = cm->get(cameraId);
    camera = cm->get(cameraId);
    if (camera->acquire() != EXIT_SUCCESS)
    {
        std::cout << "Cannot acquire camera." << std::endl;
        cm->stop();
        return EXIT_FAILURE;
    }

    // FIXME: nullptr
    // config = camera->generateConfiguration( { StreamRole::Viewfinder } );
    config = camera->generateConfiguration( { StreamRole::Raw } );
    if (config->empty())
    {
        std::cerr << "No configurations generated." << std::endl;
        cm->stop();
        return EXIT_FAILURE;
    }

    config->orientation = libcamera::Orientation::Rotate90;

    // if (camera->configure(config.get()) != EXIT_SUCCESS)
    // {
    //     std::cerr << "cannot configure camera" << std::endl << std::flush;
    //     cm->stop();
    //     return EXIT_FAILURE;
    // }

    // FIXME: nullptr
    StreamConfiguration &streamConfig = config->at(0);
    std::cout << "Default viewfinder configuration is: " << streamConfig.toString() << std::endl;
    std::cout << "Pixel format is: " << streamConfig.pixelFormat.toString() << std::endl;
    std::cout << "Buffer count is: " << streamConfig.bufferCount << std::endl;
    // FIXME: empty variant
    std::cout << "Color space is: " << streamConfig.colorSpace.value().toString() << std::endl;
    std::cout << "Orientation is: " << config->orientation << std::endl;

    // Raw formats tried so far:
    // formats::R8,
    // formats::R10,
    // formats::R12,
    // formats::R16,
    // formats::R10_CSI2P, // camera->configure failure
    // formats::R12_CSI2P, // camera->configure failure
    // streamConfig.pixelFormat = PixelFormat::fromString("R8");
    // streamConfig.pixelFormat = PixelFormat::fromString("Y8_1X8");
    // streamConfig.pixelFormat = formats::R8;
    streamConfig.pixelFormat = formats::R16;
    streamConfig.bufferCount = 2;
    // what is default R10_CSI2P? MONO_PISP_COMP1?
    // MONO_PISP_COMP1 - check rpicam-apps sources for decoding algos
    // streamConfig.pixelFormat = formats::R10_CSI2P;
    // streamConfig.bufferCount = 16;

    try_apply_config()

    // #define doit(rotation) \
    //     std::cout << "set rotation to: " << libcamera::Orientation:: rotation \
    //               << std::endl; \
    //     config->orientation = libcamera::Orientation:: rotation; \
    //     try_apply_config()
    // doit(Rotate0Mirror);
    // doit(Rotate180);
    // doit(Rotate180Mirror);
    // doit(Rotate90Mirror);
    // doit(Rotate270);
    // doit(Rotate270Mirror);
    // doit(Rotate90);

    std::cout << "new config " << streamConfig.toString() << std::endl;

    // FIXME: may crassh even on success (e.g. by setting pixelFormat to "8")
    if (camera->configure(config.get()) != EXIT_SUCCESS)
    {
        std::cout << "cannot apply config, quit." << std::endl;
        camera->release();
        cm->stop();
        return EXIT_FAILURE;
    }

    // TODO: try custom FrameBufferAllocator and compare performance
    auto allocator = std::make_shared<FrameBufferAllocator>(camera);

    auto stream = streamConfig.stream();

    ret = allocator->allocate(stream);
    // TODO: check if zero
    if (ret < 0)
    {
        std::cerr << "Can't allocate buffers" << std::endl;
        // return -ENOMEM;
        return ret;
    }

    size_t allocated = size_t(ret);
    std::cout << "Allocated " << allocated << " buffers for stream" << std::endl;

    const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator->buffers(stream);

    // One capture request per allocated buffer.
    for (const auto & buffer : buffers)
    {
        std::unique_ptr<Request> request = camera->createRequest();
        if (!request)
        {
            std::cerr << "Can't create request" << std::endl;
            return -ENOMEM;
        }

        // TODO: try multiple buffers per request and compare performance
        int ret = request->addBuffer(stream, buffer.get());
        if (ret < 0)
        {
            std::cerr << "Can't set buffer for request" << std::endl;
            return ret;
        }

        // Map every plane into this process so the completion handler can
        // read pixel data directly; unmapped again before shutdown.
        for (const auto & plane : buffer->planes())
        {
            void *memory = mmap(NULL, plane.length, PROT_READ, MAP_SHARED, plane.fd.get(), 0);
            mappedBuffers_[plane.fd.get()] = std::make_pair(memory, plane.length);
        }

        size_t desiredFPS = 144;
        std::int64_t lowerUS = 1 * 1000 * 1000 / desiredFPS;
        std::int64_t higherUS = lowerUS;
        std::int64_t value_pair[2] = { higherUS / 2, higherUS };

        request->controls().set(libcamera::controls::AnalogueGain, 1.0);
        request->controls().set(libcamera::controls::ExposureTime, 100);
        request->controls().set(
            libcamera::controls::FrameDurationLimits,
            libcamera::Span<const std::int64_t, 2>(value_pair));

        requests.push_back(std::move(request));
    }

    camera->requestCompleted.connect(onRequestCompleted);

    std::unique_ptr<libcamera::ControlList> camcontrols { new libcamera::ControlList() };
    // camcontrols->set(controls::FrameDurationLimits,
    //                  libcamera::Span<const std::int64_t, 2>({8702, 10718903}));
    // camcontrols->set(controls::ExposureTime, 100);
    // camcontrols->set(controls::AnalogueGain, 0.1);

    std::this_thread::sleep_for(500ms);

    if (camera->start(camcontrols.get()))
    {
        qDebug() << "failed to start camera";
        return EXIT_FAILURE;
    }
    // camera->start();

    for (auto & request : requests)
    {
        camera->queueRequest(request.get());
    }

    printControls();

    // std::this_thread::sleep_for(2s);

    // TODO: move to thread
    // Http::listenAndServe(Pistache::Address("*:8080"));

    QHttpServer qHttpServer;
    qHttpServer.route("/v1/sensor/image", [&]() {
        std::lock_guard lg(pgm_image_mtx);
        // qDebug() << "image";
        return QByteArray((const char*)pgm_image, pgm_image_size);
    });
    qHttpServer.route("/v1/sensor/image2", [&]() {
        std::lock_guard lg(pgm_image_mtx);
        // qDebug() << "image";
        return QByteArray((const char*)pgm_image, pgm_image_size);
    });
    // qHttpServer.route("/v1/sensor/exposureTimeUs", [&]() {
    //     // std::lock_guard lg(pgm_image_mtx);
    //     return "123";
    // });
    qHttpServer.route("/v1/pixels", [&]() {
        std::lock_guard lg(pgm_image_mtx);
        QJsonArray pixels;
        for (size_t i = 0; i < img_width; ++i)
        {
            // pixels << img_height - img.pixels[i];
            pixels << img_height - ::pixels.pixels[i];
        }
        QJsonObject json;
        json["pixels"] = pixels;
        json["encoderPosition"] = qint64 { encoder.position() };
        json["measurementCounter"] = qint64 { img.counters.measurementCounter };
        json["timestampUs"] = qint64(img.counters.timestampUs);
        return QHttpServerResponse(QJsonDocument(json).toJson());
    });
    qHttpServer.route("/v1/commands/resetEncoder",
        [&](const QHttpServerRequest &request) -> QHttpServerResponse {
        if (request.method() != QHttpServerRequest::Method::Post)
        {
            return QHttpServerResponse::StatusCode::NotFound;
        }
        qDebug() << "reset encoder";
        positionSteps = 0;
        return QHttpServerResponse::StatusCode::Ok;
    });
    qHttpServer.route("/v1/commands/startCalibration",
        [&](const QHttpServerRequest &request) -> QHttpServerResponse {
        if (request.method() != QHttpServerRequest::Method::Post)
        {
            return QHttpServerResponse::StatusCode::NotFound;
        }
        qDebug() << "start calibration";
        // TODO: use flags
        scanningModeFlags = ScanningModeFlags::Calibration;
        calibrationTimer.start();
        return QHttpServerResponse::StatusCode::Ok;
    });
    qHttpServer.route("/v1/commands/gCode",
        [&](const QHttpServerRequest &request) -> QHttpServerResponse {
        if (request.method() != QHttpServerRequest::Method::Post)
        {
            return QHttpServerResponse::StatusCode::NotFound;
        }
        const auto command = request.body();
        qDebug() << "send gCode:" << command;
        printerClient.sendCommand(command);
        return QHttpServerResponse::StatusCode::Ok;
    });
    // BUG(review): a second handler for "/v1/commands/startCalibration" was
    // registered here containing a copy of the gCode handler body. It
    // duplicated an already-registered path and was clearly a copy-paste
    // left-over, so it has been removed.
    qHttpServer.route("/v1/sensor/params",
        [&](const QHttpServerRequest &request) -> QHttpServerResponse {
        switch (request.method())
        {
            case QHttpServerRequest::Method::Get:
            {
                std::lock_guard lg(pgm_image_mtx);
                QJsonObject json;
                const ControlIdMap & ctrlIdMap = camera->controls().idmap();
                qDebug() << "readParams:" << lastControls.size();
                qDebug() << request.method();
                for (const auto & [id, value]: lastControls)
                {
                    const ControlId * controlId = ctrlIdMap.at(id);
                    auto name = QString::fromStdString(controlId->name());
                    const auto valueStr = QString::fromStdString(value.toString());
                    qDebug() << "\t param:" << controlId->id() << name << valueStr ;
                    // libcamera control names are PascalCase; JSON keys camelCase.
                    name[0] = name[0].toLower();
                    json[name] = valueStr;
                }
                json[laserLevelKey] = requested_params.laserLevel;
                qDebug() << "response body:" << json;
                return QHttpServerResponse(QJsonDocument(json).toJson());
            }
            case QHttpServerRequest::Method::Post:
            {
                qDebug() << "request body:" << request.body();
                auto json = QJsonDocument::fromJson(request.body()).object();
                if (json.contains(exposureTimeKey))
                {
                    const int32_t value { json[exposureTimeKey].toInt() };
                    if (value == 0)
                    {
                        return QHttpServerResponse::StatusCode::NotFound;
                    }
                    qDebug() << "set new exposure time:" << value;
                    requested_params.exposureTime = value;
                }
                if (json.contains(laserLevelKey))
                {
                    const int32_t value { json[laserLevelKey].toInt() };
                    if (value == 0)
                    {
                        return QHttpServerResponse::StatusCode::NotFound;
                    }
                    qDebug() << "set new laserLevel:" << value;
                    requested_params.laserLevel = value;
                    // Laser power is driven through the sysfs PWM interface.
                    const QString laserLevelFile { "/sys/class/pwm/pwmchip2/pwm1/duty_cycle"};
                    QFile f { laserLevelFile };
                    if (!f.open(QFile::ReadWrite))
                    {
                        qDebug() << "cannot open laser level file:" << f.errorString();
                        qDebug() << "file path is" << f.fileName();
                        return QHttpServerResponse::StatusCode::InternalServerError;
                    }
                    QTextStream s { &f };
                    s << value;
                    s >> requested_params.laserLevel;
                    qDebug() << "done with laser level";
                }
                return QHttpServerResponse(request.body());
            }
            default:
            {
                return QHttpServerResponse(QByteArray("unsupported http method"));
            }
        }
    });

    qDebug() << "listen: " << qHttpServer.listen(QHostAddress::Any, 8081);

    // Legacy Pistache-based service kept alive on a worker thread.
    QFuture<void> future = QtConcurrent::run([](){
        Port port(8080);
        Address addr(Ipv4::any(), port);
        HttpService httpService(addr);
        size_t threads_count = 1;
        httpService.init(threads_count);
        httpService.start();
    });

    ////////////////////////////////////////////////////////////////////////////

    std::clog << std::flush;
    std::cerr << std::flush;
    std::cout << "ok for now" << std::endl << std::flush;

    auto result = app.exec();

    future.cancel();
    future.waitForFinished();

    for (auto & [fd, mem] : mappedBuffers_)
    {
        munmap(mem.first, mem.second);
    }

    // FIXME: crash somewhere here. proper libcamera finishing needed
    requests.clear();
    mappedBuffers_.clear();
    camera->stop();
    config.reset();
    allocator->free(stream);
    allocator.reset();
    camera->release();
    camera.reset();
    cm->stop();

    return result;

    // NOTE(review): a large commented-out LibCamera example (initCamera /
    // readFrame loop with fps counting) that followed the return was removed
    // as dead code; see the LibCamera.h sample sources if it is needed again.
}

/*
 * Signals operate in the libcamera CameraManager thread context, so it is
 * important
not to block the thread for a long time, as this blocks internal * processing of the camera pipelines, and can affect realtime performance. */ void onRequestCompleted(Request *completed_request) { static std::chrono::steady_clock::time_point fpsTimstamp = std::chrono::steady_clock::now(); QElapsedTimer t; t.start(); static uint32_t performanceCounter { 0 }; static uint32_t elapsedSum { 0 }; bool verbose = false; if (completed_request->status() == Request::RequestCancelled) { std::cerr << "request canceled" << std::endl; return; } const std::map &buffers = completed_request->buffers(); // std::cout << "request completed, buffers count is " << buffers.size(); // // TODO: rewrite this shit for (auto [stream, buffer] : buffers) { const auto & streamConfig = stream->configuration(); const auto & imageSize = streamConfig.size; const auto & pixelFormat = streamConfig.pixelFormat; const auto & stride = streamConfig.stride; const FrameMetadata &metadata = buffer->metadata(); // if (verbose) // { // std::cout << " seq: " << std::setw(6) << std::setfill('0') // << metadata.sequence // << " bytesused: "; // } for (size_t i = 0; i < buffer->planes().size(); ++i) { const FrameBuffer::Plane & plane = buffer->planes()[i]; const FrameMetadata::Plane & metaplane = buffer->metadata().planes()[i]; size_t size = std::min(metaplane.bytesused, plane.length); void * data = mappedBuffers_[plane.fd.get()].first; // FIXME: remove hardcode img.width = imageSize.width; img.height = imageSize.height; // img.data = data; memcpy(img.data, data, size); img.dataSize = size; img.stride = stride; img.pixelFormat = pixelFormat; img.counters.measurementCounter = metadata.sequence; img.counters.timestampUs = metadata.timestamp / 1000; img.counters.encoderPosition = RotaryEncoder::instance()->position(); // qDebug() << "pos:" << img.counters.encoderPosition; // uint16_t unpacked[img.width * img.height] = { 0 }; // unpack_16bit((uint8_t*)img.data, img, (uint16_t*)&unpacked); // img.data = unpacked; // 
img.dataSize = img.width * img.height * sizeof(uint16_t); rotate(img); Pixels pixels = process_columns(img); ::pixels = pixels; // qDebug() << "calibration mode" << scanningModeFlags; if (scanningModeFlags == ScanningModeFlags::Calibration) { constexpr int32_t hardcodedZRangeMm { 175 }; const int32_t maxEncoderPosition = hardcodedZRangeMm * requested_params.stepsPerMm; // qDebug() << "calibration max range" << maxEncoderPosition; // qDebug() << "calibration encoder pos" << pixels.counters.encoderPosition; if (pixels.counters.encoderPosition >= 0 && pixels.counters.encoderPosition <= maxEncoderPosition) { qDebug() << "calibration save at pos:" << pixels.counters.encoderPosition; QMutexLocker l(&calibrationPixelsMutex); ::calibrationPixels.push_back(std::move(pixels)); } else if (pixels.counters.encoderPosition > maxEncoderPosition) { // save to files QMutexLocker l(&calibrationPixelsMutex); qDebug() << "calibration pixels count:" << ::calibrationPixels.size(); qDebug() << "calibration elapsed (s):" << calibrationTimer.elapsed() / 1000; // ::calibrationPixels.clear(); // TODO: use flags // qDebug() << "stop calibration mode"; scanningModeFlags = ScanningModeFlags::None; QFuture dumpCalirationPixelsFuture = QtConcurrent::run(&dumpCalibrationPixels); } else { // qDebug() << "calibration skip at pos:" << pixels.counters.encoderPosition; } } pgm_save(&img, nullptr); } } const libcamera::ControlList &metadata = completed_request->metadata(); const ControlInfoMap & control_map = camera->controls(); // const ControlIdMap & ctrlIdMap = control_map.idmap(); auto frameDurationCtrl = control_map.find(&controls::FrameDurationLimits); // auto expTimeCtrl = control_map.find(&controls::ExposureTime); double fps = frameDurationCtrl == control_map.end() ? 
std::numeric_limits::quiet_NaN() : (1e6 / frameDurationCtrl->second.min().get()); auto exp = metadata.get(controls::ExposureTime); auto ag = metadata.get(controls::AnalogueGain); auto ae = metadata.get(controls::AeEnable); // auto br= metadata.get(controls::Brightness); lastControls = completed_request->controls(); if (verbose) { std::cout << "fps: " << fps << " exp: " << *exp << " ag: " << *ag // << " br: " << *br << " ae: " << *ae << " aa: " << *completed_request->controls() .get(libcamera::controls::ExposureTime) << std::endl; } completed_request->reuse(Request::ReuseBuffers); completed_request->controls().set(libcamera::controls::AeEnable, false); completed_request->controls().set(libcamera::controls::draft ::NoiseReductionMode, libcamera::controls::draft ::NoiseReductionModeEnum ::NoiseReductionModeHighQuality); completed_request->controls().set(libcamera::controls::ExposureTime, requested_params.exposureTime); camera->queueRequest(completed_request); ++performanceCounter; elapsedSum += t.elapsed(); // if (performanceCounter == 20) std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); if ((now - fpsTimstamp) > 1000ms) { auto msPerFrame { float(elapsedSum / performanceCounter) }; double configFps = frameDurationCtrl == control_map.end() ? 
std::numeric_limits::quiet_NaN() : (1e6 / frameDurationCtrl->second.min().get()); auto fps { 1000.f / msPerFrame }; qDebug() << "fps ideal/real is" << configFps << "/" << fps << "; ms per frame is" << msPerFrame << "counted fps" << performanceCounter; elapsedSum = 0; performanceCounter = 0; fpsTimstamp = now; } // qDebug() << "-------------------------------------------"; } static bool applyConfig(const std::unique_ptr & config) { auto status = config->validate(); // WARNING: unsafe StreamConfiguration &streamConfig = config->at(0); switch (status) { case CameraConfiguration::Status::Valid: std::cout << "config is valid" << std::endl; break; case CameraConfiguration::Status::Adjusted: std::cout << "\tpixelFormat: " << streamConfig.pixelFormat.toString() << std::endl; std::cout << "\tbufferCount: " << streamConfig.bufferCount << std::endl; std::cout << "\torientation: " << config->orientation << std::endl; break; case CameraConfiguration::Status::Invalid: std::cout << "config is invalid, quit." << std::endl; return false; } return true; } static void printControls() { const ControlInfoMap & control_map = camera->controls(); // for (const auto & [id, info]: control_map) for (const std::pair & pair : control_map) { const ControlId * const & id = pair.first; const ControlInfo & info = pair.second; std::cout << "\tc " << id->name() << " (" << id->id() << "): " << info.toString() << (info.def().isNone() ? 
"" : " (dflt:" + info.def().toString() + ")"); if (!info.values().size()) { std::cout << std::endl; continue; } std::cout << " - ["; for (const auto & v : info.values()) { std::cout << " " << v.toString(); } std::cout << " ]\n"; } } static void dumpCalibrationPixels() { std::vector rawProfiles; { QMutexLocker l(&calibrationPixelsMutex); std::swap(rawProfiles, ::calibrationPixels); } const QString dumpSubdir { QDateTime::currentDateTime().toString("yyyy.MM.dd_hh.mm.ss") }; const QDir dumpPath { dumpsRoot + "/" + dumpSubdir }; if (!dumpPath.mkdir(dumpPath.path())) { qWarning() << "cannot create dump dir: " << dumpPath.path(); return; } for (const auto& rawProfile : rawProfiles) { const auto filename = QLatin1String("raw_profile_meas_%1_enc_%2") .arg(QString::number(rawProfile.counters.measurementCounter)) .arg(rawProfile.counters.encoderPosition); const auto filepath = dumpPath.path() + "/" + filename; QFile f { filepath }; if (!f.open(QFile::WriteOnly)) { qWarning() << "cannot open dump dump file" << f.fileName(); qWarning() << "error is:" << f.errorString(); return; } QJsonObject jsonCounters { { "timestampUs", qint64(rawProfile.counters.timestampUs) }, { "measurementCounter", qint64(rawProfile.counters.measurementCounter) }, { "encoderPosition", qint64(rawProfile.counters.encoderPosition) }, }; QJsonObject json; json["counters"] = jsonCounters; QJsonArray jsonPixels; for (const auto& pixel : rawProfile.pixels) { jsonPixels << pixel; } json["pixels"] = jsonPixels; if (!f.write(QJsonDocument(json).toJson())) { qWarning() << "cannot write file" << f.fileName(); qWarning() << "error is" << f.errorString(); return; } qDebug() << "file written: " << f.fileName(); } qDebug() << "dump finished"; } static std::vector openDump(const QString dumpPath) { std::vector result; QString dirToRead { dumpPath }; if (dirToRead.isEmpty()) { qDebug() << "dumpPath not specified. 
looking into" << dumpsRoot; QDir dumpsRootDir { dumpsRoot }; const auto filter = QDir::Dirs | QDir::NoDotAndDotDot | QDir::Readable; // there is no battery in my rpi5 for now const auto sort = QDir::Name; const auto entries = dumpsRootDir.entryList(filter, sort); if (entries.isEmpty()) { qWarning() << "dumps root" << dumpsRoot << "contains no dumps. " << "specify existing dump path"; return {}; } dirToRead = entries.last(); } QDir dumpDir { dumpsRoot + "/" + dirToRead }; const auto filter = QDir::Files; const auto sort = QDir::Name; const auto filenames = dumpDir.entryList(filter, sort); if (filenames.isEmpty()) { qDebug() << "no filenames found in" << dumpDir.path(); } for (const auto& filename : filenames) { qDebug() << "raw profile:" << filename; QFile f { dumpDir.path() + "/" + filename }; if (!f.open(QFile::ReadOnly)) { qWarning() << "cannot open file for reading:" << f.fileName(); qWarning() << "error string:" << f.errorString(); return {}; } // TODO: rewrite to remove manual serialization/deserialization const auto json = QJsonDocument::fromJson(f.readAll()).object(); const auto jsonCounters = json["counters"].toObject(); qDebug() << jsonCounters; Pixels rawProfile; rawProfile.counters.timestampUs = jsonCounters["timestampUs"].toInteger(); rawProfile.counters.measurementCounter = jsonCounters["measurementCounter"].toInteger(); rawProfile.counters.encoderPosition = jsonCounters["encoderPosition"].toInteger(); const auto jsonPixels = json["pixels"].toArray(); qDebug() << jsonPixels.count() << rawProfile.pixels.size(); for (size_t i = 0; i < jsonPixels.count() && i < rawProfile.pixels.size(); ++i) { rawProfile.pixels[i] = jsonPixels[i].toDouble(); } } // { // QMutexLocker l(&calibrationPixelsMutex); // std::swap(result, ::calibrationPixels); // } return result; }