#include "veyeimx287m.h"

// NOTE(review): the original system-include list was garbled during
// extraction (the angle-bracket targets were stripped).  The headers below
// were reconstructed from the symbols used in this file — verify against
// the repository copy.
#include <fcntl.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <unistd.h>

#include <cerrno>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <iostream>
#include <memory>
#include <vector>

#include "constants.h"
#include "imagealgos.h"
#include "mem_utils.h"
#include "pixels.h"
// #include "rotaryencoder.h"

// Simple printf-based logging.  DBG prefixes the message with the current
// function name and line number.
#define LOGD(...) \
    do { \
        printf(__VA_ARGS__); \
        printf("\n"); \
    } while (0)
#define DBG(fmt, args...) LOGD("%s:%d, " fmt, __FUNCTION__, __LINE__, ##args);

// Per-second profiling accumulators (nanoseconds per stage, dropped-frame
// count); defined elsewhere in the project, printed and reset once a second
// in dequeueImageBuffer().
extern uint64_t dq_elapsed_ns;
extern uint64_t get_elapsed_ns;
extern uint64_t sum_elapsed_ns;
extern uint64_t corr_elapsed_ns;
extern uint64_t max_elapsed_ns;
extern uint64_t value_elapsed_ns;
extern uint64_t rot_elapsed_ns;
extern uint64_t pix_elapsed_ns;
extern uint64_t dropped_count;

// constexpr char videoDevice[] = "/dev/video0";

VeyeIMX287m::VeyeIMX287m() {}

/// Stops the worker threads, stops V4L2 streaming, unmaps the capture
/// buffers and closes the camera device.
VeyeIMX287m::~VeyeIMX287m()
{
    // join() on a default-constructed (non-joinable) std::jthread throws
    // std::system_error, so guard every thread — the destructor may run
    // before startStream() was ever called.
    for (auto &t : m_calcThreads) {
        if (t.joinable()) {
            t.request_stop();
            t.join();
        }
    }
    if (m_streamThread.joinable()) {
        m_streamThread.request_stop();
        m_streamThread.join();
    }

    // Only talk to the driver if the device was actually opened.
    if (m_cam_fd >= 0) {
        int buffer_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(m_cam_fd, VIDIOC_STREAMOFF, &buffer_type) == -1) {
            std::cout << "cannot stop stream" << std::endl;
        }
    }

    for (const auto buffer : m_videoBuffers) {
        // Skip buffers that were never mapped (init-failure path).
        if (buffer == nullptr || buffer == MAP_FAILED)
            continue;
        // NOTE(review): the mapping was created with buffer.length as the
        // size but is released with img_size — confirm the two are equal
        // for this format.
        if (munmap(buffer, img_size) < 0) {
            DBG("Munmap failed!!.");
        }
    }

    if (m_cam_fd >= 0) {
        if (close(m_cam_fd) == -1) {
            std::cout << __func__ << ": cannot close camera: "
                      << strerror(errno) << std::endl;
        }
    }
    std::cout << "camera closed" << std::endl;
}

/// Probes for cameras.  Currently returns at most one: the fixed video
/// device, fully initialized and pre-configured with default exposure,
/// laser level and gain, or an empty vector if any step fails.
// NOTE(review): the template arguments of the return type and of
// make_shared were stripped in the garbled original; reconstructed as
// std::shared_ptr<VeyeIMX287m> — verify against the header declaration.
std::vector<std::shared_ptr<VeyeIMX287m>> VeyeIMX287m::search()
{
    // return only one camera for now
    std::cout << std::boolalpha;
    auto cam = std::make_shared<VeyeIMX287m>();
    if (!cam->init())
        return {};
    // if (!cam->setExposureTimeUs(30))
    if (!cam->setExposureTimeUs(250))
        return {};
    if (!cam->setLaserLevel(1))
        return {};
    if (!cam->setGain(2))
        return {};
    if (!cam->setSomething(0)) {
        return {};
    }
    return {cam};
}

/// Starts V4L2 streaming and launches the frame-processing threads.
bool VeyeIMX287m::startStream()
{
    int buffer_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    auto ret = ioctl(m_cam_fd, VIDIOC_STREAMON, &buffer_type);
    if (ret != 0) {
        DBG("ioctl(VIDIOC_STREAMON) failed %d(%s)", errno, strerror(errno));
        return false;
    }
    // m_streamThread = std::jthread{&VeyeIMX287m::dequeueFrameLoop, this};
    for (auto &t : m_calcThreads) {
        t = std::jthread{&VeyeIMX287m::calcFrameLoop, this};
    }
    return true;
}

/// Opens, selects and configures the camera device.  Returns false on the
/// first failing step.
bool VeyeIMX287m::init()
{
    if (!openCam())
        return false;
    if (!selectCam())
        return false;
    if (!initCam())
        return false;
    return true;
}

/// Sets the exposure time via V4L2_CID_EXPOSURE.
/*
 * Shutter time: value is from 8721ns to 8721*885ns and must be an
 * integral multiple of 8721ns, i.e. 8721 x N (N = 1, 2, ... 885).
 * NOTE(review): the quantization/clamping implied above is currently NOT
 * applied — the raw microsecond value is passed straight to the driver.
 */
bool VeyeIMX287m::setExposureTimeUs(int valueUs)
{
    std::cout << __func__ << ": " << V4L2_CID_EXPOSURE << " - " << valueUs
              << std::endl << std::flush;
    // constexpr int exposureStep{8721};
    // constexpr int maxExposureStepMultiplier{885};
    // auto valueNs = valueUs;
    // valueNs = (valueNs / exposureStep) * exposureStep;
    // std::clamp(valueNs, exposureStep, exposureStep * maxExposureStepMultiplier);
    return setCamParam(V4L2_CID_EXPOSURE, valueUs);
}

/// Sets the analog gain.  Currently a no-op (see FIXME).
bool VeyeIMX287m::setGain(int value)
{
    std::cout << __func__ << ": " << value << std::endl << std::flush;
    // return setCamParam(V4L2_CID_GAIN, value);
    // FIXME: tmp workaround for imx287llr
    return true;
}

/// Sets the laser level.  Currently a no-op (see FIXME).
bool VeyeIMX287m::setLaserLevel(int value)
{
    std::cout << __func__ << ": " << value << std::endl << std::flush;
    // return setCamParam(V4L2_CID_FLASH_TIMEOUT, value);
    // FIXME: tmp workaround for imx287llr
    return true;
}

/// Placeholder control setter.  Currently a no-op (see FIXME).
bool VeyeIMX287m::setSomething(int value)
{
    std::cout << __func__ << ": " << value << std::endl << std::flush;
    // return setCamParam(V4L2_CID_FLASH_INTENSITY, value);
    // FIXME: tmp workaround for imx287llr
    return true;
}

/// Sets a V4L2 control and reads it back, logging the value the driver
/// actually accepted.  Returns false if either ioctl fails.
bool VeyeIMX287m::setCamParam(unsigned int v4l2controlId, int value)
{
    v4l2_control ctl{v4l2controlId, value};
    if (ioctl(m_cam_fd, VIDIOC_S_CTRL, &ctl) < 0) {
        fprintf(stderr, "cannot set cam param: id - %d, error - '%s'\n",
                v4l2controlId, strerror(errno));
        fflush(stderr);
        return false;
    }
    if (ioctl(m_cam_fd, VIDIOC_G_CTRL, &ctl) < 0) {
        fprintf(stderr, "cannot get cam param: id - %d, error - '%s'\n",
                v4l2controlId, strerror(errno));
        fflush(stderr);
        return false;
    }
    std::cout << __func__ << ": new value is " << ctl.value << std::endl;
    return true;
}

/// Opens the video device read/write; stores the fd in m_cam_fd.
bool VeyeIMX287m::openCam()
{
    m_cam_fd = open(videoDevice, O_RDWR);
    if (m_cam_fd < 0) {
        fprintf(stderr, "cannot open cam '%s', error: '%s'\n", videoDevice,
                strerror(errno));
        return false;
    }
    return true;
}

/// Selects the V4L2 input with the given index.
bool VeyeIMX287m::selectCam(int camIdx)
{
    int input = camIdx;
    int ret = ioctl(m_cam_fd, VIDIOC_S_INPUT, &input);
    if (ret < 0) {
        fprintf(stderr, "cannot select cam: idx - %d, error - '%s'\n", camIdx,
                strerror(errno));
        return false;
    }
    return true;
}

/// Negotiates the capture format (8-bit grey, img_width x img_height),
/// requests BUFFER_COUNT mmap buffers, maps them into m_videoBuffers and
/// queues them all.  Streaming itself is started by startStream().
bool VeyeIMX287m::initCam()
{
    v4l2_format format;
    memset(&format, 0, sizeof(v4l2_format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY;
    format.fmt.pix.width = img_width;
    format.fmt.pix.height = img_height;
    int ret = ioctl(m_cam_fd, VIDIOC_TRY_FMT, &format);
    if (ret < 0) {
        fprintf(stderr, "cannot try cam format: error - '%s'\n",
                strerror(errno));
        return false;
    }

    // TODO: remove this?
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(m_cam_fd, VIDIOC_S_FMT, &format);
    if (ret < 0) {
        fprintf(stderr, "cannot set cam format: error - '%s'\n",
                strerror(errno));
        return false;
    }

    struct v4l2_requestbuffers request;
    // The V4L2 API requires reserved fields to be zeroed.
    memset(&request, 0, sizeof(request));
    request.count = BUFFER_COUNT;
    request.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    request.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(m_cam_fd, VIDIOC_REQBUFS, &request);
    if (ret < 0) {
        fprintf(stderr, "cannot set cam request buffers: ioctl error - '%s'\n",
                strerror(errno));
        return false;
    }
    // The driver may grant fewer buffers than requested.
    if (request.count < BUFFER_COUNT) {
        fprintf(stderr, "cannot set cam request buffers\n");
        return false;
    }

    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.type = request.type;
    buffer.memory = V4L2_MEMORY_MMAP;
    for (uint32_t i = 0; i < request.count; i++) {
        buffer.index = i;
        ret = ioctl(m_cam_fd, VIDIOC_QUERYBUF, &buffer);
        if (ret < 0) {
            DBG("ioctl(VIDIOC_QUERYBUF) failed %d(%s)", errno,
                strerror(errno));
            return false;
        }
        DBG("buffer.length: %u", buffer.length);
        DBG("buffer.m.offset: %u", buffer.m.offset);
        m_videoBuffers[i] = (uint8_t *) mmap(NULL, buffer.length,
                                             PROT_READ | PROT_WRITE,
                                             MAP_SHARED, m_cam_fd,
                                             buffer.m.offset);
        if (m_videoBuffers[i] == MAP_FAILED) {
            DBG("mmap() failed %d(%s)", errno, strerror(errno));
            return false;
        }

        // Hand the buffer to the driver so it can be filled.
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = i;
        ret = ioctl(m_cam_fd, VIDIOC_QBUF, &buffer);
        if (ret != 0) {
            DBG("ioctl(VIDIOC_QBUF) failed %d(%s)", errno, strerror(errno));
            return false;
        }
    }

    // int buffer_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    // ret = ioctl(m_cam_fd, VIDIOC_STREAMON, &buffer_type);
    // if (ret != 0)
    // {
    //     DBG("ioctl(VIDIOC_STREAMON) failed %d(%s)", errno, strerror(errno));
    //     return false;
    // }
    DBG("cam init done.");
    return true;
}

/// Dedicated dequeue thread body — currently disabled; calcFrameLoop
/// dequeues frames itself.
void VeyeIMX287m::dequeueFrameLoop(std::stop_token stopToken)
{
    // std::cout << "VeyeIMX287m: start stream" << std::endl;
    // while (!stopToken.stop_requested()) {
    //     size_t imageIndex{};
    //     if (!dequeueImageBuffer(imageIndex))
    //         // break;
    //         continue;
    // }
    // std::cout << "VeyeIMX287m: stream interruption requested" << std::endl;
}

/// Worker-thread body: dequeues a frame, copies it out of the mmap'ed
/// buffer, rotates it and publishes the pixels via the newPixels signal.
void VeyeIMX287m::calcFrameLoop(std::stop_token stopToken)
{
    QElapsedTimer t;
    while (!stopToken.stop_requested()) {
        size_t bufferIdx{};
        if (!dequeueImageBuffer(bufferIdx)) {
            std::cout << "AAAAAAAAAAAAAAAAAA" << std::endl;
            continue;
        }
        auto &image = m_images[bufferIdx];
        {
            t.start();
            // NEON copy; plain memcpy and byte-by-byte variants kept for
            // reference (timings in us per frame):
            // memcpy(&image.data, m_videoBuffers[bufferIdx], img_size);   // get: 4100-4500
            // memcpy_1by1((std::byte *) &image.data,
            //             (std::byte *) m_videoBuffers[bufferIdx]);       // get: 5000-5100
            // NOTE(review): dequeueImageBuffer() re-queues the V4L2 buffer
            // before this copy runs, so the driver may overwrite it while we
            // read — confirm BUFFER_COUNT gives enough slack.
            memcpy_neon((ARRAY_TYPE *) &image.data,
                        (ARRAY_TYPE *) m_videoBuffers[bufferIdx]);
            get_elapsed_ns += t.nsecsElapsed();
        }
        image.rotate();
        // const auto pixels = image.pixels();
        const auto pixels = image.sharedPixels();
#pragma push_macro("emit")
#undef emit
        newPixels.emit(pixels);
#pragma pop_macro("emit")
        // const auto lines = pixelsToLines(*pixels);
    }
}

/// Dequeues one filled V4L2 buffer, fills the matching m_images entry's
/// metadata (sequence counter, timestamp) and immediately re-queues the
/// buffer.  Also prints the per-second FPS/profiling report.  On success
/// imageIndex receives the buffer index; returns false on any ioctl error.
bool VeyeIMX287m::dequeueImageBuffer(size_t &imageIndex)
// TODO: get Image from video_buffer_ptr
{
    // Once a second: print FPS plus the per-stage timing accumulators
    // (averaged per frame, in microseconds) and reset them.
    static struct timeval curr, prev;
    static uint16_t counter = 0;
    gettimeofday(&curr, NULL);
    double elapsedTime = (curr.tv_sec - prev.tv_sec) * 1000.0;  // sec to ms
    elapsedTime += (curr.tv_usec - prev.tv_usec) / 1000.0;      // us to ms
    if (elapsedTime > 1000.) {
        // On the very first call prev is zero, so elapsedTime is huge while
        // counter is still 0 — guard against integer division by zero.
        if (counter > 0) {
            // uint64_t/time_t arguments must not be printed with %d
            // (undefined behavior); cast to fixed widths instead.
            fprintf(stderr,
                    "fps: %u\tdropped: %llu sec: %lld "
                    "dq: %llu get: %llu rot: %llu pix: %llu sum: %llu corr: "
                    "%llu val: %llu\n",
                    counter,
                    (unsigned long long) dropped_count,
                    (long long) (curr.tv_sec % 1000),
                    (unsigned long long) (dq_elapsed_ns / 1000 / counter),
                    (unsigned long long) (get_elapsed_ns / 1000 / counter),
                    (unsigned long long) (rot_elapsed_ns / 1000 / counter),
                    (unsigned long long) (pix_elapsed_ns / 1000 / counter),
                    (unsigned long long) (sum_elapsed_ns / 1000 / counter),
                    (unsigned long long) (corr_elapsed_ns / 1000 / counter),
                    // max_elapsed_ns / 1000 / counter,
                    (unsigned long long) (value_elapsed_ns / 1000 / counter));
        }
        dq_elapsed_ns = 0;
        get_elapsed_ns = 0;
        sum_elapsed_ns = 0;
        corr_elapsed_ns = 0;
        max_elapsed_ns = 0;
        value_elapsed_ns = 0;
        rot_elapsed_ns = 0;
        pix_elapsed_ns = 0;
        dropped_count = 0;
        counter = 0;
        prev = curr;
    }

    int ret;
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    static uint16_t requestIdx{0};
    // The driver picks the actual buffer on DQBUF; this round-robin index
    // is only a starting hint.
    buffer.index = requestIdx++ % BUFFER_COUNT;

    {
        QElapsedTimer t;
        t.start();
        std::lock_guard lock(m_camMtx);
        ret = ioctl(m_cam_fd, VIDIOC_DQBUF, &buffer);
        if (ret != 0) {
            DBG("ioctl(VIDIOC_DQBUF) failed %d(%s)", errno, strerror(errno));
            return false;
        }
        // buffer.index is unsigned (__u32), so only the upper bound can be
        // violated — the original `< 0` half of the check was always false.
        if (buffer.index >= BUFFER_COUNT) {
            DBG("invalid buffer index: %d", buffer.index);
            return false;
        }
        ++counter;
        dq_elapsed_ns += t.nsecsElapsed();
    }

    imageIndex = buffer.index;
    auto &image = m_images[buffer.index];
    image.height = img_height;
    image.width = img_width;
    // TODO: fill
    // image.counters.encoderPosition = RotaryEncoder::instance()->position();
    image.counters.measurementCounter = buffer.sequence;
    // Count dropped frames via gaps in the driver's sequence number.  The
    // gap is clamped at zero: on the first call (prevCounter is initialized
    // to the current sequence) the raw expression is -1, which would
    // underflow the unsigned accumulator to a huge value.
    static int64_t prevCounter = buffer.sequence;
    const int64_t seqGap = static_cast<int64_t>(buffer.sequence) - prevCounter - 1;
    if (seqGap > 0)
        dropped_count += seqGap;
    prevCounter = buffer.sequence;
    image.counters.timestampUs =
        buffer.timestamp.tv_sec * 1000 * 1000 + buffer.timestamp.tv_usec;

    // Give the buffer straight back to the driver; the caller copies the
    // payload out of m_videoBuffers[imageIndex] afterwards.
    {
        std::lock_guard lock(m_camMtx);
        ret = ioctl(m_cam_fd, VIDIOC_QBUF, &buffer);
    }
    if (ret != 0) {
        DBG("ioctl(VIDIOC_QBUF) failed %d(%s)", errno, strerror(errno));
        return false;
    }
    return true;
}

/// Single-shot capture: dequeues one frame into the caller's Image.
/// The move takes the frame metadata (counters, dimensions); the pixel
/// payload is then copied straight from the mmap'ed capture buffer.
bool VeyeIMX287m::getImage(Image &image)
{
    size_t bufferIdx{};
    if (!dequeueImageBuffer(bufferIdx)) {
        std::cout << "AAAAAAAAAAAAAAAAAA" << std::endl;
        return false;
    }
    image = std::move(m_images[bufferIdx]);
    {
        QElapsedTimer t;
        t.start();
        memcpy(&image.data, m_videoBuffers[bufferIdx], img_size);
        get_elapsed_ns += t.nsecsElapsed();
    }
    return true;
}