#include "veyeimx287m.h"

// NOTE(review): the original system-header names were lost (angle brackets
// stripped). The set below is reconstructed from the identifiers actually
// used in this file — confirm against version control.
#include <fcntl.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <unistd.h>

#include <cerrno>
#include <cstdio>
#include <cstring>
#include <iostream>
#include <memory>
#include <mutex>
#include <thread>

#include <QElapsedTimer>

#include "constants.h"
#include "imagealgos.h"
#include "mem_utils.h"
#include "pixels.h"
// #include "rotaryencoder.h"

// Maps a human-readable pixel-format name to its V4L2 fourcc and the number
// of memory planes the format uses (used only for diagnostic printing).
static const struct v4l2_format_info {
    const char *name;
    unsigned int fourcc;
    unsigned char n_planes;
} pixel_formats[] = {
    {"RGB332", V4L2_PIX_FMT_RGB332, 1},
    {"RGB444", V4L2_PIX_FMT_RGB444, 1},
    {"ARGB444", V4L2_PIX_FMT_ARGB444, 1},
    {"XRGB444", V4L2_PIX_FMT_XRGB444, 1},
    {"RGB555", V4L2_PIX_FMT_RGB555, 1},
    {"ARGB555", V4L2_PIX_FMT_ARGB555, 1},
    {"XRGB555", V4L2_PIX_FMT_XRGB555, 1},
    {"RGB565", V4L2_PIX_FMT_RGB565, 1},
    {"RGB555X", V4L2_PIX_FMT_RGB555X, 1},
    {"RGB565X", V4L2_PIX_FMT_RGB565X, 1},
    {"BGR666", V4L2_PIX_FMT_BGR666, 1},
    {"BGR24", V4L2_PIX_FMT_BGR24, 1},
    {"RGB24", V4L2_PIX_FMT_RGB24, 1},
    {"BGR32", V4L2_PIX_FMT_BGR32, 1},
    {"ABGR32", V4L2_PIX_FMT_ABGR32, 1},
    {"XBGR32", V4L2_PIX_FMT_XBGR32, 1},
    {"RGB32", V4L2_PIX_FMT_RGB32, 1},
    {"ARGB32", V4L2_PIX_FMT_ARGB32, 1},
    {"XRGB32", V4L2_PIX_FMT_XRGB32, 1},
    {"HSV24", V4L2_PIX_FMT_HSV24, 1},
    {"HSV32", V4L2_PIX_FMT_HSV32, 1},
    {"Y8", V4L2_PIX_FMT_GREY, 1},
    {"Y10", V4L2_PIX_FMT_Y10, 1},
    {"Y12", V4L2_PIX_FMT_Y12, 1},
    {"Y16", V4L2_PIX_FMT_Y16, 1},
    {"UYVY", V4L2_PIX_FMT_UYVY, 1},
    {"VYUY", V4L2_PIX_FMT_VYUY, 1},
    {"YUYV", V4L2_PIX_FMT_YUYV, 1},
    {"YVYU", V4L2_PIX_FMT_YVYU, 1},
    {"YUV32", V4L2_PIX_FMT_YUV32, 1},
    {"AYUV32", V4L2_PIX_FMT_AYUV32, 1},
    {"XYUV32", V4L2_PIX_FMT_XYUV32, 1},
    {"VUYA32", V4L2_PIX_FMT_VUYA32, 1},
    {"VUYX32", V4L2_PIX_FMT_VUYX32, 1},
    {"YUVA32", V4L2_PIX_FMT_YUVA32, 1},
    {"YUVX32", V4L2_PIX_FMT_YUVX32, 1},
    {"NV12", V4L2_PIX_FMT_NV12, 1},
    {"NV12M", V4L2_PIX_FMT_NV12M, 2},
    {"NV21", V4L2_PIX_FMT_NV21, 1},
    {"NV21M", V4L2_PIX_FMT_NV21M, 2},
    {"NV16", V4L2_PIX_FMT_NV16, 1},
    {"NV16M", V4L2_PIX_FMT_NV16M, 2},
    {"NV61", V4L2_PIX_FMT_NV61, 1},
    {"NV61M", V4L2_PIX_FMT_NV61M, 2},
    {"NV24", V4L2_PIX_FMT_NV24, 1},
    {"NV42", V4L2_PIX_FMT_NV42, 1},
    {"YUV420M", V4L2_PIX_FMT_YUV420M, 3},
    {"YUV422M", V4L2_PIX_FMT_YUV422M, 3},
    {"YUV444M", V4L2_PIX_FMT_YUV444M, 3},
    {"YVU420M", V4L2_PIX_FMT_YVU420M, 3},
    {"YVU422M", V4L2_PIX_FMT_YVU422M, 3},
    {"YVU444M", V4L2_PIX_FMT_YVU444M, 3},
    {"SBGGR8", V4L2_PIX_FMT_SBGGR8, 1},
    {"SGBRG8", V4L2_PIX_FMT_SGBRG8, 1},
    {"SGRBG8", V4L2_PIX_FMT_SGRBG8, 1},
    {"SRGGB8", V4L2_PIX_FMT_SRGGB8, 1},
    {"SBGGR10_DPCM8", V4L2_PIX_FMT_SBGGR10DPCM8, 1},
    {"SGBRG10_DPCM8", V4L2_PIX_FMT_SGBRG10DPCM8, 1},
    {"SGRBG10_DPCM8", V4L2_PIX_FMT_SGRBG10DPCM8, 1},
    {"SRGGB10_DPCM8", V4L2_PIX_FMT_SRGGB10DPCM8, 1},
    {"SBGGR10", V4L2_PIX_FMT_SBGGR10, 1},
    {"SGBRG10", V4L2_PIX_FMT_SGBRG10, 1},
    {"SGRBG10", V4L2_PIX_FMT_SGRBG10, 1},
    {"SRGGB10", V4L2_PIX_FMT_SRGGB10, 1},
    {"SBGGR10P", V4L2_PIX_FMT_SBGGR10P, 1},
    {"SGBRG10P", V4L2_PIX_FMT_SGBRG10P, 1},
    {"SGRBG10P", V4L2_PIX_FMT_SGRBG10P, 1},
    {"SRGGB10P", V4L2_PIX_FMT_SRGGB10P, 1},
    {"SBGGR12", V4L2_PIX_FMT_SBGGR12, 1},
    {"SGBRG12", V4L2_PIX_FMT_SGBRG12, 1},
    {"SGRBG12", V4L2_PIX_FMT_SGRBG12, 1},
    {"SRGGB12", V4L2_PIX_FMT_SRGGB12, 1},
    {"SBGGR16", V4L2_PIX_FMT_SBGGR16, 1},
    {"SGBRG16", V4L2_PIX_FMT_SGBRG16, 1},
    {"SGRBG16", V4L2_PIX_FMT_SGRBG16, 1},
    {"SRGGB16", V4L2_PIX_FMT_SRGGB16, 1},
    {"IPU3_SBGGR10", V4L2_PIX_FMT_IPU3_SBGGR10, 1},
    {"IPU3_SGBRG10", V4L2_PIX_FMT_IPU3_SGBRG10, 1},
    {"IPU3_SGRBG10", V4L2_PIX_FMT_IPU3_SGRBG10, 1},
    {"IPU3_SRGGB10", V4L2_PIX_FMT_IPU3_SRGGB10, 1},
    {"IPU3_Y10", V4L2_PIX_FMT_IPU3_Y10, 1},
    {"DV", V4L2_PIX_FMT_DV, 1},
    {"MJPEG", V4L2_PIX_FMT_MJPEG, 1},
    {"MPEG", V4L2_PIX_FMT_MPEG, 1},
};

// Human-readable names for v4l2_field values (diagnostics only).
static const struct {
    const char *name;
    enum v4l2_field field;
} fields[] = {
    {"any", V4L2_FIELD_ANY},
    {"none", V4L2_FIELD_NONE},
    {"top", V4L2_FIELD_TOP},
    {"bottom", V4L2_FIELD_BOTTOM},
    {"interlaced", V4L2_FIELD_INTERLACED},
    {"seq-tb", V4L2_FIELD_SEQ_TB},
    {"seq-bt", V4L2_FIELD_SEQ_BT},
    {"alternate", V4L2_FIELD_ALTERNATE},
    {"interlaced-tb", V4L2_FIELD_INTERLACED_TB},
    {"interlaced-bt", V4L2_FIELD_INTERLACED_BT},
};

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

/// Returns the printable name for a v4l2_field value, or "unknown".
static const char *v4l2_field_name(enum v4l2_field field)
{
    for (unsigned int i = 0; i < ARRAY_SIZE(fields); ++i) {
        if (fields[i].field == field)
            return fields[i].name;
    }
    return "unknown";
}

/// Looks up the format-info entry for a fourcc; nullptr when unknown.
static const struct v4l2_format_info *v4l2_format_by_fourcc(unsigned int fourcc)
{
    for (unsigned int i = 0; i < ARRAY_SIZE(pixel_formats); ++i) {
        if (pixel_formats[i].fourcc == fourcc)
            return &pixel_formats[i];
    }
    return nullptr;
}

/// Returns the printable name for a fourcc. Unknown codes are decoded into a
/// 4-character string held in a function-local static buffer.
/// NOTE(review): the static buffer makes this non-reentrant / not
/// thread-safe for unknown fourccs — acceptable for logging only.
static const char *v4l2_format_name(unsigned int fourcc)
{
    const struct v4l2_format_info *info = v4l2_format_by_fourcc(fourcc);
    if (info)
        return info->name;

    static char name[5];
    for (unsigned int i = 0; i < 4; ++i) {
        name[i] = fourcc & 0xff;
        fourcc >>= 8;
    }
    name[4] = '\0';
    return name;
}

#define LOGD(...) \
    do { \
        printf(__VA_ARGS__); \
        printf("\n"); \
    } while (0)
#define DBG(fmt, args...) LOGD("%s:%d, " fmt, __FUNCTION__, __LINE__, ##args);

// Profiling counters shared with other translation units; dumped and reset
// once per second by dequeueImageBuffer().
extern uint64_t dq_elapsed_ns;
extern uint64_t get_elapsed_ns;
extern uint64_t sum_elapsed_ns;
extern uint64_t corr_elapsed_ns;
extern uint64_t max_elapsed_ns;
extern uint64_t value_elapsed_ns;
extern uint64_t rot_elapsed_ns;
extern uint64_t pix_elapsed_ns;
extern uint64_t dropped_count;

// constexpr char videoDevice[] = "/dev/video0";

VeyeIMX287m::VeyeIMX287m() {}

/// Stops worker threads, stops streaming, unmaps the capture buffers and
/// closes the camera fd.
VeyeIMX287m::~VeyeIMX287m()
{
    // FIX: joining a never-started std::jthread throws std::system_error
    // (and terminates inside a destructor) — guard every join with
    // joinable(). m_streamThread in particular is never launched because
    // startStream() has the launch commented out.
    for (auto &t : m_calcThreads) {
        if (t.joinable()) {
            t.request_stop();
            t.join();
        }
    }
    if (m_streamThread.joinable()) {
        m_streamThread.request_stop();
        m_streamThread.join();
    }

    const auto radxa_buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    if (ioctl(m_cam_fd, VIDIOC_STREAMOFF, &radxa_buf_type) == -1) {
        std::cout << "cannot stop stream" << std::endl;
    }

#ifdef RADXA_ZERO_3E
    // NOTE(review): buffers were mapped with the driver-reported
    // planes[0].length (see initCam); unmapping with radxa_raw_img_size
    // assumes the two are equal — TODO confirm.
    for (const auto &buffer : buffers) {
        if (munmap(buffer.mem[0], radxa_raw_img_size) < 0) {
            DBG("Munmap failed!!.");
        }
    }
#else
    for (const auto &buffer : m_videoBuffers) {
        if (munmap(buffer, img_size) < 0) {
            DBG("Munmap failed!!.");
        }
    }
#endif // RADXA_ZERO_3E

    if (m_cam_fd >= 0) {
        if (close(m_cam_fd) == -1) {
            std::cout << __func__ << ": cannot close camera: " << strerror(errno)
                      << std::endl;
        }
    };
    std::cout << "camera closed" << std::endl;
}

/// Discovers cameras. Currently constructs and initializes a single device;
/// returns an empty vector when any initialization step fails.
/// NOTE(review): template arguments in this signature were lost in the
/// original source (stripped angle brackets) and are reconstructed from the
/// body (`make_shared` + `return {cam}`) — confirm against the header.
std::vector<std::shared_ptr<VeyeIMX287m>> VeyeIMX287m::search()
{
    // return only one camera for now
    std::cout << std::boolalpha;
    auto cam = std::make_shared<VeyeIMX287m>();
    if (!cam->init())
        return {};
    if (!cam->setExposureTimeUs(30))
        return {};
    if (!cam->setLaserLevel(1))
        return {};
    if (!cam->setGain(2))
        return {};
    if (!cam->setSomething(0)) {
        return {};
    }
    return {cam};
}

/// Starts V4L2 streaming and launches the per-buffer processing threads.
bool VeyeIMX287m::startStream()
{
    const auto radxa_buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    const auto ret = ioctl(m_cam_fd, VIDIOC_STREAMON, &radxa_buf_type);
    if (ret != 0) {
        std::cerr << "ioctl(VIDIOC_STREAMON) failed: " << errno << " ("
                  << strerror(errno) << ")" << std::endl;
        return false;
    }
    std::cout << "stream started" << std::endl;

    // m_streamThread = std::jthread{&VeyeIMX287m::dequeueFrameLoop, this};
    for (auto &t : m_calcThreads) {
        t = std::jthread{&VeyeIMX287m::calcFrameLoop, this};
    }
    return true;
}

/// Opens and configures the camera device.
bool VeyeIMX287m::init()
{
    if (!openCam())
        return false;
    // if (!selectCam())
    //     return false;
    if (!initCam())
        return false;
    return true;
}

/// Sets the exposure time.
/// @param valueUs exposure time, microseconds (passed through to the driver).
bool VeyeIMX287m::setExposureTimeUs(int valueUs)
{
    std::cout << __func__ << ": " << V4L2_CID_EXPOSURE << " - " << valueUs
              << std::endl
              << std::flush;
    /*
     * Shutter Time. Value is from 8721ns to 8721*885ns, must be integral
     * multiple of 8721ns.
     * 8721xN(N =1,2,3,4,5.....855)
     */
    // TODO(review): the quantization/clamping to the 8721 ns step described
    // above is currently disabled; the raw value is passed to the driver.
    return setCamParam(V4L2_CID_EXPOSURE, valueUs);
}

/// Sets analog gain. FIXME: temporarily a no-op workaround for imx287llr.
bool VeyeIMX287m::setGain(int value)
{
    std::cout << __func__ << ": " << value << std::endl << std::flush;
    // return setCamParam(V4L2_CID_GAIN, value);
    return true;
}

/// Sets the laser level. FIXME: temporarily a no-op workaround for imx287llr.
bool VeyeIMX287m::setLaserLevel(int value)
{
    std::cout << __func__ << ": " << value << std::endl << std::flush;
    // return setCamParam(V4L2_CID_FLASH_TIMEOUT, value);
    return true;
}

/// FIXME: temporarily a no-op workaround for imx287llr.
bool VeyeIMX287m::setSomething(int value)
{
    std::cout << __func__ << ": " << value << std::endl << std::flush;
    // return setCamParam(V4L2_CID_FLASH_INTENSITY, value);
    return true;
}

/// Sets a V4L2 control and reads it back for verification.
/// FIXME(radxa): currently short-circuited to a no-op; the code below the
/// early return is intentionally kept for when the workaround is removed.
bool VeyeIMX287m::setCamParam(unsigned int v4l2controlId, int value)
{
    std::cout << "radxa: skip setCamParam" << std::endl;
    return true;

    v4l2_control ctl{v4l2controlId, value};
    if (ioctl(m_cam_fd, VIDIOC_S_CTRL, &ctl) < 0) {
        fprintf(stderr, "cannot set cam param: id - %d, error - '%s'\n",
                v4l2controlId, strerror(errno));
        fflush(stderr);
        return false;
    }
    if (ioctl(m_cam_fd, VIDIOC_G_CTRL, &ctl) < 0) {
        fprintf(stderr, "cannot get cam param: id - %d, error - '%s'\n",
                v4l2controlId, strerror(errno));
        fflush(stderr);
        return false;
    }
    std::cout << __func__ << ": new value is " << ctl.value << std::endl;
    return true;
}

/// Opens the video device read/write; keeps the fd in m_cam_fd.
bool VeyeIMX287m::openCam()
{
    m_cam_fd = open(videoDevice, O_RDWR);
    if (m_cam_fd < 0) {
        fprintf(stderr, "cannot open cam '%s', error: '%s'\n", videoDevice,
                strerror(errno));
        return false;
    }
    return true;
}

/// Selects a capture input by index (VIDIOC_S_INPUT).
bool VeyeIMX287m::selectCam(int camIdx)
{
    int input = camIdx;
    const int ret = ioctl(m_cam_fd, VIDIOC_S_INPUT, &input);
    if (ret < 0) {
        fprintf(stderr, "cannot select cam: idx - %d, error - '%s'\n", camIdx,
                strerror(errno));
        return false;
    }
    return true;
}

/// Negotiates the capture format, requests BUFFER_COUNT mmap buffers, maps
/// them into process memory and queues them all. Only single-plane layouts
/// are supported.
bool VeyeIMX287m::initCam()
{
    int ret{-1};
    constexpr bool radxa_zero_3et{true};
    const auto radxa_buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

    if constexpr (!radxa_zero_3et) {
        v4l2_format format;
        memset(&format, 0, sizeof(v4l2_format));
        format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        format.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY;
        format.fmt.pix.width = img_width;
        format.fmt.pix.height = img_height;
        ret = ioctl(m_cam_fd, VIDIOC_TRY_FMT, &format);
        if (ret < 0) {
            fprintf(stderr, "cannot try cam format: error - '%s'\n",
                    strerror(errno));
            return false;
        }
        // TODO: remove this?
        format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        ret = ioctl(m_cam_fd, VIDIOC_S_FMT, &format);
        if (ret < 0) {
            fprintf(stderr, "cannot set cam format: error - '%s'\n",
                    strerror(errno));
            return false;
        }
    }

    v4l2_format fmt;
    memset(&fmt, 0, sizeof fmt);
    fmt.type = radxa_buf_type;
    if (ioctl(m_cam_fd, VIDIOC_G_FMT, &fmt) < 0) {
        printf("Unable to get format: %s (%d).\n", strerror(errno), errno);
        return false;
    }

    const auto width = fmt.fmt.pix_mp.width;
    const auto height = fmt.fmt.pix_mp.height;
    const int num_planes = fmt.fmt.pix_mp.num_planes;
    std::cout << "num_planes: " << num_planes << std::endl;
    if (num_planes != 1) {
        std::cerr << "multiple planes are not supported" << std::endl;
        return false;
    }

    printf("Video format: %s (%08x) %ux%u field %s, %u planes: \n",
           v4l2_format_name(fmt.fmt.pix_mp.pixelformat),
           fmt.fmt.pix_mp.pixelformat, fmt.fmt.pix_mp.width,
           fmt.fmt.pix_mp.height,
           v4l2_field_name((enum v4l2_field) fmt.fmt.pix_mp.field),
           fmt.fmt.pix_mp.num_planes);
    for (int i = 0; i < fmt.fmt.pix_mp.num_planes; i++) {
        printf(" * Stride %u, buffer size %u\n",
               fmt.fmt.pix_mp.plane_fmt[i].bytesperline,
               fmt.fmt.pix_mp.plane_fmt[i].sizeimage);
        fflush(stdout);
    }

    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof rb);
    rb.count = BUFFER_COUNT;
    if constexpr (radxa_zero_3et) {
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    } else {
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    }
    rb.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(m_cam_fd, VIDIOC_REQBUFS, &rb);
    if (ret < 0) {
        fprintf(stderr, "cannot set cam request buffers: ioctl error - '%s'\n",
                strerror(errno));
        return false;
    }
    std::cout << "buffers requested" << std::endl;
    // The driver may grant fewer buffers than requested.
    if (rb.count < BUFFER_COUNT) {
        fprintf(stderr, "cannot set cam request buffers\n");
        return false;
    }
    std::cout << "buffers count is ok: " << rb.count << std::endl;

    buffers.resize(rb.count);

    std::cout << "query buffers" << std::endl;
    for (uint32_t i = 0; i < rb.count; i++) {
        std::cout << "-----------------------------------------------------"
                  << std::endl;
        struct v4l2_buffer buf;
        struct v4l2_plane planes[VIDEO_MAX_PLANES];
        memset(&buf, 0, sizeof buf);
        memset(planes, 0, sizeof planes);
        buf.index = i;
        buf.type = rb.type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.length = VIDEO_MAX_PLANES;
        buf.m.planes = planes;

        std::cout << "run ioctl(VIDIOC_QUERYBUF) for buf #" << i << std::endl;
        ret = ioctl(m_cam_fd, VIDIOC_QUERYBUF, &buf);
        if (ret < 0) {
            std::cerr << "ioctl(VIDIOC_QUERYBUF) failed: " << errno << " ("
                      << strerror(errno) << ")" << std::endl;
            return false;
        }
        std::cout << "ioctl(VIDIOC_QUERYBUF) is ok: " << std::endl;
        std::cout << "buffer.length: " << buf.length << std::endl;
        std::cout << "buffer.m.offset: " << buf.m.offset << std::endl;

        buffers[i].idx = i;
        const auto length = buf.m.planes[0].length;
        const auto offset = buf.m.planes[0].m.mem_offset;
        buffers[i].mem[0] = mmap(0, length, PROT_READ | PROT_WRITE, MAP_SHARED,
                                 m_cam_fd, offset);
        if (buffers[i].mem[0] == MAP_FAILED) {
            std::cerr << "mmap() failed: " << errno << " (" << strerror(errno)
                      << ")" << std::endl;
            std::cerr << "length: " << length << std::endl;
            std::cerr << "offset: " << offset << std::endl;
            return false;
        }
        m_videoBuffers[i] = (uint8_t *) buffers[i].mem[0];
        // FIX: size/padding are per-plane arrays — index them with the plane
        // index (always 0 here, single-plane enforced above), not the buffer
        // index `i`, which overran the arrays for i >= plane count.
        buffers[i].size[0] = length;
        buffers[i].padding[0] = 0;

        printf("Buffer %u/%u mapped at address %p.\n", buffers[i].idx, i,
               buffers[i].mem[0]);

        // Hand the buffer to the driver so capture can fill it.
        ret = ioctl(m_cam_fd, VIDIOC_QBUF, &buf);
        if (ret != 0) {
            std::cerr << "ioctl(VIDIOC_QBUF) failed: " << errno << " ("
                      << strerror(errno) << ")" << std::endl;
            return false;
        }
        std::cout << "ioctl(VIDIOC_QBUF) is OK" << std::endl;
    }
    fflush(stdout);
    fflush(stderr);

    DBG("cam init done.");
    return true;
}

/// Dedicated dequeue loop — currently unused (dequeueing happens directly in
/// calcFrameLoop); kept as a placeholder.
void VeyeIMX287m::dequeueFrameLoop(std::stop_token stopToken)
{
    // std::cout << "VeyeIMX287m: start stream" << std::endl;
    // while (!stopToken.stop_requested()) {
    //     size_t imageIndex{};
    //     if (!dequeueImageBuffer(imageIndex))
    //         continue;
    // }
    // std::cout << "VeyeIMX287m: stream interruption requested" << std::endl;
}

/// Worker loop: dequeues a frame, copies it out of the mmap'ed buffer into
/// m_images[idx], rotates it and derives pixel data, until stop is requested.
void VeyeIMX287m::calcFrameLoop(std::stop_token stopToken)
{
    QElapsedTimer t;
    while (!stopToken.stop_requested()) {
        size_t bufferIdx{};
        if (!dequeueImageBuffer(bufferIdx)) {
            std::cout << "AAAAAAAAAAAAAAAAAA" << std::endl;
            continue;
        }
        // NOTE(review): m_images[bufferIdx] is accessed without holding a
        // per-image mutex (locking is commented out) — confirm the threading
        // model if more than one calc thread runs.
        auto &image = m_images[bufferIdx];
        {
            t.start();
            auto &src = *(Image::radxa_data_t *) m_videoBuffers[bufferIdx];
            auto &dst = image.data;
            Image::copy(dst, src);
            get_elapsed_ns += t.nsecsElapsed();
        }
        image.rotate();
        [[maybe_unused]] const auto pixels = image.sharedPixels();
        ++processedCounter;
    }
}

/// Dequeues one filled capture buffer (blocking DQBUF), records frame
/// metadata into m_images[index] and immediately re-queues the buffer.
/// Also prints a once-per-second profiling summary and resets the counters.
/// @param imageIndex out: index of the buffer/image that was filled.
/// @return false on any ioctl failure or an out-of-range buffer index.
bool VeyeIMX287m::dequeueImageBuffer(size_t &imageIndex)
{
    static struct timeval curr, prev;
    static uint16_t counter = 0;
    gettimeofday(&curr, NULL);
    double elapsedTime = (curr.tv_sec - prev.tv_sec) * 1000.0;   // sec to ms
    elapsedTime += (curr.tv_usec - prev.tv_usec) / 1000.0;       // us to ms
    if (elapsedTime > 1000. && processedCounter != 0) {
        fprintf(stderr,
                "fps: %d\tdropped: %lu sec: %ld "
                "dq: %lu get: %lu rot: %lu pix: %lu sum: %lu corr: "
                "%lu val: %lu\n",
                counter, dropped_count, curr.tv_sec % 1000,
                dq_elapsed_ns / 1000 / processedCounter,
                get_elapsed_ns / 1000 / processedCounter,
                rot_elapsed_ns / 1000 / processedCounter,
                pix_elapsed_ns / 1000 / processedCounter,
                sum_elapsed_ns / 1000 / processedCounter,
                corr_elapsed_ns / 1000 / processedCounter,
                value_elapsed_ns / 1000 / processedCounter);
        dq_elapsed_ns = 0;
        get_elapsed_ns = 0;
        sum_elapsed_ns = 0;
        corr_elapsed_ns = 0;
        max_elapsed_ns = 0;
        value_elapsed_ns = 0;
        rot_elapsed_ns = 0;
        pix_elapsed_ns = 0;
        dropped_count = 0;
        counter = 0;
        processedCounter = 0;
        prev = curr;
    }

    int ret;
    struct v4l2_buffer buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    memset(&buf, 0, sizeof(buf));
    memset(planes, 0, sizeof planes);
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.length = VIDEO_MAX_PLANES;
    buf.m.planes = planes;

    // FIX: the index hint was computed from two separate post-increments of
    // requestIdx, advancing it twice per call; a single increment is enough.
    // (The driver overwrites buf.index on DQBUF anyway.)
    static uint16_t requestIdx{0};
    buf.index = requestIdx++ % BUFFER_COUNT;

    {
        QElapsedTimer t;
        t.start();
        {
            std::lock_guard lock(m_camMtx);
            ret = ioctl(m_cam_fd, VIDIOC_DQBUF, &buf);
        }
        if (ret != 0) {
            std::cerr << "ioctl(VIDIOC_DQBUF) failed: " << errno << " ("
                      << strerror(errno) << ")" << std::endl;
            return false;
        }
        // buf.index is unsigned, so only the upper bound needs checking.
        if (buf.index >= BUFFER_COUNT) {
            std::cerr << "invalid buffer index: " << buf.index << std::endl;
            return false;
        }
        ++counter;
        dq_elapsed_ns += t.nsecsElapsed();
    }

    imageIndex = buf.index;
    auto &image = m_images[buf.index];
    image.height = img_height;
    image.width = img_width;
    // image.counters.encoderPosition = RotaryEncoder::instance()->position();
    image.counters.measurementCounter = buf.sequence;

    // FIX: on the very first frame prevCounter is initialized to the current
    // sequence, so the old unconditional `seq - prev - 1` added -1 to an
    // unsigned counter. Count a drop only when frames were actually skipped.
    static int64_t prevCounter = buf.sequence;
    const int64_t delta = static_cast<int64_t>(buf.sequence) - prevCounter;
    if (delta > 1)
        dropped_count += delta - 1;
    prevCounter = buf.sequence;

    image.counters.timestampUs =
        buf.timestamp.tv_sec * 1000 * 1000 + buf.timestamp.tv_usec;

    // Hand the buffer straight back to the driver.
    {
        std::lock_guard lock(m_camMtx);
        ret = ioctl(m_cam_fd, VIDIOC_QBUF, &buf);
    }
    if (ret != 0) {
        std::cerr << "ioctl(VIDIOC_QBUF) failed: " << errno << " ("
                  << strerror(errno) << ")" << std::endl;
        return false;
    }
    return true;
}

/// Synchronous single-frame capture: dequeues a buffer, moves the metadata
/// image out and copies the raw frame data into it.
/// NOTE(review): copying happens after the buffer was re-queued inside
/// dequeueImageBuffer(), so the driver may already be overwriting the mmap'ed
/// memory while it is read — TODO confirm this race is acceptable.
bool VeyeIMX287m::getImage(Image &image)
{
    size_t bufferIdx{};
    if (!dequeueImageBuffer(bufferIdx)) {
        std::cout << "AAAAAAAAAAAAAAAAAA" << std::endl;
        return false;
    }
    image = std::move(m_images[bufferIdx]);
    {
        QElapsedTimer t;
        t.start();
        auto &src = *(Image::radxa_data_t *) m_videoBuffers[bufferIdx];
        auto &dst = image.data;
        Image::copy(dst, src);
        get_elapsed_ns += t.nsecsElapsed();
    }
    return true;
}