
Using V4L2 in Qt to capture YUYV (4:2:2) camera data, convert it to a QImage, and display it on screen

Background

The project uses an OV3703 driver-free USB camera on an i.MX6ULL platform, but the QCamera library only handles RGB data and cannot deal with YUYV, so the YUYV (4:2:2) frames have to be converted to RGB by hand before they can be displayed. Luckily a German dentist had already written a V4L2 middleware that exposes the raw YUYV data, respect…

1. Initialize the capture timer

    // Display one frame at a fixed interval
    camera_capture_timer = new QTimer(this);
    connect(camera_capture_timer, SIGNAL(timeout()), this, SLOT(videoShow()));

    if(0 == v4l2_open()){
        printf("Camera opened successfully!\n");
        // The camera defaults to 30 fps, so even though the timer fires every 10 ms,
        // at most 30 calls per second actually deliver a frame; the rest block in the ioctl.
        camera_capture_timer->start(10);
    }
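
The 10 ms interval is just a convenient polling rate. If you prefer to derive the interval from the frame rate the driver actually reports, VIDIOC_G_PARM can be queried after v4l2_open() has succeeded; a minimal sketch, assuming the same video_fd and camera_capture_timer members as above (the 33 ms fallback is my own choice):

    // Size the timer from the driver's reported time-per-frame (sketch)
    struct v4l2_streamparm parm;
    memset(&parm, 0, sizeof(parm));
    parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    int interval_ms = 33;   // assumed fallback (~30 fps) if the query fails
    if (0 == ioctl(video_fd, VIDIOC_G_PARM, &parm) &&
        parm.parm.capture.timeperframe.denominator != 0) {
        interval_ms = 1000 * parm.parm.capture.timeperframe.numerator /
                      parm.parm.capture.timeperframe.denominator;
    }
    camera_capture_timer->start(interval_ms);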

2. Initialize V4L2


int MainWindow::v4l2_open()
{
    unsigned int i = 0;

    /* 1. Open the camera device */
    video_fd = open("/dev/video1", O_RDWR);
    if(video_fd < 0){
        perror("open camera failed");
        return -1;
    }

    /* 2. Query the device capabilities (VIDIOC_QUERYCAP: video capture, streaming I/O via mmap, ...) */
    struct v4l2_capability capability;
    if(0 == ioctl(video_fd, VIDIOC_QUERYCAP, &capability)){
        if((capability.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0){
            fprintf(stderr, "This device does not support video capture!\n");
            ::close(video_fd);
            return -2;
        }
        if((capability.capabilities & V4L2_CAP_STREAMING) == 0){
            fprintf(stderr, "This device does not support streaming I/O (mmap)!\n");
            ::close(video_fd);
            return -3;
        }
    }

    /* 3. Enumerate the supported pixel formats (VIDIOC_ENUM_FMT: MJPG, YUYV, ...)
          and, for each format, the supported frame sizes (VIDIOC_ENUM_FRAMESIZES) */
    struct v4l2_fmtdesc fmtdesc;
    memset(&fmtdesc, 0, sizeof(fmtdesc));
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;  // video capture device
    while(1) {
        fmtdesc.index = i++;
        // Query the next supported format
        if(0 == ioctl(video_fd, VIDIOC_ENUM_FMT, &fmtdesc)){
            printf("Supported format: %s, %c%c%c%c\n", fmtdesc.description,
                                            fmtdesc.pixelformat & 0xff,
                                            fmtdesc.pixelformat >> 8 & 0xff,
                                            fmtdesc.pixelformat >> 16 & 0xff,
                                            fmtdesc.pixelformat >> 24 & 0xff);
            // 1. Default frame rate (VIDIOC_G_PARM)
            struct v4l2_streamparm streamparm;
            memset(&streamparm, 0, sizeof(streamparm));
            streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            if(0 == ioctl(video_fd, VIDIOC_G_PARM, &streamparm)) {
                printf("Default frame rate: %d fps\n", streamparm.parm.capture.timeperframe.denominator);
            }
            // 2. List the frame sizes supported by this format (VIDIOC_ENUM_FRAMESIZES)
            struct v4l2_frmsizeenum frmsizeenum;
            memset(&frmsizeenum, 0, sizeof(frmsizeenum));
            frmsizeenum.pixel_format = fmtdesc.pixelformat;   // enumerate sizes for this format
            printf("Supported frame sizes:\n");
            int j = 0;   // restart the size index for every format
            while(1){
                frmsizeenum.index = j++;
                if(0 == ioctl(video_fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum))
                    printf("%d x %d\n", frmsizeenum.discrete.width, frmsizeenum.discrete.height);
                else break;
            }
            printf("\n");
        }else {
            break;
        }
    }
    /* 4. Set the capture type, resolution and pixel format (VIDIOC_S_FMT) */
    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;        /* video capture */
    format.fmt.pix.width = VIDEO_WIDTH;               /* width */
    format.fmt.pix.height = VIDEO_HEIGHT;             /* height */
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;   /* output format: YUYV */
    format.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if(0 > ioctl(video_fd, VIDIOC_S_FMT, &format)){
        perror("Failed to set camera format");
        ::close(video_fd);
        return -4;
    }
#if 0
    /* Debug: dump the format we requested and the format the driver actually negotiated */
    printf("===============S (requested)================\n");
    printf("fmt.type:\t\t%d\n", format.type);
    printf("pix.pixelformat:\t%c%c%c%c\n",
           format.fmt.pix.pixelformat & 0xFF,
           (format.fmt.pix.pixelformat >> 8) & 0xFF,
           (format.fmt.pix.pixelformat >> 16) & 0xFF,
           (format.fmt.pix.pixelformat >> 24) & 0xFF);
    printf("pix.width:\t\t%d\n", format.fmt.pix.width);
    printf("pix.height:\t\t%d\n", format.fmt.pix.height);
    printf("pix.field:\t\t%d\n", format.fmt.pix.field);
    printf("===================================\n\n");

    printf("===============G (negotiated)================\n");
    if(-1 == ioctl(video_fd, VIDIOC_G_FMT, &format)){   // read back the negotiated format
        perror("get format failed!");
        return -1;
    }
    printf("fmt.type:\t\t%d\n", format.type);
    printf("pix.pixelformat:\t%c%c%c%c\n",
           format.fmt.pix.pixelformat & 0xFF,
           (format.fmt.pix.pixelformat >> 8) & 0xFF,
           (format.fmt.pix.pixelformat >> 16) & 0xFF,
           (format.fmt.pix.pixelformat >> 24) & 0xFF);
    printf("pix.width:\t\t%d\n", format.fmt.pix.width);
    printf("pix.height:\t\t%d\n", format.fmt.pix.height);
    printf("pix.field:\t\t%d\n", format.fmt.pix.field);
    printf("=====================================\n");
#endif
    /* 5. Request buffers from the kernel (VIDIOC_REQBUFS: count, mmap),
          query each buffer (VIDIOC_QUERYBUF), map it into user space (mmap)
          and queue it onto the kernel's incoming queue (VIDIOC_QBUF) */
    struct v4l2_requestbuffers requestbuffers;
    memset(&requestbuffers, 0, sizeof(requestbuffers));
    requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    requestbuffers.count = 4;                     // number of buffers to request
    requestbuffers.memory = V4L2_MEMORY_MMAP;     // use memory-mapped I/O
    if(0 == ioctl(video_fd, VIDIOC_REQBUFS, &requestbuffers)){
        for(i = 0; i < requestbuffers.count; i++){
            struct v4l2_buffer buffer;
            memset(&buffer, 0, sizeof(buffer));
            buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buffer.index = i;
            buffer.memory = V4L2_MEMORY_MMAP;
            /* Query the buffer to obtain its length and offset, then map it into user space */
            if(0 == ioctl(video_fd, VIDIOC_QUERYBUF, &buffer)){
                userbuff[i] = (char *)mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, video_fd, buffer.m.offset);
                userbuff_length[i] = buffer.length;
                /* Hand the mapped buffer to the kernel's incoming queue (VIDIOC_QBUF) */
                ioctl(video_fd, VIDIOC_QBUF, &buffer);
            }
        }
    }
    else{
        perror("Failed to request buffers");
        ::close(video_fd);
        return -5;
    }

    /* 6. Start streaming (VIDIOC_STREAMON) */
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(0 > ioctl(video_fd, VIDIOC_STREAMON, &type)){
        perror("Failed to start the video stream");
        return -6;
    }
    return 0;
}
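
For orientation, the listings in this post rely on a handful of MainWindow members and macros that are never shown. A guessed skeleton of the capture-related parts is below; the names are taken from the code, but the exact declarations (and the 640x480 values) are my own assumptions:

    // Guessed skeleton of the MainWindow members used above (not the original header)
    #include <QMainWindow>
    #include <QTimer>
    #include <QImage>
    #include <QLabel>

    #define VIDEO_WIDTH   640    // assumed capture width
    #define VIDEO_HEIGHT  480    // assumed capture height

    class MainWindow : public QMainWindow
    {
        Q_OBJECT
    public slots:
        void videoShow();                         // timer callback (section 3)
    private:
        int v4l2_open();
        int v4l2_close();
        QImage yuv422ToQImage(const char *yuvData, int width, int height);

        int           video_fd;                   // camera file descriptor
        char         *userbuff[4];                // mmap'ed frame buffers
        unsigned int  userbuff_length[4];         // length of each mapped buffer
        QTimer       *camera_capture_timer;       // drives videoShow()
        QImage       *image;                      // last converted frame
        QLabel       *camera_content_show_label;  // preview widget
    };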

3. Timer callback: capture and display a frame

void MainWindow::videoShow()
{
    QPixmap pix;
    if (image == NULL)   // allocate the frame image once; re-allocating every tick would leak
        image = new QImage(VIDEO_WIDTH, VIDEO_HEIGHT, QImage::Format_RGB16);

    /* Grab one frame: dequeue a filled buffer from the kernel's outgoing queue (VIDIOC_DQBUF) */
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    if(0 != ioctl(video_fd, VIDIOC_DQBUF, &buffer)){
        return;   // nothing to requeue if the dequeue failed
    }

    *image = yuv422ToQImage(userbuff[buffer.index], VIDEO_WIDTH, VIDEO_HEIGHT);
    if (take_photo_flag == 1) {
        // Save the current frame
        if (saveImageToPhotoFolder(*image, PHOTO_SAVE_PATH, QString("%1.png").arg(para->record_number))) {
            qDebug() << "Image saved successfully.";
        } else {
            qDebug() << "Failed to save image.";
        }
        if(0 == v4l2_close()) {
            camera_capture_timer->stop();
            return;
        }
    } else {
        /* Show the frame on the label */
        pix = QPixmap::fromImage(*image);
        camera_content_show_label->setSizePolicy(QSizePolicy::Ignored, QSizePolicy::Ignored);
        camera_content_show_label->setPixmap(pix);
    }

    /* Put the used buffer back onto the kernel's incoming queue (VIDIOC_QBUF) */
    if(0 > ioctl(video_fd, VIDIOC_QBUF, &buffer)){
        perror("Failed to requeue buffer");
    }
}
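
Because the label's size policy is set to Ignored, the pixmap is shown at its native resolution. If the preview should follow the label's size instead, the image can be scaled before it is displayed; a small variation of the display branch, using the same member names as above:

    /* Scale the frame to the label while keeping the aspect ratio (sketch) */
    pix = QPixmap::fromImage(*image).scaled(camera_content_show_label->size(),
                                            Qt::KeepAspectRatio,
                                            Qt::SmoothTransformation);
    camera_content_show_label->setPixmap(pix);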

4. YUYV to QImage conversion

// Convert a YUYV (YUV 4:2:2) frame to a QImage
QImage MainWindow::yuv422ToQImage(const char* yuvData, int width, int height)
{
    // Create an empty QImage in RGB16 (RGB565) format
    QImage image(width, height, QImage::Format_RGB16);

    for (int y = 0; y < height; ++y) {
        for (int x = 0; x < width; x += 2) {
            // Extract the YUYV components: two pixels share one U and one V
            uchar y0 = yuvData[y * width * 2 + x * 2];
            uchar u  = yuvData[y * width * 2 + x * 2 + 1];
            uchar y1 = yuvData[y * width * 2 + x * 2 + 2];
            uchar v  = yuvData[y * width * 2 + x * 2 + 3];

            // Re-center U and V around zero
            int u_offset = u - 128;
            int v_offset = v - 128;

            // Compute the RGB components with shift-and-add approximations of the BT.601 coefficients
            int r0 = y0 + v_offset + (v_offset >> 2) + (v_offset >> 3) + (v_offset >> 5);
            int g0 = y0 - (u_offset >> 2) - (u_offset >> 4) - (u_offset >> 5) - (v_offset >> 1) - (v_offset >> 3) - (v_offset >> 4) - (v_offset >> 5);
            int b0 = y0 + u_offset + (u_offset >> 1) + (u_offset >> 2) + (u_offset >> 6);

            int r1 = y1 + v_offset + (v_offset >> 2) + (v_offset >> 3) + (v_offset >> 5);
            int g1 = y1 - (u_offset >> 2) - (u_offset >> 4) - (u_offset >> 5) - (v_offset >> 1) - (v_offset >> 3) - (v_offset >> 4) - (v_offset >> 5);
            int b1 = y1 + u_offset + (u_offset >> 1) + (u_offset >> 2) + (u_offset >> 6);

            // Clamp the RGB components to avoid overflow
            r0 = qBound(0, r0, 255);
            g0 = qBound(0, g0, 255);
            b0 = qBound(0, b0, 255);
            r1 = qBound(0, r1, 255);
            g1 = qBound(0, g1, 255);
            b1 = qBound(0, b1, 255);

            // Pack the RGB components into RGB565
            ushort rgb565_1 = RGB565(r0, g0, b0);
            ushort rgb565_2 = RGB565(r1, g1, b1);

            // Store the RGB565 values
            *(ushort*)(image.bits() + y * image.bytesPerLine() + x * 2) = rgb565_1;
            if (x + 1 < width) {
                *(ushort*)(image.bits() + y * image.bytesPerLine() + (x + 1) * 2) = rgb565_2;
            }
        }
    }

    // Rotate the image by 270 degrees before returning it
    QTransform transform;
    transform.rotate(270);
    QImage rotatedImage = image.transformed(transform);

    return rotatedImage;
}
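
The shift-and-add expressions are an integer approximation of the usual BT.601 YUV-to-RGB formulas (1.402 ≈ 1.40625, 0.344 ≈ 0.34375, 0.714 ≈ 0.71875, 1.772 ≈ 1.765625). For comparison, a floating-point sketch of the same per-pixel conversion; the helper name is mine:

    // Reference BT.601 conversion that the shifts above approximate (sketch)
    static inline void yuvToRgbReference(int y, int u, int v, int &r, int &g, int &b)
    {
        const double du = u - 128.0;
        const double dv = v - 128.0;
        r = qBound(0, int(y + 1.402 * dv), 255);
        g = qBound(0, int(y - 0.344 * du - 0.714 * dv), 255);
        b = qBound(0, int(y + 1.772 * du), 255);
    }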

5. Close V4L2

int MainWindow::v4l2_close()
{
    /* 8. Stop capturing and close the video stream (VIDIOC_STREAMOFF),
          then close the camera device */
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(0 == ioctl(video_fd, VIDIOC_STREAMOFF, &type)){
        /* 9. Release the user-space mappings */
        for(int i = 0; i < 4; i++)
            munmap(userbuff[i], userbuff_length[i]);
        ::close(video_fd);
        video_fd = -1;   // mark the device as closed
        printf("Camera closed successfully!\n");
        return 0;
    }
    return -1;
}
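
v4l2_close() is only called from the photo path in section 3, so it is worth making sure the stream is also shut down when the window is destroyed. A minimal sketch of such a destructor, assuming the same members as above (whether the original project already does something equivalent is not shown):

    MainWindow::~MainWindow()
    {
        if (camera_capture_timer && camera_capture_timer->isActive())
            camera_capture_timer->stop();
        if (video_fd >= 0)          // still open (v4l2_close() resets it to -1)
            v4l2_close();           // stop streaming, unmap buffers, close the device
        delete image;               // QImage allocated lazily in videoShow()
    }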

Original article: https://blog.csdn.net/weixin_47752005/article/details/142852895
