• Hands-on mini project: embedded Linux image capture and transmission


    Project overview

         This hands-on project is built around the embedded Linux V4L2 capture framework and consists of the following parts:

    1. V4L2 video capture
    2. IPU color-space conversion
    3. framebuffer display
    4. transmission over a simple custom UDP protocol
    5. host-side software (Qt)

      First, the capture part.

    #include "includes.h"
    
    int fd_cam;
    struct cam_buffer *buffers=NULL;
    unsigned int n_buffers=0;
    int frameIndex = -1;    // index of the currently dequeued buffer, -1 means none
    
    void initVideo()
    {
        int ret;
        struct v4l2_capability cam_cap;        // device capability information
        struct v4l2_cropcap    cam_cropcap;    // capture/cropping capability
        struct v4l2_fmtdesc    cam_fmtdesc;    // enumerate supported formats: VIDIOC_ENUM_FMT
        struct v4l2_crop       cam_crop;       // image cropping/scaling window
        struct v4l2_format     cam_format;     // video standard, frame format, etc.
    
        /* Open the capture device */
        fd_cam = open(USB_VIDEO, O_RDWR);
        if (fd_cam < 0)
            printf("Can't open video device\n");
    
        /* VIDIOC_QUERYCAP: query basic information about the camera */
        ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
        if (ret < 0) {
            printf("Can't get device information: VIDIOC_QUERYCAP\n");
        }
        printf("Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
                cam_cap.driver, cam_cap.card, cam_cap.bus_info, (cam_cap.version >> 16) & 0XFF,
                (cam_cap.version >> 8) & 0XFF, cam_cap.version & 0XFF);
    
        /* VIDIOC_ENUM_FMT: enumerate all pixel formats the camera supports */
        cam_fmtdesc.index = 0;
        cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        printf("Support format:\n");
        while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1)
        {
            printf("\t%d.%s\n", cam_fmtdesc.index + 1, cam_fmtdesc.description);
            cam_fmtdesc.index++;
        }
    
        /* VIDIOC_CROPCAP: query the camera's cropping capability */
        cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
            printf("Default rect:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
                    cam_cropcap.defrect.left, cam_cropcap.defrect.top,
                    cam_cropcap.defrect.width, cam_cropcap.defrect.height);
            /* VIDIOC_S_CROP: set the capture window */
            cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            cam_crop.c = cam_cropcap.defrect;  // use the default capture window
            if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
                //printf("Can't set crop para\n");
            }
        }
        else {
            printf("Can't get cropcap para\n");
        }
    
        /* VIDIOC_S_FMT: set the frame format */
        cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        cam_format.fmt.pix.width = 640;
        cam_format.fmt.pix.height = 480;
        cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;  // must match a format the camera supports
        cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
        ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
        if (ret < 0) {
            printf("Can't set frame information\n");
        }
        /* VIDIOC_G_FMT: read back the frame format actually in effect */
        cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
        if (ret < 0) {
            printf("Can't get frame information\n");
        }
        printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
                cam_format.fmt.pix.width, cam_format.fmt.pix.height);
        ret = initBuffers();
        if (ret < 0) {
            printf("Buffers init error\n");
            //exit(-1);
        }
    }
    
    void closeVideo()
    {
        //stopCapture();
        //freeBuffers();
        close(fd_cam);
    }
    
    int initBuffers()
    {
        int ret;
        /* VIDIOC_REQBUFS: request frame buffers from the driver */
        struct v4l2_requestbuffers req;
        CLEAR(req);
        req.count=4;
        req.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
        req.memory = V4L2_MEMORY_MMAP;
        ret=ioctl(fd_cam, VIDIOC_REQBUFS, &req);
        if(ret<0){
            printf("Request frame buffers failed
    ");
            return -1;
        }
        if(req.count<2){
            printf("Request frame buffers while insufficient buffer memory
    ");
            return -1;
        }
        buffers = (struct cam_buffer*)calloc(req.count, sizeof(*buffers));
        if(!buffers){
            printf("Out of memory
    ");
            return -1;
        }
        for(n_buffers = 0; n_buffers < req.count; n_buffers++){
            struct v4l2_buffer buf;
            CLEAR(buf);
            // Query buffer number n_buffers to get its offset and size
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            buf.index = n_buffers;
            ret=ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
            if(ret<0 )
            {
                printf("VIDIOC_QUERYBUF %d failed
    ",n_buffers);
                return -1;
            }
            buffers[n_buffers].length = buf.length;
            // mmap the buffer into user space
            buffers[n_buffers].start =
             mmap(NULL, // start anywhere
                  buf.length,
                  PROT_READ | PROT_WRITE,
                  MAP_SHARED,
                  fd_cam, buf.m.offset);
            if(MAP_FAILED == buffers[n_buffers].start)
            {
                printf("mmap buffer%d failed
    ",n_buffers);
                return -1;
            }
    
        }
        return 0;
    }
    int startCapture()
    {
        unsigned int i;
        //struct v4l2_buffer buf;
        for(i=0;i<n_buffers;i++){
            struct v4l2_buffer buf;
            CLEAR(buf);
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory =V4L2_MEMORY_MMAP;
            buf.index = i;
            //fprintf(stderr, "n_buffers: %d\n", i);
            if(-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf))    {
                printf("VIDIOC_QBUF buffer%d failed
    ",i);
                return -1;
            }
        }
        enum v4l2_buf_type type;
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if(-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)){
             printf("VIDIOC_STREAMON error");
             return -1;
        }
        return 0;
    }
    int stopCapture()
    {
        enum v4l2_buf_type type;
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if(-1 == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)){
            printf("VIDIOC_STREAMOFF error
    ");
            return -1;
        }
        return 0;
    }
    int freeBuffers()
    {
        unsigned int i;
        for(i = 0; i < n_buffers; ++i){
            if(-1 == munmap(buffers[i].start, buffers[i].length)){
                printf("munmap buffer%d failed
    ",i);
                return -1;
            }
        }
        free(buffers);
        return 0;
    }
    int getFrame(void **frame_buf, size_t* len)
    {
        struct v4l2_buffer queue_buf;
        CLEAR(queue_buf);
        queue_buf.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        if(-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)){
            printf("VIDIOC_DQBUF error
    ");
            return -1;
        }
        printf("queue_buf.index=%d
    ",queue_buf.index);
        //pthread_rwlock_wrlock(&rwlock);
        *frame_buf = buffers[queue_buf.index].start;
        *len = buffers[queue_buf.index].length;
        frameIndex = queue_buf.index;
        //pthread_rwlock_unlock(&rwlock);
        return 0;
    }
    int backFrame()
    {
        if(frameIndex != -1){
            struct v4l2_buffer queue_buf;
            CLEAR(queue_buf);
            queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            queue_buf.memory = V4L2_MEMORY_MMAP;
            queue_buf.index = frameIndex;
            if(-1 == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)){
                printf("VIDIOC_QBUF error
    ");
                return -1;
            }
            return 0;
        }
        return -1;
    }
    
    /* Software YUYV-to-RGB24 conversion */
    int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
    {
        unsigned int in, out = 0;
        unsigned int pixel_16;
        unsigned char pixel_24[3];
        unsigned int pixel32;
        int y0, u, y1, v;
        struct timeval starttime,endtime;
        gettimeofday(&starttime,0);
        for(in = 0; in < width * height * 2; in += 4) {
            pixel_16 =
            yuv[in + 3] << 24 |
            yuv[in + 2] << 16 |
            yuv[in + 1] <<  8 |
            yuv[in + 0];
            y0 = (pixel_16 & 0x000000ff);
            u  = (pixel_16 & 0x0000ff00) >>  8;
            y1 = (pixel_16 & 0x00ff0000) >> 16;
            v  = (pixel_16 & 0xff000000) >> 24;
            pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
            pixel_24[0] = (pixel32 & 0x000000ff);
            pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
            pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
            //pthread_rwlock_wrlock(&rwlock);
            rgb[out++] = pixel_24[0];
            rgb[out++] = pixel_24[1];
            rgb[out++] = pixel_24[2];
            //pthread_rwlock_unlock(&rwlock);
            pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
            pixel_24[0] = (pixel32 & 0x000000ff);
            pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
            pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
            //pthread_rwlock_wrlock(&rwlock);
            rgb[out++] = pixel_24[0];
            rgb[out++] = pixel_24[1];
            rgb[out++] = pixel_24[2];
            //pthread_rwlock_unlock(&rwlock);
        }
         gettimeofday(&endtime,0);
         double timeuse = 1000000*(endtime.tv_sec - starttime.tv_sec)+endtime.tv_usec-starttime.tv_usec;
         timeuse /= 1000;  // divide by 1000 for milliseconds, by 1000000 for seconds, by 1 for microseconds
         printf("yuv2rgb use %f ms\n", timeuse);
        return 0;
    }
    int convert_yuv_to_rgb_pixel(int y, int u, int v)
    {
        unsigned int pixel32 = 0;
        unsigned char *pixel = (unsigned char *)&pixel32;
        int r, g, b;
        r = y + (1.370705 * (v-128));
        g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
        b = y + (1.732446 * (u-128));
        if(r > 255) r = 255;
        if(g > 255) g = 255;
        if(b > 255) b = 255;
        if(r < 0) r = 0;
        if(g < 0) g = 0;
        if(b < 0) b = 0;
        pixel[0] = r * 220 / 256;
        pixel[1] = g * 220 / 256;
        pixel[2] = b * 220 / 256;
        return pixel32;
    }
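      The functions above make up the whole capture flow. A minimal sketch of how they might be driven from a main() loop is shown below; it is illustrative only and not part of the original project. USB_VIDEO, CLEAR() and struct cam_buffer are assumed to come from includes.h, and the RGB buffer size assumes the 640x480 RGB24 frames used throughout:

    #include "includes.h"
    
    int main(void)
    {
        void *yuyv_frame = NULL;
        size_t yuyv_len = 0;
        static unsigned char rgb_buf[640 * 480 * 3];     /* one RGB24 frame */
        int i;
    
        initVideo();                                     /* open device, set 640x480 YUYV, mmap buffers */
        if (startCapture() < 0)                          /* queue all buffers, VIDIOC_STREAMON */
            return -1;
    
        for (i = 0; i < 100; i++) {                      /* grab 100 frames as a demo */
            if (getFrame(&yuyv_frame, &yuyv_len) < 0)    /* VIDIOC_DQBUF one filled buffer */
                break;
            convert_yuv_to_rgb_buffer(yuyv_frame, rgb_buf, 640, 480);  /* software YUYV -> RGB */
            /* ...display or transmit rgb_buf here... */
            backFrame();                                 /* VIDIOC_QBUF the buffer back to the driver */
        }
    
        stopCapture();
        freeBuffers();
        closeVideo();
        return 0;
    }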

      Next, the IPU conversion part.

    #include "includes.h"
    
    int fd_ipu=0;
    struct ipu_task taskCam;
    struct timeval begintime, endtime;
    unsigned int ipuOutputSize=0,ipuInputSize=0;
    void *inbuf=NULL;
    void *outbuf=NULL;
    /*************** IPU-based conversion (hardware counterpart of the software converter) ***************/
    void initIPU()
    {
        int ret;
        CLEAR(taskCam);
        // Input image size and format
        taskCam.input.width    = 640;
        taskCam.input.height   = 480;
        taskCam.input.format   = v4l2_fourcc('Y', 'U', 'Y', 'V');
    //
    //    taskCam.input.crop.pos.x = 0;
    //    taskCam.input.crop.pos.y = 0;
    //    taskCam.input.crop.w = 0;
    //    taskCam.input.crop.h = 0;
    
        // Output image size and format
        taskCam.output.width   = 640;
        taskCam.output.height  = 480;
        taskCam.output.format  = v4l2_fourcc('B', 'G', 'R', '3');
    
    //    taskCam.output.crop.pos.x = 300;
    //    taskCam.output.crop.pos.y = 300;
    //    taskCam.output.crop.w = 300;
    //    taskCam.output.crop.h = 300;
        // Open IPU device
        fd_ipu = open(IPUDEV, O_RDWR, 0);
        if (fd_ipu < 0) {
            printf("open ipu dev fail
    ");
        }
        /* IPU_ALLOC takes the requested size in output.paddr and returns the
         * physical address of the allocated DMA buffer in the same field */
        ipuOutputSize = taskCam.output.paddr = taskCam.output.width * taskCam.output.height
                * fmt_to_bpp(taskCam.output.format) / 8;
        printf("ipuOutputSize=%d\n", ipuOutputSize);
        ret = ioctl(fd_ipu, IPU_ALLOC, &taskCam.output.paddr);
        if (ret < 0) {
            printf("ioctl IPU_ALLOC fail\n");
        }
        outbuf = mmap(0, ipuOutputSize, PROT_READ | PROT_WRITE,
                MAP_SHARED, fd_ipu, taskCam.output.paddr);
        if (outbuf == MAP_FAILED) {
            printf("mmap ipu output image fail\n");
        }
        ipuInputSize = taskCam.input.paddr = taskCam.input.width * taskCam.input.height
                    * fmt_to_bpp(taskCam.input.format) / 8;
        printf("ipuInputSize=%d\n", ipuInputSize);
        ret = ioctl(fd_ipu, IPU_ALLOC, &taskCam.input.paddr);
        if (ret < 0) {
            printf("ioctl IPU_ALLOC fail: (errno = %d)\n", errno);
        }
        inbuf = mmap(0, ipuInputSize, PROT_READ | PROT_WRITE,
                    MAP_SHARED, fd_ipu, taskCam.input.paddr);
        if (inbuf == MAP_FAILED) {
            printf("mmap ipu input image fail\n");
        }
    }
    void IPUConvent(void *in,void *out)
    {
        int ret;
        memcpy(inbuf, in, ipuInputSize);
        gettimeofday(&begintime, NULL);
        // Perform color space conversion
        ret = ioctl(fd_ipu, IPU_QUEUE_TASK, &taskCam);
        if (ret < 0) {
            printf("ioct IPU_QUEUE_TASK fail %x
    ", ret);
        }
        gettimeofday(&endtime, NULL);
        double timeuse = 1000000*(endtime.tv_sec - begintime.tv_sec)+endtime.tv_usec-begintime.tv_usec;
        timeuse /=1000;//除以1000则进行毫秒计时,如果除以1000000则进行秒级别计时,如果除以1则进行微妙级别计时
        printf("yuv2rgb use %f ms
    ",timeuse);
        memcpy(out,outbuf,ipuOutputSize);
    }
    void closeIPU()
    {
        if (outbuf) munmap(outbuf, ipuOutputSize);
        if (inbuf)  munmap(inbuf, ipuInputSize);
        if (taskCam.output.paddr)
            ioctl(fd_ipu, IPU_FREE, &taskCam.output.paddr);
        if (taskCam.input.paddr)
            ioctl(fd_ipu, IPU_FREE, &taskCam.input.paddr);
        close(fd_ipu);
    }
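      The IPU (the i.MX image processing unit) does the same color-space conversion in hardware, so it can replace the software converter in the capture loop. A rough sketch of the swap, assuming the same variables as the capture example above and that IPUDEV and fmt_to_bpp() come from includes.h as in the original code:

    initIPU();                                    /* allocate and map the IPU DMA buffers once */
    while (getFrame(&yuyv_frame, &yuyv_len) == 0) {
        IPUConvent(yuyv_frame, rgb_buf);          /* hardware YUYV -> BGR24 instead of convert_yuv_to_rgb_buffer() */
        /* ...display or transmit rgb_buf... */
        backFrame();
    }
    closeIPU();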

      Then the framebuffer display part.

    #include "includes.h"
    
    int fd_fb0;
    long int screensize = 0;
    char *fb_buf = 0;
    struct fb_var_screeninfo vinfo;
    struct fb_fix_screeninfo finfo;
    
    void InitDisOnFrameBuffer()
    {
        // Open the file for reading and writing
        fd_fb0 = open(DISON_FB0, O_RDWR);
        if (fd_fb0 < 0) {
            printf("Error: cannot open framebuffer device.\n");
            exit(1);
        }
        printf("The framebuffer device was opened successfully.\n");
    
        // Get fixed screen information
        if (ioctl(fd_fb0, FBIOGET_FSCREENINFO, &finfo)) {
            printf("Error reading fixed information.
    ");
            exit(2);
        }
    
        // Get variable screen information
        if (ioctl(fd_fb0, FBIOGET_VSCREENINFO, &vinfo)) {
            printf("Error reading variable information.
    ");
            exit(3);
        }
        printf("%dx%d, %dbpp
    ", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel );
    
        // Figure out the size of the screen in bytes
        screensize = vinfo.xres * vinfo.yres * vinfo.bits_per_pixel / 8;
        printf("screensize=%d
    ",screensize);
    
        // Map the device to memory
        fb_buf = (char *)mmap(0, screensize, PROT_READ | PROT_WRITE, MAP_SHARED,
                       fd_fb0, 0);
        if (fb_buf == MAP_FAILED) {
            printf("Error: failed to map framebuffer device to memory.\n");
            exit(4);
        }
        printf("The framebuffer device was mapped to memory successfully.\n");
    }
    void DisOnFrameBuffer(unsigned char *frame)
    {
        //memcpy(fb_buf,frame,640* 480* 3 * sizeof(char));
        int x = 0, y = 0;
        long int location = 0;
        // Figure out where in memory to put the pixel
        for ( y = 0; y < 480; y++ )
            for ( x = 0; x < 640; x++ ) {
                location = (x+vinfo.xoffset) * (vinfo.bits_per_pixel/8) +
                           (y+vinfo.yoffset) * finfo.line_length;
                if ( vinfo.bits_per_pixel == 32 ) {
                    //rgb32 bgra
                    *(fb_buf + location )         = *frame;frame++; // Some blue
                    *(fb_buf + location + 1)    = *frame;frame++; // A little green
                    *(fb_buf + location + 2)     = *frame;frame++; //A lot of red//frame[480*y+x+2];
                    *(fb_buf + location + 3) = 0; // No transparency
                }
                else { //assume 16bpp
                    int b = 10;
                    int g = (x-100)/6; // A little green
                    int r = 31-(y-100)/16; // A lot of red
                    unsigned short int t = r<<11 | g << 5 | b;
                    *((unsigned short int*)(fb_buf + location)) = t;
                }
            }
    }
    void CloseDisOnFrameBuffer()
    {
        munmap(fb_buf, screensize);
        close(fd_fb0);
    }
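      Note that the 16bpp branch in DisOnFrameBuffer() above fills the screen with a test gradient rather than the camera frame. If the panel really runs at 16bpp, the three B,G,R bytes produced for each pixel (the 'BGR3' layout of the IPU output) could instead be packed into an RGB565 value, roughly as sketched below. This helper is an illustration and not part of the original code:

    #include <stdint.h>
    
    /* Pack one B,G,R byte triple (the 'BGR3' layout used by the IPU output above)
     * into a 16-bit RGB565 pixel. */
    static inline uint16_t bgr24_to_rgb565(const unsigned char *p)
    {
        return (uint16_t)(((p[2] >> 3) << 11) |   /* red:   keep the top 5 bits */
                          ((p[1] >> 2) << 5)  |   /* green: keep the top 6 bits */
                          (p[0] >> 3));           /* blue:  keep the top 5 bits */
    }
    
    /* Usage inside the pixel loop, in place of the gradient:
     *     *((unsigned short int *)(fb_buf + location)) = bgr24_to_rgb565(frame);
     *     frame += 3;
     */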

      The UDP transmission part.

    #include "includes.h"
    
    
    struct sockaddr_in serveraddr;
    int confd;
    
    char udpRecbuf[MAXLINE];
    
    void initUDPTrans()
    {
        // 1. create a UDP socket
        confd = socket(AF_INET, SOCK_DGRAM, 0);
        // 2. initialize the server address
        bzero(&serveraddr,sizeof(serveraddr));
        serveraddr.sin_family=AF_INET;
        //
        inet_pton(AF_INET,SEVER_IP,&serveraddr.sin_addr.s_addr);
        serveraddr.sin_port =htons(SERVER_PORT);
    }
    void sendUDPdata(void *datas,unsigned int size)
    {
        ssize_t len;                  // sendto() returns -1 on error, so use a signed type
        size_t i, j;                  // the frame is sent as fixed-size blocks of BLOCKSIZE bytes each
        char tempflag;
        struct udptransbuf data;
        for(i=0;i<24;i++){
            memcpy(data.buf, (char *)datas + i * BLOCKSIZE, BLOCKSIZE);  // cast: arithmetic on void* is a GCC extension
    //        for(j=0;j<BLOCKSIZE;j++)
    //            data.buf[j]= (unsigned char*)(datas+i*BLOCKSIZE+j);
            if(i==0){
                tempflag='a';
                data.flag=tempflag;
            }
            else{
                tempflag++;
                data.flag=tempflag;
            }
            // 3. send this block to the server
            len=sendto(confd,(void*)&data,sizeof(data),0,(struct sockaddr *)&serveraddr,sizeof(serveraddr));
            if(len<0)
                printf("UDP send failed
    ");
        }
        //char udpSendbuf[MAXLINE]="125wwew3332354#@$#";
    }
    void recUDPdata(char *udpRecbuf)  // caller must pass a buffer of at least MAXLINE bytes
    {
        ssize_t len;
        len = recvfrom(confd, udpRecbuf, MAXLINE, 0, NULL, NULL);  // sizeof(pointer) would be wrong here
        if (len > 0)
            write(STDOUT_FILENO, udpRecbuf, len);
    }
    void closeUDPtrans()
    {
        close(confd);
    }
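      For reference, a hedged sketch of the receiving side is given below. The layout of struct udptransbuf (one flag byte 'a', 'b', ... followed by BLOCKSIZE payload bytes), the block count of 24, and the block size of 38400 bytes (640*480*3 split into 24 blocks) are assumptions inferred from sendUDPdata() above; the real definitions live in includes.h and in the Qt host program, which does the equivalent with QUdpSocket:

    #include <string.h>
    #include <sys/types.h>
    #include <sys/socket.h>
    
    #define BLOCKSIZE_GUESS 38400             /* assumption: 640*480*3 bytes split into 24 blocks */
    #define NUM_BLOCKS      24
    
    struct udptransbuf_guess {                /* hypothetical mirror of struct udptransbuf */
        char flag;                            /* 'a' + block index, as set by sendUDPdata() */
        char buf[BLOCKSIZE_GUESS];
    };
    
    /* Reassemble one RGB24 frame (640*480*3 bytes) from the blocks sent by sendUDPdata(). */
    static int recv_frame(int sockfd, unsigned char *frame)
    {
        struct udptransbuf_guess pkt;
        int got = 0;
    
        while (got < NUM_BLOCKS) {
            ssize_t n = recvfrom(sockfd, &pkt, sizeof(pkt), 0, NULL, NULL);
            if (n < 0)
                return -1;
            int idx = pkt.flag - 'a';         /* recover the block index from the flag byte */
            if (idx < 0 || idx >= NUM_BLOCKS)
                continue;                     /* drop packets that do not belong to this frame */
            memcpy(frame + (size_t)idx * BLOCKSIZE_GUESS, pkt.buf, BLOCKSIZE_GUESS);
            got++;
        }
        return 0;
    }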

    Complete project

    https://github.com/tla001/CapTrans

    Host software (Qt)

  • Original post: https://www.cnblogs.com/tla001/p/6322823.html