Since I had been using 超群晴天's program to grab images from the very beginning, I never really understood how the capture process works. Today I changed the program for the Qt display, and the result was that the displayed image came out garbled.
So below I will analyze and modify the program myself, so that I can use it later on.
http://blog.csdn.net/crazyman2010/article/details/6055022
Take a look at that blog post first; it is quite well written.
After many attempts I found that the problem was the USB hub: once the hub has been used, the second time it is plugged in two video device nodes appear, and that is what produced the garbled image. I copied the C++ code again and compiled it:
videodevice.cpp
#include "videodevice.h" VideoDevice::VideoDevice(QString dev_name) { this->dev_name = dev_name; this->fd = -1; this->buffers = NULL; this->n_buffers = 0; this->index = -1; } int VideoDevice::open_device() { fd = open(dev_name.toStdString().c_str(), O_RDWR/*|O_NONBLOCK*/, 0); // fd = open(dev_name.toStdString().c_str(), O_RDWR|O_NONBLOCK, 0); if(-1 == fd) { emit display_error(tr("open: %1").arg(QString(strerror(errno)))); return -1; } return 0; } int VideoDevice::close_device() { if(-1 == close(fd)) { emit display_error(tr("close: %1").arg(QString(strerror(errno)))); return -1; } return 0; } int VideoDevice::init_device() { v4l2_capability cap; v4l2_cropcap cropcap; v4l2_crop crop; v4l2_format fmt; if(-1 == ioctl(fd, VIDIOC_QUERYCAP, &cap)) { if(EINVAL == errno) { emit display_error(tr("%1 is no V4l2 device").arg(dev_name)); } else { emit display_error(tr("VIDIOC_QUERYCAP: %1").arg(QString(strerror(errno)))); } return -1; } if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { emit display_error(tr("%1 is no video capture device").arg(dev_name)); return -1; } if(!(cap.capabilities & V4L2_CAP_STREAMING)) { emit display_error(tr("%1 does not support streaming i/o").arg(dev_name)); return -1; } CLEAR(cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if(0 == ioctl(fd, VIDIOC_CROPCAP, &cropcap)) { CLEAR(crop); crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; if(-1 == ioctl(fd, VIDIOC_S_CROP, &crop)) { if(EINVAL == errno) { // emit display_error(tr("VIDIOC_S_CROP not supported")); } else { emit display_error(tr("VIDIOC_S_CROP: %1").arg(QString(strerror(errno)))); return -1; } } } else { emit display_error(tr("VIDIOC_CROPCAP: %1").arg(QString(strerror(errno)))); return -1; } CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = 640; fmt.fmt.pix.height = 480; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if(-1 == ioctl(fd, VIDIOC_S_FMT, &fmt)) { emit display_error(tr("VIDIOC_S_FMT").arg(QString(strerror(errno)))); return -1; } if(-1 == init_mmap()) { return -1; } return 0; } int VideoDevice::init_mmap() { v4l2_requestbuffers req; CLEAR(req); req.count = 4; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if(-1 == ioctl(fd, VIDIOC_REQBUFS, &req)) { if(EINVAL == errno) { emit display_error(tr("%1 does not support memory mapping").arg(dev_name)); return -1; } else { emit display_error(tr("VIDIOC_REQBUFS %1").arg(QString(strerror(errno)))); return -1; } } if(req.count < 2) { emit display_error(tr("Insufficient buffer memory on %1").arg(dev_name)); return -1; } buffers = (buffer*)calloc(req.count, sizeof(*buffers)); if(!buffers) { emit display_error(tr("out of memory")); return -1; } for(n_buffers = 0; n_buffers < req.count; ++n_buffers) { v4l2_buffer buf; CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; if(-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf)) { emit display_error(tr("VIDIOC_QUERYBUF: %1").arg(QString(strerror(errno)))); return -1; } buffers[n_buffers].length = buf.length; buffers[n_buffers].start = mmap(NULL, // start anywhere buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if(MAP_FAILED == buffers[n_buffers].start) { emit display_error(tr("mmap %1").arg(QString(strerror(errno)))); return -1; } } return 0; } int VideoDevice::start_capturing() { unsigned int i; for(i = 0; i < n_buffers; ++i) { v4l2_buffer buf; CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory =V4L2_MEMORY_MMAP; buf.index = i; // fprintf(stderr, 
"n_buffers: %d\n", i); if(-1 == ioctl(fd, VIDIOC_QBUF, &buf)) { emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno)))); return -1; } } v4l2_buf_type type; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if(-1 == ioctl(fd, VIDIOC_STREAMON, &type)) { emit display_error(tr("VIDIOC_STREAMON: %1").arg(QString(strerror(errno)))); return -1; } return 0; } int VideoDevice::stop_capturing() { v4l2_buf_type type; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if(-1 == ioctl(fd, VIDIOC_STREAMOFF, &type)) { emit display_error(tr("VIDIOC_STREAMOFF: %1").arg(QString(strerror(errno)))); return -1; } return 0; } int VideoDevice::uninit_device() { unsigned int i; for(i = 0; i < n_buffers; ++i) { if(-1 == munmap(buffers[i].start, buffers[i].length)) { emit display_error(tr("munmap: %1").arg(QString(strerror(errno)))); return -1; } } free(buffers); return 0; } int VideoDevice::get_frame(void **frame_buf, size_t* len) { v4l2_buffer queue_buf; CLEAR(queue_buf); queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; queue_buf.memory = V4L2_MEMORY_MMAP; if(-1 == ioctl(fd, VIDIOC_DQBUF, &queue_buf)) { switch(errno) { case EAGAIN: // perror("dqbuf"); return -1; case EIO: return -1 ; default: emit display_error(tr("VIDIOC_DQBUF: %1").arg(QString(strerror(errno)))); return -1; } } *frame_buf = buffers[queue_buf.index].start; *len = buffers[queue_buf.index].length; index = queue_buf.index; return 0; } int VideoDevice::unget_frame() { if(index != -1) { v4l2_buffer queue_buf; CLEAR(queue_buf); queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; queue_buf.memory = V4L2_MEMORY_MMAP; queue_buf.index = index; if(-1 == ioctl(fd, VIDIOC_QBUF, &queue_buf)) { emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno)))); return -1; } return 0; } return -1; }
videodevice.h
#ifndef VIDEODEVICE_H
#define VIDEODEVICE_H

#include <string.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>     /* close() */
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <QString>
#include <QObject>

#define CLEAR(x) memset(&(x), 0, sizeof(x))

class VideoDevice : public QObject
{
    Q_OBJECT
public:
    VideoDevice(QString dev_name);
    //VideoDevice();
    int open_device();
    int close_device();
    int init_device();
    int start_capturing();
    int stop_capturing();
    int uninit_device();
    int get_frame(void **, size_t *);
    int unget_frame();

private:
    int init_mmap();

    struct buffer
    {
        void *start;
        size_t length;
    };

    QString dev_name;
    int fd;
    buffer *buffers;
    unsigned int n_buffers;
    int index;

signals:
    void display_error(QString);
};

#endif // VIDEODEVICE_H
processimage.h
#ifndef PROCESSIMAGE_H
#define PROCESSIMAGE_H

#include <QtGui>
#include "videodevice.h"

class ProcessImage : public QWidget
{
    Q_OBJECT
public:
    ProcessImage(QWidget *parent = 0);
    ~ProcessImage();

private:
    QPainter *painter;
    QLabel *label;
    QImage *frame;
    //QPixmap *frame;
    QTimer *timer;
    int rs;
    uchar *pp;
    uchar *p;
    unsigned int len;
    int convert_yuv_to_rgb_pixel(int y, int u, int v);
    int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);
    VideoDevice *vd;

private slots:
    void paintEvent(QPaintEvent *);
    void display_error(QString err);
};

#endif // PROCESSIMAGE_H
processImage.cpp
This code builds the UI directly inside the program, so it can be run as-is.
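The processImage.cpp listing itself is not reproduced here. Besides setting up the QLabel/QTimer UI, its core job is the YUYV-to-RGB conversion declared in processimage.h. The function below is only a minimal sketch of such a conversion (the integer-approximated BT.601 formula and the clamp_u8 helper are my own, not necessarily the exact code used in the project):

// Minimal YUYV (YUV 4:2:2) to RGB888 conversion sketch.
// Each 4-byte group Y0 U Y1 V encodes two horizontally adjacent pixels.
static unsigned char clamp_u8(int v)
{
    if (v < 0)   return 0;
    if (v > 255) return 255;
    return (unsigned char)v;
}

int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb,
                              unsigned int width, unsigned int height)
{
    unsigned int pixels = width * height;

    for (unsigned int i = 0, j = 0; i < pixels * 2; i += 4, j += 6) {
        int y0 = yuv[i + 0];
        int u  = yuv[i + 1] - 128;
        int y1 = yuv[i + 2];
        int v  = yuv[i + 3] - 128;

        // Integer-approximated ITU-R BT.601 conversion.
        rgb[j + 0] = clamp_u8(y0 + ((359 * v) >> 8));            // R
        rgb[j + 1] = clamp_u8(y0 - ((88 * u + 183 * v) >> 8));   // G
        rgb[j + 2] = clamp_u8(y0 + ((454 * u) >> 8));            // B
        rgb[j + 3] = clamp_u8(y1 + ((359 * v) >> 8));
        rgb[j + 4] = clamp_u8(y1 - ((88 * u + 183 * v) >> 8));
        rgb[j + 5] = clamp_u8(y1 + ((454 * u) >> 8));
    }
    return 0;
}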
Porting it to the Zedboard went fairly smoothly: mount the filesystem first, then run it.
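One more note on the USB-hub problem mentioned at the top: when two /dev/video nodes show up, it is worth checking which one really is the camera before opening it. The standalone check below is my own sketch (the range /dev/video0../dev/video3 is an assumption); it simply prints the driver and card names reported by VIDIOC_QUERYCAP for every node that can be opened:

// List /dev/video0../dev/video3 and print the driver/card name of each node,
// so the real camera can be told apart from a stale duplicate.
#include <cstdio>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main()
{
    for (int i = 0; i < 4; ++i) {
        char node[32];
        snprintf(node, sizeof(node), "/dev/video%d", i);

        int fd = open(node, O_RDWR);
        if (fd == -1)
            continue;                       // node does not exist or is busy

        v4l2_capability cap;
        if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
            printf("%s: driver=%s card=%s\n", node,
                   (const char *)cap.driver, (const char *)cap.card);

        close(fd);
    }
    return 0;
}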