您的位置:首页 > 运维架构 > Linux

Qt浅谈之四十Centos下Qt结合v4l2实现的视频显示

2016-01-20 18:20 686 查看


一、简介

       v4l2是针对uvc免驱usb设备的编程框架,主要用于采集usb摄像头。 可从网上下载最新的源码(包括v4l2.c和v4l2.h两个文件),本文中修改过。

      Qt运行界面如下(动态变化的):




二、详解

1、准备

(1)插入usb摄像头,检测设备文件/dev/video0



与代码中的pd.dev_name = "/dev/video0";保持一致。

(2)检测颜色编码

安装包#yum install v4l-utils,然后执行命令#v4l2-ctl -d /dev/video0 --list-formats



颜色编码格式为YUYV,与代码中的s->fmt.fmt.pix.pixelformat    = V4L2_PIX_FMT_YUYV;保持一致。

(3)遇到错误

在centos6.6中,遇到了错误:

VIDIOC_STREAMON error 28, 设备上没有空间
没有查到原因,不清楚什么问题,试了如下方法也不行:
#rmmod uvcvideo
#modprobe uvcvideo quirks=128
于是切换到centos6.3上,能顺利的运行(在虚拟机中也是可以正常运行的)。

2、主要点

(1)将YUYV转换成rgb后显示在界面上(此前直接使用MPEG格式的数据无法显示)
convert_yuv_to_rgb_buffer((unsigned char *)pd.buffers[pd.buf.index].start,bufrgb,640,480);
QImage image(bufrgb,640,480,QImage::Format_RGB888);
ui.displayLabel->setPixmap(QPixmap::fromImage(image));
(2)将视频流保存到本地文件(文件名时间戳的最小单位为秒,需要更高频率可以调整到毫秒)
if (bufrgb > 0 && strlen((char *)bufrgb) > 0) {
tm_time = localtime(&now);
char filename[30] = {0};
sprintf(filename,"%4d-%02d-%02d_%02d.%02d.%02d.png",1900+tm_time->tm_year,1+tm_time->tm_mon,tm_time->tm_mday,
tm_time->tm_hour,tm_time->tm_min,tm_time->tm_sec);
QImage image(bufrgb,640,480,QImage::Format_RGB888);
image.save(filename);
}

3、完整代码

(1)v4l2.h
#ifndef		__V4L2_H__
#define		__V4L2_H__
#include <linux/types.h>
#include <linux/videodev2.h>

#ifdef __cplusplus
extern "C" {
#endif

/* One memory-mapped frame buffer handed out by the driver:
 * start address in this process and mapped length in bytes. */
typedef	struct
{
void	*start;
size_t	length;
}buffer;

/* All state for one open capture device, shared between the C helpers
 * below and the Qt front end (videodisplay.cpp). */
typedef	struct
{
int			fd;        /* device file descriptor from open_device() */
int			n_buffers; /* number of entries in buffers[] */
char			*dev_name; /* e.g. "/dev/video0"; set by the caller */
buffer			*buffers;  /* mmap'ed buffers filled by init_mmap() */
struct v4l2_buffer	buf;       /* last buffer dequeued by read_frame() */
struct v4l2_format 	fmt;       /* negotiated format from init_device() */
}pass_data;

/* High-level lifecycle: open + configure + start streaming, and teardown. */
int init_dev (pass_data *s);
void stop_dev (pass_data *s);
/* Dequeue one frame into s->buf; requeue it when the caller is done. */
void read_frame(pass_data *s);
void return_data (pass_data *s);

/* Lower-level steps used by init_dev()/stop_dev(). */
void init_mmap(pass_data *s);
void init_device(pass_data *s);
int open_device(pass_data *s);
void start_capturing(pass_data *s);
void close_device(pass_data *s);
void stop_capturing(pass_data *s);
void stop_device(pass_data *s);
void errno_exit(const char *s);
int xioctl(int fd, int request, void *arg);
void process_image(void *p, pass_data *s, int i);

/* YUYV (2 bytes/pixel) -> packed RGB888 (3 bytes/pixel) conversion. */
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);

#ifdef __cplusplus
}
#endif

#endif
(2)v4l2.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <errno.h>

#include <fcntl.h>
#include <unistd.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include "v4l2.h"

/* Zero out any lvalue in place (used on the v4l2 request structs). */
#define	CLEAR(x)	memset (&x, 0, sizeof(x))
/* Destination files used by process_image() when dumping raw frames. */
char *mpeg[] = {"./1.mpeg", "./2.mpeg", "./3.mpeg", "./4.mpeg", "./5.mpeg"};

/* Bring the capture device fully up: open the node, negotiate the format,
 * map the driver buffers and switch streaming on.
 * Returns 0 on success, or open_device()'s negative error code. */
int init_dev (pass_data *s)
{
    const int rc = open_device(s);
    if (rc != 0)
        return rc;

    init_device(s);       /* query caps + set 640x480 YUYV      */
    init_mmap(s);         /* request and map the driver buffers */
    start_capturing(s);   /* queue buffers and STREAMON         */

    fprintf(stdout, "'%s' initialize finish ...\n", s->dev_name);
    return 0;
}

/* Tear the device down in reverse order of init_dev():
 * stop streaming, unmap the buffers, close the file descriptor. */
void stop_dev (pass_data *s)
{
    stop_capturing(s);   /* VIDIOC_STREAMOFF       */
    stop_device(s);      /* munmap all buffers     */
    close_device(s);     /* close(fd)              */
    fprintf(stdout, "close '%s' ...\n", s->dev_name);
}

/* Dump one raw frame to the i-th file listed in mpeg[].
 * p points at the frame data; the byte count comes from the negotiated
 * format (s->fmt.fmt.pix.sizeimage). Exits the process on I/O errors.
 * Fix: the original indexed mpeg[] (5 entries) without a bounds check and
 * cast p to a meaningless struct v4l2_buffer* before write(). */
void process_image(void *p, pass_data *s, int i)
{
    /* Progress tick on stdout. */
    fputc ('.', stdout);
    fflush (stdout);

    /* Guard against indexing past the fixed-size mpeg[] table. */
    if (i < 0 || i >= (int)(sizeof(mpeg) / sizeof(mpeg[0]))) {
        fprintf (stderr, "process_image: bad file index %d\n", i);
        return;
    }

    fprintf (stderr, "%s", mpeg[i]);

    int fd = open (mpeg[i], O_RDWR | O_CREAT | O_TRUNC, 0644);
    if (fd == -1)
        errno_exit("open");
    /* write() takes void*; no cast needed. */
    if (write (fd, p, s->fmt.fmt.pix.sizeimage) == -1)
        errno_exit("write");

    close (fd);
}

/* Dequeue the next filled buffer from the driver into s->buf.
 * Every VIDIOC_DQBUF failure is fatal: in the original the EAGAIN, EIO
 * and default switch arms all ended in errno_exit("VIDIOC_DQBUF"). */
void read_frame(pass_data *s)
{
    CLEAR (s->buf);
    s->buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    s->buf.memory = V4L2_MEMORY_MMAP;

    if (xioctl (s->fd, VIDIOC_DQBUF, &s->buf) == -1)
        errno_exit ("VIDIOC_DQBUF");

    /* The driver must hand back an index inside the mapped range. */
    assert (s->buf.index < s->n_buffers);
}

/* Hand the buffer most recently dequeued by read_frame() back to the
 * driver so it can be refilled. Fatal on failure. */
void return_data (pass_data *s)
{
    if (xioctl (s->fd, VIDIOC_QBUF, &s->buf) == -1)
        errno_exit ("VIDIOC_QBUF");
}

/* Queue every mapped buffer with the driver, then switch the stream on.
 * Fatal on any ioctl failure. */
void start_capturing(pass_data *s)
{
    unsigned int idx;
    for (idx = 0; idx < s->n_buffers; ++idx)
    {
        struct v4l2_buffer qbuf;
        CLEAR (qbuf);
        qbuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        qbuf.memory = V4L2_MEMORY_MMAP;
        qbuf.index  = idx;

        if (xioctl (s->fd, VIDIOC_QBUF, &qbuf) == -1)
            errno_exit("VIDIOC_QBUF");
    }

    enum v4l2_buf_type stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl (s->fd, VIDIOC_STREAMON, &stream_type))
        errno_exit("VIDIOC_STREAMON");
}

/* Ask the driver to stop streaming (VIDIOC_STREAMOFF). Fatal on failure. */
void stop_capturing(pass_data *s)
{
    enum v4l2_buf_type stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (xioctl (s->fd, VIDIOC_STREAMOFF, &stream_type))
        errno_exit("VIDIOC_STREAMOFF");
}

/* Request up to 20 mmap'ed buffers from the driver, map each into this
 * process and record them in s->buffers / s->n_buffers.
 * Exits the process on any failure. */
void init_mmap(pass_data *s)
{
struct v4l2_requestbuffers req;
CLEAR (req);
req.count	=	20;
req.type	=	V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory	=	V4L2_MEMORY_MMAP;

/* Ask the driver to allocate the buffers; it may grant fewer than 20. */
if (xioctl (s->fd, VIDIOC_REQBUFS, &req))
{
if (EINVAL == errno)
{
fprintf(stderr, "%s does not support 'memory mapping'\n", s->dev_name);
exit (EXIT_FAILURE);
}
else
{
errno_exit ("VIDIOC_REQBUFS");
}
}

/* Streaming needs at least two buffers to alternate between. */
if (req.count < 2)
{
fprintf(stderr, "Insufficient buffer memory on %s\n", s->dev_name);
exit (EXIT_FAILURE);
}

if ((s->buffers = (buffer *)calloc (req.count, sizeof (*s->buffers))) == NULL)
{
fprintf(stderr, "Out of memory\n");
exit ( EXIT_FAILURE);
}

/* Query each buffer's offset/length, then map it read/write, shared. */
for (s->n_buffers = 0; s->n_buffers < req.count; ++ s->n_buffers)
{
struct v4l2_buffer buf;
CLEAR (buf);

buf.type	=	V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory	=	V4L2_MEMORY_MMAP;
buf.index	=	s->n_buffers;

if (xioctl (s->fd, VIDIOC_QUERYBUF, &buf) == -1)
errno_exit("VIDIOC_QUERYBUF");

s->buffers[s->n_buffers].length	=	buf.length;
s->buffers[s->n_buffers].start	=
mmap(   NULL,
buf.length,
PROT_READ | PROT_WRITE,
MAP_SHARED,
s->fd,
buf.m.offset
);

if (s->buffers[s->n_buffers].start == MAP_FAILED)
errno_exit ("mmap");
#if 	_DEBUG_
fprintf(stdout, "%d -> %p\n", s->n_buffers, s->buffers[s->n_buffers].start);
#endif
}
}

/* Verify the device is a V4L2 streaming capture device, reset cropping to
 * the driver default rectangle (best effort, errors ignored), and program
 * the 640x480 YUYV interlaced format. Exits the process on fatal errors.
 * Fix: error-message typos "captrue" -> "capture", "straming" -> "streaming". */
void init_device(pass_data* s)
{
struct v4l2_capability cap;
struct v4l2_cropcap cropcap;
struct v4l2_crop crop;
unsigned int min;

/* Confirm this fd really talks V4L2. */
if (xioctl (s->fd, VIDIOC_QUERYCAP, &cap) == -1)
{
if (EINVAL == errno)
{
fprintf (stderr, "%s is no V4L2 device\n", s->dev_name);
exit (EXIT_FAILURE);
}
else
{
errno_exit ("VIDIOC_QUERYCAP");
}
}

if (! (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
{
fprintf(stderr, "%s is no video capture device\n", s->dev_name);
exit(EXIT_FAILURE);
}

if (! (cap.capabilities & V4L2_CAP_STREAMING))
{
fprintf(stderr, "%s does not support streaming I/O\n", s->dev_name);
exit(EXIT_FAILURE);
}

CLEAR(cropcap);

cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

/* Best-effort: reset cropping to the default rectangle; failures (e.g.
 * drivers without crop support) are deliberately ignored. */
if (xioctl (s->fd, VIDIOC_CROPCAP, &cropcap) == 0)
{
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect;

if (xioctl (s->fd, VIDIOC_S_CROP, &crop))
{
switch (errno)
{
case EINVAL:
break;
default:
break;
}
}
else
{
/* Errors ignored */
}
}
CLEAR (s->fmt);

/* Must match the camera's reported format (v4l2-ctl --list-formats). */
s->fmt.type		= V4L2_BUF_TYPE_VIDEO_CAPTURE;
s->fmt.fmt.pix.width	= 640;
s->fmt.fmt.pix.height	= 480;
s->fmt.fmt.pix.pixelformat	= V4L2_PIX_FMT_YUYV;
s->fmt.fmt.pix.field	= V4L2_FIELD_INTERLACED;

if (xioctl (s->fd, VIDIOC_S_FMT, &s->fmt) == -1)
errno_exit("VIDIOC_S_FMT");

/* Buggy drivers may report too-small line/image sizes; enforce the
 * YUYV minimum of 2 bytes per pixel. */
min = s->fmt.fmt.pix.width * 2;
if (s->fmt.fmt.pix.bytesperline < min)
s->fmt.fmt.pix.bytesperline = min;

min = s->fmt.fmt.pix.bytesperline * s->fmt.fmt.pix.height;
if (s->fmt.fmt.pix.sizeimage < min)
s->fmt.fmt.pix.sizeimage = min;

}

/* Unmap every buffer that init_mmap() mapped. Fatal on munmap failure. */
void stop_device(pass_data *s)
{
    unsigned int idx;

    for (idx = 0; idx < s->n_buffers; ++idx)
    {
        if (munmap (s->buffers[idx].start, s->buffers[idx].length) == -1)
            errno_exit("munmap");
    }
}

/* Open s->dev_name for capture.
 * Returns 0 on success, -1 if the path cannot be stat()ed, -2 if it is not
 * a character device, -3 if open() fails.
 * Fix: open() failure previously returned -2, colliding with the
 * "not a char device" code and leaving the Qt caller's -3 branch
 * ("can not open device") unreachable; also fixed the "oprn" typo. */
int open_device(pass_data *s)
{
    struct stat st;

    if (stat (s->dev_name, &st) == -1)
    {
        fprintf(stderr, "Can't identify '%s':[%d] %s\n", s->dev_name, errno, strerror(errno));
        return -1;
    }

    if (!S_ISCHR (st.st_mode))
    {
        fprintf(stderr, "%s is no device\n", s->dev_name);
        return -2;
    }

    if ((s->fd = open (s->dev_name, O_RDWR, 0)) == -1 )
    {
        fprintf(stderr, "Can't open '%s': error %d, %s\n", s->dev_name, errno, strerror(errno));
        return -3;
    }
    return 0;
}

/* Release the device file descriptor opened by open_device(). */
void close_device(pass_data *s)
{
    close (s->fd);
}

/* ioctl() wrapper that transparently retries when the call is interrupted
 * by a signal (EINTR). Returns the final ioctl() result. */
int xioctl(int fd, int request, void *arg)
{
    int rc;

    do {
        rc = ioctl(fd, request, arg);
    } while (rc == -1 && errno == EINTR);

    return rc;
}

/* Print "<context> error <errno>, <message>" on stderr and terminate the
 * whole process with EXIT_FAILURE. Never returns. */
void errno_exit(const char *s)
{
    fprintf(stderr, "%s error %d, %s\n", s, errno, strerror(errno));
    exit(EXIT_FAILURE);
}

/* Convert one YUV sample (BT.601-style coefficients) to a packed RGB value.
 * Channels are clamped to [0,255] and then scaled by 220/256 (a slight
 * dimming factor kept from the original). Byte 0 = R, 1 = G, 2 = B in the
 * returned word's native byte order. */
static int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    int r = y + (1.370705 * (v - 128));
    int g = y - (0.698001 * (v - 128)) - (0.337633 * (u - 128));
    int b = y + (1.732446 * (u - 128));

    if (r < 0) r = 0; else if (r > 255) r = 255;
    if (g < 0) g = 0; else if (g > 255) g = 255;
    if (b < 0) b = 0; else if (b > 255) b = 255;

    unsigned int packed = 0;
    unsigned char *bytes = (unsigned char *)&packed;
    bytes[0] = r * 220 / 256;
    bytes[1] = g * 220 / 256;
    bytes[2] = b * 220 / 256;
    return packed;
}

/* Expand a YUYV frame (2 bytes/pixel: Y0 U Y1 V shared chroma per pixel
 * pair) into packed RGB888 (3 bytes/pixel). rgb must hold
 * width*height*3 bytes. Always returns 0. */
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    unsigned int src = 0, dst = 0;
    const unsigned int total = width * height * 2;

    while (src < total) {
        /* One 4-byte group carries two pixels sharing U and V. */
        int y0 = yuv[src + 0];
        int u  = yuv[src + 1];
        int y1 = yuv[src + 2];
        int v  = yuv[src + 3];
        src += 4;

        unsigned int px = (unsigned int)convert_yuv_to_rgb_pixel(y0, u, v);
        rgb[dst++] = px & 0xff;
        rgb[dst++] = (px >> 8) & 0xff;
        rgb[dst++] = (px >> 16) & 0xff;

        px = (unsigned int)convert_yuv_to_rgb_pixel(y1, u, v);
        rgb[dst++] = px & 0xff;
        rgb[dst++] = (px >> 8) & 0xff;
        rgb[dst++] = (px >> 16) & 0xff;
    }
    return 0;
}
(3)videodisplay.h
#ifndef VIDEODISPLAY_H
#define VIDEODISPLAY_H

#include <QtGui>
#include "ui_dialog.h"
#include "v4l2.h"

/* Qt dialog that captures frames from a V4L2 camera through the C helpers
 * declared in v4l2.h, converts them YUYV -> RGB888 and shows them on a
 * label; frames can also be saved as timestamped PNG files. */
class VideoDisplay : public QDialog
{
Q_OBJECT

public:
VideoDisplay(QWidget *parent = 0);
~VideoDisplay();

private slots:
void beginCapture();   // opens/initializes the device and starts the timer
void flushBuff();      // timer slot: dequeue, convert and display one frame
void savebmpData();    // save the current RGB frame as YYYY-MM-DD_hh.mm.ss.png

private:
Ui::Dialog ui;         // designer-generated widgets (ui_dialog.h)
pass_data pd;          // V4L2 device state shared with the C layer
QTimer *timer;         // fires flushBuff() every 10 ms once capture starts
unsigned char *bufrgb; // 640*480*3 RGB conversion buffer, lazily malloc'ed
};

#endif // VIDEODISPLAY_H
(4)videodisplay.cpp
#include <stdlib.h>
#include <string.h>
#include <time.h>

#include "videodisplay.h"

/* Wire up the UI buttons and the frame timer, and prepare the V4L2 state.
 * Fix: pd (fd, n_buffers, buffers, ...) was left uninitialized until
 * beginCapture(); zero it here so stale garbage can never be read.
 * The string literal is cast to char* explicitly because pass_data::dev_name
 * is a non-const char* (C API). */
VideoDisplay::VideoDisplay(QWidget *parent)
	: QDialog(parent)
	, bufrgb(NULL)
{
	ui.setupUi(this);
	connect(ui.beginButton, SIGNAL(clicked()), this, SLOT(beginCapture()));
	connect(ui.saveButton, SIGNAL(clicked()), this, SLOT(savebmpData()));
	connect(ui.exitButton, SIGNAL(clicked()), this, SLOT(reject()));

	/* 10 ms tick ≈ up-to-100 fps display refresh; started in beginCapture(). */
	timer = new QTimer(this);
	timer->setInterval(10);
	connect(timer, SIGNAL(timeout()), this, SLOT(flushBuff()));

	memset(&pd, 0, sizeof(pd));
	/* Must match the camera node detected under /dev (see article text). */
	pd.dev_name = (char *)"/dev/video0";
}

/* Stop the frame timer and release owned resources.
 * Fix: bufrgb, malloc'ed lazily in flushBuff(), was leaked. */
VideoDisplay::~VideoDisplay()
{
	if (timer->isActive()) {
		timer->stop();
	}
	if (bufrgb) {
		free(bufrgb);
		bufrgb = NULL;
	}
}

void VideoDisplay::beginCapture()
{
int flag = init_dev(&pd);
if (flag == -1) {
QMessageBox::information(this,tr("Tip"),tr("no device"));
exit(1);
}
else if (flag == -2) {
QMessageBox::information(this,tr("Tip"),tr("device is wrong"));
exit(2);
}
else if (flag == -3) {
QMessageBox::information(this,tr("Tip"),tr("can not open device"));
exit(3);
}
timer->start();
ui.beginButton->setDisabled(TRUE);
}

/* Timer slot: dequeue one YUYV frame, convert it to RGB888 and show it on
 * the display label, then requeue the driver buffer.
 * Fix: the malloc() result was never checked; on failure we now skip the
 * frame (after requeuing the buffer) instead of dereferencing NULL. */
void VideoDisplay::flushBuff()
{
	read_frame (&pd);

	if (!bufrgb) {
		/* Lazily allocate the 640x480 RGB888 conversion buffer. */
		bufrgb = (unsigned char *)malloc(640 * 480 * 3);
		if (!bufrgb) {
			return_data(&pd);
			return;
		}
	}
	memset(bufrgb, 0, 640 * 480 * 3);

	convert_yuv_to_rgb_buffer((unsigned char *)pd.buffers[pd.buf.index].start, bufrgb, 640, 480);
	QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
	ui.displayLabel->setPixmap(QPixmap::fromImage(image));

	return_data(&pd);
}

/* Save the current RGB frame as "YYYY-MM-DD_hh.mm.ss.png" in the working
 * directory.
 * Fix: the original tested "bufrgb > 0" (pointer-vs-integer comparison)
 * and ran strlen() on raw pixel data, which is not NUL-terminated — and
 * would also wrongly skip a frame whose first pixel is black. A NULL check
 * is the correct guard. The unused FILE *fp was removed. */
void VideoDisplay::savebmpData()
{
	if (bufrgb == NULL) {
		/* No frame has been captured yet. */
		return;
	}

	time_t now;
	time(&now);
	struct tm *tm_time = localtime(&now);

	char filename[30] = {0};
	sprintf(filename, "%4d-%02d-%02d_%02d.%02d.%02d.png",
		1900 + tm_time->tm_year, 1 + tm_time->tm_mon, tm_time->tm_mday,
		tm_time->tm_hour, tm_time->tm_min, tm_time->tm_sec);

	QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
	image.save(filename);
}
(5)编译运行


 


点击BEGIN开始显示,点击SAVE会在当前的可执行程序目录下生成界面显示的视频的png的文件:2016-01-20_17.28.36.png、2016-01-20_17.36.08.png




三、总结

(1)不同的系统中v4l2的问题不一样,通过调整参数可以解决部分,但有些因能力有限实在无法解决。

(2)该文仅是以前毕业设计的一个开头,还有图像的转化和处理等一系列的自动识别的功能,接着可以将图片存放到数据库。
(3)若有问题或建议,请留言,在此感谢!
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签:  linux qt