Qt Notes, Part 40: Video Display with Qt and V4L2 on CentOS

2024-03-11 10:18


I. Introduction

V4L2 is a programming framework for driver-free UVC USB devices and is mainly used here to capture frames from a USB camera. The latest source code (the two files v4l2.c and v4l2.h) can be downloaded from the internet; the versions used in this article have been modified.
The running Qt interface looks like the following (it updates dynamically):

 

II. Details

1. Preparation

(1) Plug in the USB camera and check that the device file /dev/video0 appears

This must match pd.dev_name = "/dev/video0"; in the code.
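If you want the same check from inside a program rather than from the shell, a minimal sketch (not part of the original project) is to open the node and issue VIDIOC_QUERYCAP, which is exactly what open_device()/init_device() rely on later:

/* Hedged sketch: verify that /dev/video0 is a V4L2 capture device. */
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0) { perror("open /dev/video0"); return 1; }

    struct v4l2_capability cap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
        perror("VIDIOC_QUERYCAP");
    } else {
        printf("driver: %s, card: %s, capture: %s\n",
               (const char *)cap.driver, (const char *)cap.card,
               (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) ? "yes" : "no");
    }
    close(fd);
    return 0;
}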
(2) Check the pixel format
Install the utilities with #yum install v4l-utils, then run #v4l2-ctl -d /dev/video0 --list-formats

The reported pixel format is YUYV, which must match s->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; in the code.
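The same information can be read programmatically with the VIDIOC_ENUM_FMT ioctl. The sketch below is roughly equivalent to v4l2-ctl --list-formats and is not taken from the article's code:

/* Hedged sketch: enumerate the pixel formats of a capture device. */
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0) { perror("open"); return 1; }

    struct v4l2_fmtdesc desc;
    memset(&desc, 0, sizeof(desc));
    desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while (ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) {   /* stops with EINVAL when done */
        printf("format %u: %c%c%c%c (%s)\n", desc.index,
               desc.pixelformat & 0xff, (desc.pixelformat >> 8) & 0xff,
               (desc.pixelformat >> 16) & 0xff, (desc.pixelformat >> 24) & 0xff,
               (const char *)desc.description);
        ++desc.index;
    }
    close(fd);
    return 0;
}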
(3) An error encountered
On CentOS 6.6 the following error came up:

VIDIOC_STREAMON error 28, No space left on device

Errno 28 is ENOSPC; with the uvcvideo driver this usually means it could not reserve enough USB bandwidth for the requested format, although I never pinned down the exact cause here. The usual workaround below did not help either:

#rmmod uvcvideo
#modprobe uvcvideo quirks=128

After switching to CentOS 6.3 the program ran without problems (it also runs correctly inside a virtual machine).
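If the bandwidth explanation applies, one workaround worth trying (an untested sketch under that assumption, not something verified on the CentOS 6.6 machine above) is to request a smaller YUYV frame before streaming, so the camera needs less isochronous bandwidth. It mirrors the format setup in init_device() with only the size changed:

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Hedged sketch: ask the driver for a smaller YUYV frame (320x240 instead
 * of 640x480) on an already-open descriptor to reduce USB bandwidth. */
static int request_small_format(int fd)
{
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = 320;              /* instead of 640 */
    fmt.fmt.pix.height      = 240;              /* instead of 480 */
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
        perror("VIDIOC_S_FMT");
        return -1;
    }
    /* The driver may adjust the size; the granted values come back in fmt,
     * and the display/conversion code would have to use those dimensions. */
    printf("granted %ux%u\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
    return 0;
}

int main(void)
{
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0) { perror("open"); return 1; }
    request_small_format(fd);
    close(fd);
    return 0;
}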

 

2. Key points

(1) Convert the YUYV data to RGB and display it in the UI (an earlier attempt using MPEG showed nothing)

convert_yuv_to_rgb_buffer((unsigned char *)pd.buffers[pd.buf.index].start,bufrgb,640,480);
QImage image(bufrgb,640,480,QImage::Format_RGB888);
ui.displayLabel->setPixmap(QPixmap::fromImage(image));
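One Qt detail worth keeping in mind (general QImage behavior, not something the article discusses): the QImage constructor that wraps a uchar* does not copy the pixel data, so bufrgb has to stay valid while the image is used. Converting to a QPixmap right away, as above, is safe; if the QImage were kept around, a deep copy would be the cautious choice:

// Hedged sketch: take a deep copy if the QImage may outlive bufrgb.
QImage view(bufrgb, 640, 480, QImage::Format_RGB888);  // wraps bufrgb, no copy
QImage owned = view.copy();                            // owns its own pixel data
ui.displayLabel->setPixmap(QPixmap::fromImage(owned));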

(2) Save the video stream to local files (the timestamp granularity is one second; switch to milliseconds if you need it faster)

if (bufrgb != NULL) {        /* only save once a frame has been captured */
    tm_time = localtime(&now);
    char filename[30] = {0};
    sprintf(filename, "%4d-%02d-%02d_%02d.%02d.%02d.png",
            1900 + tm_time->tm_year, 1 + tm_time->tm_mon, tm_time->tm_mday,
            tm_time->tm_hour, tm_time->tm_min, tm_time->tm_sec);
    QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
    image.save(filename);
}
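To reach the millisecond granularity mentioned above, the filename can be built with QDateTime instead of localtime(); this is a sketch of that variation, not code from the article:

// Hedged sketch: millisecond-resolution filenames via QDateTime.
if (bufrgb != NULL) {
    QString filename = QDateTime::currentDateTime()
                           .toString("yyyy-MM-dd_hh.mm.ss.zzz") + ".png";
    QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
    image.save(filename);
}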

3. Complete code

(1) v4l2.h

#ifndef	__V4L2_H__
#define	__V4L2_H__

#include <stddef.h>		/* size_t */
#include <linux/types.h>
#include <linux/videodev2.h>

#ifdef __cplusplus
extern "C" {
#endif

typedef struct
{
    void    *start;
    size_t  length;
} buffer;

typedef struct
{
    int                 fd;
    int                 n_buffers;
    char                *dev_name;
    buffer              *buffers;
    struct v4l2_buffer  buf;
    struct v4l2_format  fmt;
} pass_data;

int  init_dev(pass_data *s);
void stop_dev(pass_data *s);
void read_frame(pass_data *s);
void return_data(pass_data *s);

void init_mmap(pass_data *s);
void init_device(pass_data *s);
int  open_device(pass_data *s);
void start_capturing(pass_data *s);
void close_device(pass_data *s);
void stop_capturing(pass_data *s);
void stop_device(pass_data *s);
void errno_exit(const char *s);
int  xioctl(int fd, int request, void *arg);
void process_image(void *p, pass_data *s, int i);
int  convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);

#ifdef __cplusplus
}
#endif

#endif
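Before moving on to the implementation, here is a minimal, hedged sketch of how this API is meant to be driven (a plain C test harness, not part of the article's project), assuming the 640x480 YUYV setup that init_device() hard-codes and a camera at /dev/video0:

/* Hedged usage sketch of the API above (no Qt): grab a few frames and
 * print their sizes. Error handling is left to the library functions. */
#include <stdio.h>
#include "v4l2.h"

int main(void)
{
    pass_data pd;
    pd.dev_name = "/dev/video0";       /* must match the real device node */

    if (init_dev(&pd) != 0)            /* open + S_FMT + mmap + STREAMON */
        return 1;

    for (int i = 0; i < 5; ++i) {
        read_frame(&pd);               /* VIDIOC_DQBUF: blocks until a frame is ready */
        printf("frame %d: %zu bytes at index %u\n",
               i, pd.buffers[pd.buf.index].length, pd.buf.index);
        return_data(&pd);              /* VIDIOC_QBUF: hand the buffer back */
    }

    stop_dev(&pd);                     /* STREAMOFF + munmap + close */
    return 0;
}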

(2) v4l2.c

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include "v4l2.h"

#define	CLEAR(x)	memset (&x, 0, sizeof(x))

char *mpeg[] = {"./1.mpeg", "./2.mpeg", "./3.mpeg", "./4.mpeg", "./5.mpeg"};

int init_dev (pass_data *s)
{
    int flag = open_device(s);
    if (flag != 0) {
        return flag;
    }
    init_device(s);
    init_mmap(s);
    start_capturing(s);
    fprintf(stdout, "'%s' initialize finish ...\n", s->dev_name);
    return 0;
}

void stop_dev (pass_data *s)
{
    stop_capturing(s);
    stop_device(s);
    close_device(s);
    fprintf(stdout, "close '%s' ...\n", s->dev_name);
}

void process_image(void *p, pass_data *s, int i)
{
    fputc ('.', stdout);
    fflush (stdout);
    fprintf (stderr, "%s", mpeg[i]);
    int fd;
    if ((fd = open (mpeg[i], O_RDWR | O_CREAT | O_TRUNC, 0644)) == -1)
        errno_exit("open");
    if ((write (fd, (struct v4l2_buffer *)p, s->fmt.fmt.pix.sizeimage)) == -1)
        errno_exit("write");
    close (fd);
}

void read_frame(pass_data *s)
{
    CLEAR (s->buf);
    s->buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    s->buf.memory = V4L2_MEMORY_MMAP;
    if (xioctl (s->fd, VIDIOC_DQBUF, &s->buf) == -1)
    {
        switch (errno)
        {
        case EAGAIN:
            errno_exit ("VIDIOC_DQBUF");
        case EIO:
            /* could ignore EIO, see spec. */
        default:
            errno_exit ("VIDIOC_DQBUF");
        }
    }
    assert (s->buf.index < s->n_buffers);
}

void return_data (pass_data *s)
{
    if (xioctl (s->fd, VIDIOC_QBUF, &s->buf) == -1)
        errno_exit ("VIDIOC_QBUF");
}

void start_capturing(pass_data *s)
{
    unsigned int i;
    enum v4l2_buf_type type;
    for (i = 0; i < s->n_buffers; ++ i)
    {
        struct v4l2_buffer buf;
        CLEAR (buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = i;
        if (xioctl (s->fd, VIDIOC_QBUF, &buf) == -1)
            errno_exit("VIDIOC_QBUF");
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl (s->fd, VIDIOC_STREAMON, &type))
        errno_exit("VIDIOC_STREAMON");
}

void stop_capturing(pass_data *s)
{
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl (s->fd, VIDIOC_STREAMOFF, &type))
        errno_exit("VIDIOC_STREAMOFF");
}

void init_mmap(pass_data *s)
{
    struct v4l2_requestbuffers req;
    CLEAR (req);
    req.count  = 20;
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (xioctl (s->fd, VIDIOC_REQBUFS, &req))
    {
        if (EINVAL == errno)
        {
            fprintf(stderr, "%s does not support 'memory mapping'\n", s->dev_name);
            exit (EXIT_FAILURE);
        }
        else
        {
            errno_exit ("VIDIOC_REQBUFS");
        }
    }
    if (req.count < 2)
    {
        fprintf(stderr, "Insufficient buffer memory on %s\n", s->dev_name);
        exit (EXIT_FAILURE);
    }
    if ((s->buffers = (buffer *)calloc (req.count, sizeof (*s->buffers))) == NULL)
    {
        fprintf(stderr, "Out of memory\n");
        exit (EXIT_FAILURE);
    }
    for (s->n_buffers = 0; s->n_buffers < req.count; ++ s->n_buffers)
    {
        struct v4l2_buffer buf;
        CLEAR (buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = s->n_buffers;
        if (xioctl (s->fd, VIDIOC_QUERYBUF, &buf) == -1)
            errno_exit("VIDIOC_QUERYBUF");
        s->buffers[s->n_buffers].length = buf.length;
        s->buffers[s->n_buffers].start  =
            mmap(NULL,
                 buf.length,
                 PROT_READ | PROT_WRITE,
                 MAP_SHARED,
                 s->fd,
                 buf.m.offset);
        if (s->buffers[s->n_buffers].start == MAP_FAILED)
            errno_exit ("mmap");
#if _DEBUG_
        fprintf(stdout, "%d -> %p\n", s->n_buffers, s->buffers[s->n_buffers].start);
#endif
    }
}

void init_device(pass_data* s)
{
    struct v4l2_capability cap;
    struct v4l2_cropcap cropcap;
    struct v4l2_crop crop;
    unsigned int min;
    if (xioctl (s->fd, VIDIOC_QUERYCAP, &cap) == -1)
    {
        if (EINVAL == errno)
        {
            fprintf (stderr, "%s is no V4L2 device\n", s->dev_name);
            exit (EXIT_FAILURE);
        }
        else
        {
            errno_exit ("VIDIOC_QUERYCAP");
        }
    }
    if (! (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        fprintf(stderr, "%s is no video capture device\n", s->dev_name);
        exit(EXIT_FAILURE);
    }
    if (! (cap.capabilities & V4L2_CAP_STREAMING))
    {
        fprintf(stderr, "%s does not support streaming I/O\n", s->dev_name);
        exit(EXIT_FAILURE);
    }
    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl (s->fd, VIDIOC_CROPCAP, &cropcap) == 0)
    {
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;
        if (xioctl (s->fd, VIDIOC_S_CROP, &crop))
        {
            switch (errno)
            {
            case EINVAL:
                break;
            default:
                break;
            }
        }
        else
        {
            /* Errors ignored */
        }
    }
    CLEAR (s->fmt);
    s->fmt.type                 = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    s->fmt.fmt.pix.width        = 640;
    s->fmt.fmt.pix.height       = 480;
    s->fmt.fmt.pix.pixelformat  = V4L2_PIX_FMT_YUYV;
    s->fmt.fmt.pix.field        = V4L2_FIELD_INTERLACED;
    if (xioctl (s->fd, VIDIOC_S_FMT, &s->fmt) == -1)
        errno_exit("VIDIOC_S_FMT");
    min = s->fmt.fmt.pix.width * 2;
    if (s->fmt.fmt.pix.bytesperline < min)
        s->fmt.fmt.pix.bytesperline = min;
    min = s->fmt.fmt.pix.bytesperline * s->fmt.fmt.pix.height;
    if (s->fmt.fmt.pix.sizeimage < min)
        s->fmt.fmt.pix.sizeimage = min;
}

void stop_device(pass_data *s)
{
    unsigned int i;
    for (i = 0; i < s->n_buffers; ++i)
        if (munmap (s->buffers[i].start, s->buffers[i].length) == -1)
            errno_exit("munmap");
}

int open_device(pass_data *s)
{
    struct stat st;
    if (stat (s->dev_name, &st) == -1)
    {
        fprintf(stderr, "Can't identify '%s':[%d] %s\n", s->dev_name, errno, strerror(errno));
        return -1;
    }
    if (!S_ISCHR (st.st_mode))
    {
        fprintf(stderr, "%s is no device\n", s->dev_name);
        return -2;
    }
    if ((s->fd = open (s->dev_name, O_RDWR, 0)) == -1)
    {
        fprintf(stderr, "Can't open '%s': error %d, %s\n", s->dev_name, errno, strerror(errno));
        return -3;	/* -3: the caller (beginCapture) reports "can not open device" */
    }
    return 0;
}

void close_device(pass_data *s)
{
    close (s->fd);
}

int xioctl(int fd, int request, void *arg)
{
    int r;
    do
        r = ioctl(fd, request, arg);
    while (r == -1 && EINTR == errno);
    return r;
}

void errno_exit(const char *s)
{
    fprintf(stderr, "%s error %d, %s\n", s, errno, strerror(errno));
    exit(EXIT_FAILURE);
}

static int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    unsigned int pixel32 = 0;
    unsigned char *pixel = (unsigned char *)&pixel32;
    int r, g, b;
    r = y + (1.370705 * (v-128));
    g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    b = y + (1.732446 * (u-128));
    if(r > 255) r = 255;
    if(g > 255) g = 255;
    if(b > 255) b = 255;
    if(r < 0) r = 0;
    if(g < 0) g = 0;
    if(b < 0) b = 0;
    pixel[0] = r * 220 / 256;
    pixel[1] = g * 220 / 256;
    pixel[2] = b * 220 / 256;
    return pixel32;
}

int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    unsigned int in, out = 0;
    unsigned int pixel_16;
    unsigned char pixel_24[3];
    unsigned int pixel32;
    int y0, u, y1, v;
    for(in = 0; in < width * height * 2; in += 4) {
        pixel_16 = yuv[in + 3] << 24 |
                   yuv[in + 2] << 16 |
                   yuv[in + 1] <<  8 |
                   yuv[in + 0];
        y0 = (pixel_16 & 0x000000ff);
        u  = (pixel_16 & 0x0000ff00) >>  8;
        y1 = (pixel_16 & 0x00ff0000) >> 16;
        v  = (pixel_16 & 0xff000000) >> 24;
        pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
        pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
    }
    return 0;
}

(3) videodisplay.h

#ifndef VIDEODISPLAY_H
#define VIDEODISPLAY_H

#include <QtGui>
#include "ui_dialog.h"
#include "v4l2.h"

class VideoDisplay : public QDialog
{
    Q_OBJECT

public:
    VideoDisplay(QWidget *parent = 0);
    ~VideoDisplay();

private slots:
    void beginCapture();
    void flushBuff();
    void savebmpData();

private:
    Ui::Dialog ui;
    pass_data pd;
    QTimer *timer;
    unsigned char *bufrgb;
};

#endif // VIDEODISPLAY_H

(4) videodisplay.cpp

#include <string.h>
#include <stdlib.h>		/* malloc/free */
#include <time.h>		/* time/localtime */
#include "videodisplay.h"

VideoDisplay::VideoDisplay(QWidget *parent)
    : QDialog(parent), bufrgb(NULL)
{
    ui.setupUi(this);
    connect(ui.beginButton, SIGNAL(clicked()), this, SLOT(beginCapture()));
    connect(ui.saveButton, SIGNAL(clicked()), this, SLOT(savebmpData()));
    connect(ui.exitButton, SIGNAL(clicked()), this, SLOT(reject()));
    timer = new QTimer(this);
    timer->setInterval(10);
    connect(timer, SIGNAL(timeout()), this, SLOT(flushBuff()));
    pd.dev_name = "/dev/video0";
}

VideoDisplay::~VideoDisplay()
{
    if (timer->isActive()) {
        timer->stop();
    }
    if (bufrgb) {		/* release the RGB conversion buffer */
        free(bufrgb);
        bufrgb = NULL;
    }
}

void VideoDisplay::beginCapture()
{
    int flag = init_dev(&pd);
    if (flag == -1) {
        QMessageBox::information(this, tr("Tip"), tr("no device"));
        exit(1);
    } else if (flag == -2) {
        QMessageBox::information(this, tr("Tip"), tr("device is wrong"));
        exit(2);
    } else if (flag == -3) {
        QMessageBox::information(this, tr("Tip"), tr("can not open device"));
        exit(3);
    }
    timer->start();
    ui.beginButton->setDisabled(true);
}

void VideoDisplay::flushBuff()
{
    read_frame(&pd);
    if (!bufrgb) {
        bufrgb = (unsigned char *)malloc(640 * 480 * 3);
    }
    memset(bufrgb, 0, 640 * 480 * 3);
    convert_yuv_to_rgb_buffer((unsigned char *)pd.buffers[pd.buf.index].start, bufrgb, 640, 480);
    QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
    ui.displayLabel->setPixmap(QPixmap::fromImage(image));
    return_data(&pd);
}

void VideoDisplay::savebmpData()
{
    time_t now;
    struct tm *tm_time;
    time(&now);
    if (bufrgb != NULL) {		/* only save once a frame has been captured */
        tm_time = localtime(&now);
        char filename[30] = {0};
        sprintf(filename, "%4d-%02d-%02d_%02d.%02d.%02d.png",
                1900 + tm_time->tm_year, 1 + tm_time->tm_mon, tm_time->tm_mday,
                tm_time->tm_hour, tm_time->tm_min, tm_time->tm_sec);
        QImage image(bufrgb, 640, 480, QImage::Format_RGB888);
        image.save(filename);
    }
}
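The article does not list main.cpp or the Designer dialog.ui. Assuming a standard Qt 4 setup with beginButton, saveButton, exitButton and displayLabel placed on the form, a matching main.cpp could look like this sketch:

// Hedged sketch of the missing main.cpp (Qt 4 style, matching <QtGui> above).
#include <QApplication>
#include "videodisplay.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    VideoDisplay w;          // dialog with BEGIN / SAVE / EXIT buttons and displayLabel
    w.show();
    return app.exec();
}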

(5) Build and run

Click BEGIN to start the display; clicking SAVE writes a PNG of the frame currently shown in the UI into the executable's directory, e.g. 2016-01-20_17.28.36.png and 2016-01-20_17.36.08.png.
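The build files are also not shown. Assuming the project is built with qmake against Qt 4, a project file along these lines should work (file and target names are guesses based on the listings above); after that it is the usual qmake followed by make:

# Hedged sketch of the qmake project file; names are assumptions.
TEMPLATE = app
TARGET   = videodisplay
QT      += core gui

HEADERS += v4l2.h videodisplay.h
SOURCES += v4l2.c videodisplay.cpp main.cpp
FORMS   += dialog.ui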

 

III. Summary

(1) V4L2 issues differ from system to system; adjusting parameters solves some of them, but a few I could not resolve due to limited ability.
(2) This article is only the starting point of an earlier graduation project; it continues with image conversion and processing and a series of automatic-recognition features, after which the captured images can be stored in a database.
(3) If you have questions or suggestions, please leave a comment. Thanks!
