uvc-录制并显示到界面-QT

2024-05-15 22:38
文章标签 显示 qt 界面 录制 uvc

本文主要介绍如何采集 UVC 摄像头画面、进行录制并实时显示到 Qt 界面,希望能为大家解决相关编程问题提供一定的参考价值,需要的开发者们可随小编一起学习!

效果:
（效果截图：摄像头画面实时显示在 Qt 主窗口中；原文配图在转载时丢失）
代码:

~/code/MediaPlayer$ tree 
.
├── main.cpp
├── mediaplayer.cpp
├── mediaplayer.h
├── MediaPlayer.pro
└── MediaPlayer.pro.user

MediaPlayer.pro

#-------------------------------------------------
#
# Project created by QtCreator 2020-08-17T13:43:30
#
#-------------------------------------------------

# Fixed: the pasted file had lost its line breaks, which left "QT += core gui"
# inside the banner comment above, so the modules were never enabled.
QT       += core gui

greaterThan(QT_MAJOR_VERSION, 4): QT += widgets

TARGET = MediaPlayer
TEMPLATE = app

# The following define makes your compiler emit warnings if you use
# any feature of Qt which has been marked as deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS

# You can also make your code fail to compile if you use deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0

SOURCES += main.cpp \
    mediaplayer.cpp

HEADERS  += mediaplayer.h

# JPEG decoding (libjpeg-dev)
#LIBS += -L/usr/lib/x86_64-linux-gnu
LIBS += -ljpeg

win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -ljpeg
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -ljpeg
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -ljpeg

INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu

# H.264 encoding (libx264)
win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -lx264
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -lx264
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -lx264

INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu

# ALSA audio capture (libasound)
win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -lasound
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -lasound
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -lasound

INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu

main.cpp

#include "mediaplayer.h"
#include <QApplication>

// Application entry point: create the Qt application object and show the main
// window; the MediaPlayer constructor starts the camera-capture thread.
// (Fixed: the pasted source had "#include <QApplication>int main(...)" fused
// onto one line, which is an invalid preprocessing directive.)
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    MediaPlayer w;
    w.show();
    return a.exec();
}

mediaplayer.h

#ifndef MEDIAPLAYER_H
#define MEDIAPLAYER_H#include <QMainWindow>
#include <QDebug>
#include <QMutexLocker>
#include <QPainter>
/** capturing from UVC cam* requires: libjpeg-dev* build: gcc -std=c99 capture.c -ljpeg -o capture*/
extern "C" {
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <errno.h>#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <asm/types.h>
#include <linux/videodev2.h>#include <sys/time.h>
#include <sys/types.h>
#include <unistd.h>#include <jpeglib.h>
#include <x264.h>
#include <linux/videodev2.h>
#include <alsa/asoundlib.h>}
#include <iostream>
using namespace std;
#include <fstream>
class MediaPlayer : public QMainWindow
{Q_OBJECT
private:QMutex m_mutex;QImage m_image;public:MediaPlayer(QWidget *parent = 0);void onRecvFrame( char *data, int width, int height);void paintEvent(QPaintEvent *);void PaintImage(QPainter &painter);~MediaPlayer();
};#endif // MEDIAPLAYER_H

mediaplayer.cpp

#include "mediaplayer.h"
// Report the current errno with a context tag on stderr, then abort the process.
void quit(const char * msg)
{
    fprintf(stderr, "[%s] %d: %s\n", msg, errno, strerror(errno));
    exit(EXIT_FAILURE);
}

// ioctl wrapper that retries (up to 100 attempts) when the call is
// interrupted by a signal (EINTR); any other outcome is returned immediately.
int xioctl(int fd, int request, void* arg)
{
    for (int attempt = 0; attempt < 100; attempt++) {
        int rc = ioctl(fd, request, arg);
        if (rc != -1 || errno != EINTR)
            return rc;
    }
    return -1;
}

// One memory-mapped V4L2 capture buffer.
typedef struct {
    uint8_t* start;   // mmap'ed data
    size_t length;    // size in bytes
} buffer_t;
typedef struct {//add by zykx264_param_t        *param;x264_t              *handle;x264_picture_t      *picture;	//一个视频序列中每帧特点x264_nal_t          *nal;
} X264Encoder;
typedef struct {X264Encoder             encoder;/*add by zyk*/unsigned char       *h264_buf;      //encoded bufferunsigned int encodedLength;int fd;uint32_t width;uint32_t height;size_t buffer_count;buffer_t* buffers;buffer_t head;
} camera_t;camera_t* camera_open(const char * device, uint32_t width, uint32_t height)
{int fd = open(device, O_RDWR | O_NONBLOCK, 0);if (fd == -1) quit("camera_open ");camera_t* camera = (camera_t*)malloc(sizeof (camera_t));camera->fd = fd;camera->width = width;camera->height = height;camera->buffer_count = 0;camera->buffers = NULL;camera->head.length = 0;camera->head.start = NULL;return camera;
}void camera_init(camera_t* camera) {struct v4l2_capability cap;if (xioctl(camera->fd, VIDIOC_QUERYCAP, &cap) == -1) quit("VIDIOC_QUERYCAP");if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) quit("no capture");if (!(cap.capabilities & V4L2_CAP_STREAMING)) quit("no streaming");struct v4l2_cropcap cropcap;memset(&cropcap, 0, sizeof cropcap);cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;if (xioctl(camera->fd, VIDIOC_CROPCAP, &cropcap) == 0) {struct v4l2_crop crop;crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;crop.c = cropcap.defrect;if (xioctl(camera->fd, VIDIOC_S_CROP, &crop) == -1) {// cropping not supported}}struct v4l2_format format;memset(&format, 0, sizeof format);format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;format.fmt.pix.width = camera->width;format.fmt.pix.height = camera->height;format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;format.fmt.pix.field = V4L2_FIELD_NONE;if (xioctl(camera->fd, VIDIOC_S_FMT, &format) == -1) quit("VIDIOC_S_FMT");struct v4l2_requestbuffers req;memset(&req, 0, sizeof req);req.count = 4;req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;req.memory = V4L2_MEMORY_MMAP;if (xioctl(camera->fd, VIDIOC_REQBUFS, &req) == -1) quit("VIDIOC_REQBUFS");camera->buffer_count = req.count;camera->buffers = (buffer_t*)calloc(req.count, sizeof (buffer_t));size_t buf_max = 0;for (size_t i = 0; i < camera->buffer_count; i++) {struct v4l2_buffer buf;memset(&buf, 0, sizeof buf);buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;buf.memory = V4L2_MEMORY_MMAP;buf.index = i;if (xioctl(camera->fd, VIDIOC_QUERYBUF, &buf) == -1)quit("VIDIOC_QUERYBUF");if (buf.length > buf_max) buf_max = buf.length;camera->buffers[i].length = buf.length;camera->buffers[i].start =( uint8_t*)mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,camera->fd, buf.m.offset);if (camera->buffers[i].start == MAP_FAILED) quit("mmap");}camera->head.start =  ( uint8_t*)malloc(buf_max);
}void camera_start(camera_t* camera)
{for (size_t i = 0; i < camera->buffer_count; i++) {struct v4l2_buffer buf;memset(&buf, 0, sizeof buf);buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;buf.memory = V4L2_MEMORY_MMAP;buf.index = i;if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) quit("VIDIOC_QBUF");}enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;if (xioctl(camera->fd, VIDIOC_STREAMON, &type) == -1)quit("VIDIOC_STREAMON");
}void camera_stop(camera_t* camera)
{enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;if (xioctl(camera->fd, VIDIOC_STREAMOFF, &type) == -1)quit("VIDIOC_STREAMOFF");
}void camera_finish(camera_t* camera)
{for (size_t i = 0; i < camera->buffer_count; i++) {munmap(camera->buffers[i].start, camera->buffers[i].length);}free(camera->buffers);camera->buffer_count = 0;camera->buffers = NULL;free(camera->head.start);camera->head.length = 0;camera->head.start = NULL;
}void camera_close(camera_t* camera)
{if (close(camera->fd) == -1) quit("close");free(camera);
}void h264_encoder_init(X264Encoder *encoder,int width,int height)
{encoder->param = (x264_param_t *) malloc(sizeof(x264_param_t));encoder->picture = (x264_picture_t *) malloc(sizeof(x264_picture_t));x264_param_default(encoder->param);x264_param_default_preset(encoder->param, "veryfast", "zerolatency");encoder->param->i_width = width;encoder->param->i_height = height;encoder->param->rc.i_lookahead = 0;	//i帧向前缓冲区encoder->param->i_fps_num = 15;encoder->param->i_fps_den = 1;encoder->param->b_annexb = 1;encoder->param->i_keyint_max=100;encoder->param->i_keyint_min=90;encoder->param->i_bframe=3;encoder->param->b_repeat_headers=1;x264_param_apply_profile(encoder->param, "baseline");	//使用baselineif ((encoder->handle = x264_encoder_open(encoder->param)) == 0) {printf("x264_encoder_open error!\n");return;}x264_picture_alloc(encoder->picture, X264_CSP_I420, encoder->param->i_width,encoder->param->i_height);encoder->picture->img.i_csp = X264_CSP_I420;encoder->picture->img.i_plane = 3;return;}
static int pts_time = 0;   // running presentation timestamp: one tick per encoded frame

// Convert one packed YUYV frame (`in`) to I420 in encoder->picture, encode it
// with x264 and concatenate the resulting NAL units into `out`.
// type: 0 = P, 1 = IDR, 2 = I, anything else = let the encoder decide.
// Returns the number of bytes written to `out`, or -1 on encoder failure.
// Fixes vs the pasted original: the "//int i = 0;" comment had swallowed the
// "uint8_t *p_out = out;" declaration; the unused local `is_y` is removed; the
// per-line printf(__FUNCTION__, __LINE__) trace spam (several per frame) is dropped.
int h264_compress_frame(X264Encoder * encoder, int type, uint8_t * in, uint8_t * out)
{
    x264_picture_t pic_out;
    int nNal = -1;
    int result = 0;
    uint8_t *p_out = out;
    unsigned int i, j;
    unsigned int base_h;
    char *y = (char *)encoder->picture->img.plane[0];
    char *u = (char *)encoder->picture->img.plane[1];
    char *v = (char *)encoder->picture->img.plane[2];
    int is_u = 1;
    int y_index = 0, u_index = 0, v_index = 0;
    int yuv422_length = 2 * encoder->param->i_width * encoder->param->i_height;

    // Luma: every even byte of the YUYV stream.
    for (i = 0; i < (unsigned int)yuv422_length; i += 2) {
        *(y + y_index) = *(in + i);
        y_index++;
    }
    // Chroma 4:2:2 -> 4:2:0: take every second row, odd bytes alternate U/V.
    for (i = 0; i < (unsigned int)encoder->param->i_height; i += 2) {
        base_h = i * encoder->param->i_width * 2;
        for (j = base_h + 1; j < base_h + encoder->param->i_width * 2; j += 2) {
            if (is_u) {
                *(u + u_index) = *(in + j);
                u_index++;
                is_u = 0;
            } else {
                *(v + v_index) = *(in + j);
                v_index++;
                is_u = 1;
            }
        }
    }

    switch (type) {
    case 0:
        encoder->picture->i_type = X264_TYPE_P;
        break;
    case 1:
        encoder->picture->i_type = X264_TYPE_IDR;
        break;
    case 2:
        encoder->picture->i_type = X264_TYPE_I;
        break;
    default:
        encoder->picture->i_type = X264_TYPE_AUTO;
        break;
    }

    encoder->picture->i_pts = pts_time;
    if (x264_encoder_encode(encoder->handle, &(encoder->nal), &nNal,
                            encoder->picture, &pic_out) < 0) {
        printf("x264_encoder_encode error,type:%08x!\n", encoder->picture->img.i_csp);
        return -1;
    }
    for (i = 0; i < (unsigned int)nNal; i++) {
        memcpy(p_out, encoder->nal[i].p_payload, encoder->nal[i].i_payload);
        p_out += encoder->nal[i].i_payload;
        result += encoder->nal[i].i_payload;
    }
    pts_time = pts_time + 1;
    return result;
}

// Raw Annex-B H.264 dump: every encoded frame is appended here.
ofstream f("binary.h264", ios::binary);

// Encode one captured YUYV frame into cam->h264_buf and append it to the dump
// file. `yuv_length` is unused: the frame size is implied by the encoder's
// configured width/height.
static void h264_encode_frame(camera_t* cam, uint8_t * yuv_frame, size_t yuv_length)
{
    (void)yuv_length;
    int encLength = h264_compress_frame(&cam->encoder, -1, yuv_frame, cam->h264_buf);
    if (encLength < 0) {
        // Fixed: the original stored -1 into the unsigned encodedLength and
        // passed it to f.write(), producing a huge bogus write on failure.
        cam->encodedLength = 0;
        return;
    }
    cam->encodedLength = encLength;
    f.write((char*)cam->h264_buf, cam->encodedLength);
}
// ALSA audio-capture state. Declared for the recording path; not exercised by
// the video code in this file.
snd_pcm_t *captureHandle;
int retValue;
char *readBuffer;
snd_pcm_uframes_t frames = 1024;

// Raw PCM dump target for audio recording.
ofstream a("binary.pcm", ios::binary);

// Dequeue the next filled V4L2 buffer, copy its payload into camera->head,
// then hand the buffer back to the kernel.
// Returns TRUE on success, FALSE if either ioctl fails.
// Fixed: in the pasted source the trailing "//  h264_encode_frame(...)"
// comment had swallowed the final "return TRUE;", so the function fell off
// the end (undefined behaviour). The printf trace spam is also removed.
int camera_capture(camera_t* camera)
{
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof buf);
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (xioctl(camera->fd, VIDIOC_DQBUF, &buf) == -1) return FALSE;
    memcpy(camera->head.start, camera->buffers[buf.index].start, buf.bytesused);
    camera->head.length = buf.bytesused;
    if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) return FALSE;
//  h264_encode_frame(camera, camera->head.start, camera->head.length);
    return TRUE;
}
// Switch the device into streaming (recording) mode.
// NOTE(review): the return convention is inverted relative to camera_capture():
// TRUE is returned on ioctl FAILURE and FALSE on success. Preserved as-is so
// any existing caller keeps working — verify the intent before relying on it.
int camera_record(camera_t* camera)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int rc = xioctl(camera->fd, VIDIOC_STREAMON, &type);
    if (rc < 0) {
        printf("VIDIOC_STREAMON\n");
        return TRUE;
    }
    return FALSE;
}
// Take the device out of streaming mode.
// NOTE(review): same inverted convention as camera_record() — TRUE on ioctl
// failure, FALSE on success. Preserved unchanged.
int camera_stop_record(camera_t* camera)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int rc = xioctl(camera->fd, VIDIOC_STREAMOFF, &type);
    if (rc < 0) {
        printf("VIDIOC_STREAMOFF\n");
        return TRUE;
    }
    return FALSE;
}
// Wait up to `timeout` for the device to become readable, then capture a frame.
// Returns FALSE on timeout, otherwise camera_capture()'s result; aborts on
// select() error.
int camera_frame(camera_t* camera, struct timeval timeout) {
    fd_set fds;
    FD_ZERO(&fds);
    FD_SET(camera->fd, &fds);
    int r = select(camera->fd + 1, &fds, 0, 0, &timeout);
    if (r == -1) quit("select");
    if (r == 0) return FALSE;
    return camera_capture(camera);
}

// Compress a packed RGB24 image to JPEG and write it to `dest`.
// Improvement: the original heap-allocated and deep-copied every scanline;
// libjpeg only reads the rows during jpeg_write_scanlines, so the row pointers
// can point straight into the caller's buffer.
void jpeg(FILE* dest, uint8_t* rgb, uint32_t width, uint32_t height, int quality)
{
    JSAMPARRAY image = (JSAMPARRAY)calloc(height, sizeof (JSAMPROW));
    for (size_t i = 0; i < height; i++) {
        image[i] = rgb + (size_t)i * width * 3;
    }
    struct jpeg_compress_struct compress;
    struct jpeg_error_mgr error;
    compress.err = jpeg_std_error(&error);
    jpeg_create_compress(&compress);
    jpeg_stdio_dest(&compress, dest);
    compress.image_width = width;
    compress.image_height = height;
    compress.input_components = 3;
    compress.in_color_space = JCS_RGB;
    jpeg_set_defaults(&compress);
    jpeg_set_quality(&compress, quality, TRUE);
    jpeg_start_compress(&compress, TRUE);
    jpeg_write_scanlines(&compress, image, height);
    jpeg_finish_compress(&compress);
    jpeg_destroy_compress(&compress);
    free(image);
}

// Clamp v into [min, max].
int minmax(int min, int v, int max)
{
    return (v < min) ? min : (max < v) ? max : v;
}

// Convert one YUYV (YUV 4:2:2) frame to a freshly allocated packed RGB24
// buffer (caller frees). Each Y0-U-Y1-V macropixel yields two RGB pixels;
// the constants are fixed-point YCbCr->RGB coefficients scaled by 256.
// NOTE(review): the green term uses +88*v - 183*u, while the common BT.601
// form is -88*u - 183*v — confirm against expected colours before changing.
uint8_t* yuyv2rgb(uint8_t* yuyv, uint32_t width, uint32_t height)
{
    uint8_t* rgb = (uint8_t*)calloc(width * height * 3, sizeof (uint8_t));
    for (size_t i = 0; i < height; i++) {
        for (size_t j = 0; j < width; j += 2) {
            size_t index = i * width + j;
            int y0 = yuyv[index * 2 + 0] << 8;
            int u  = yuyv[index * 2 + 1] - 128;
            int y1 = yuyv[index * 2 + 2] << 8;
            int v  = yuyv[index * 2 + 3] - 128;
            rgb[index * 3 + 0] = minmax(0, (y0 + 359 * v) >> 8, 255);
            rgb[index * 3 + 1] = minmax(0, (y0 + 88 * v - 183 * u) >> 8, 255);
            rgb[index * 3 + 2] = minmax(0, (y0 + 454 * u) >> 8, 255);
            rgb[index * 3 + 3] = minmax(0, (y1 + 359 * v) >> 8, 255);
            rgb[index * 3 + 4] = minmax(0, (y1 + 88 * v - 183 * u) >> 8, 255);
            rgb[index * 3 + 5] = minmax(0, (y1 + 454 * u) >> 8, 255);
        }
    }
    return rgb;
}
// Create a thread in the detached state: it cannot be joined and releases its
// resources automatically when it exits.
// thread may be NULL if the caller does not need the thread id.
// Returns 0 on success, non-zero on failure.
// Fixes vs the original:
//  - it called pthread_detach(thread_t) on a local id that is UNINITIALISED
//    whenever the caller passed a non-NULL `thread`, and the thread was
//    already created detached via the attribute — double-detach is undefined
//    behaviour. The redundant call is removed.
//  - pthread_create() returns a positive errno value on failure, so the old
//    "ret < 0" check never fired; use "ret != 0".
//  - on pthread_attr_setdetachstate failure the old code jumped to error with
//    ret still 0 and reported success.
int detachThreadCreate(pthread_t *thread, void * start_routine, void *arg)
{
    pthread_attr_t attr;
    pthread_t thread_t;
    int ret = 0;
    if (thread == NULL) {
        thread = &thread_t;   // caller does not want the id; park it locally
    }
    // Initialise the thread attribute object.
    if (pthread_attr_init(&attr)) {
        printf("pthread_attr_init fail!\n");
        return -1;
    }
    // Request the detached state up front.
    if (pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED)) {
        printf("pthread_attr_setdetachstate fail!\n");
        ret = -1;
        goto error;
    }
    ret = pthread_create(thread, &attr, (void *(*)(void *))start_routine, arg);
    if (ret != 0) {
        printf("pthread_create fail!\n");
        goto error;
    }
error:
    pthread_attr_destroy(&attr);
    return ret;
}
void MediaPlayer::onRecvFrame( char *data, int width, int height)
{QMutexLocker locker(&m_mutex);//QImage::Format_RGB888等于24const int imageSize = (((width * 24 + 31) >> 5) << 2) * height;uint8_t *dataTmp = new uint8_t[imageSize];//将数据拷贝到一个临时内存中,因为OnRecvFrame是异步执行的,data可能已经无效。try{memcpy(dataTmp, data, imageSize);}catch (...){delete[] dataTmp;return;}//转化rgb数据为可显示的图像对象。QImage image = QImage(dataTmp, width, height, QImage::Format_RGB888);if (image.isNull()){qDebug()<<"Receive frame error, width:%d, height:%d."<<width<<height;return;}m_image = image.copy(0, 0, width, height);delete[] dataTmp;update();}
// Bridge from the capture thread to the GUI: `userData` carries the
// MediaPlayer pointer; the RGB frame is forwarded to onRecvFrame().
void updateVideo2GUI(void* rgbData, unsigned long userData, int width, int height)
{
    MediaPlayer* window = (MediaPlayer*)userData;
    if (window == NULL)
        return;
    window->onRecvFrame((char*)rgbData, width, height);
}
// Draw the latest frame at the window's top-left corner, at its native size.
// Takes the mutex so the capture thread cannot swap m_image mid-draw.
void MediaPlayer::PaintImage(QPainter &painter)
{
    QMutexLocker locker(&m_mutex);
    const QRect target(0, 0, m_image.width(), m_image.height());
    painter.drawImage(target, m_image);
}
void MediaPlayer::paintEvent(QPaintEvent *)
{QPainter painter(this);if (!m_image.isNull()){PaintImage(painter);}}void* doRecordH264Thread(void* arg)
{MediaPlayer* pMediaPlayer=(MediaPlayer*)arg;if(pMediaPlayer!=NULL){
//        camClient->doLoginAuthentication();qDebug()<<"doRecordH264Thread";camera_t* camera = camera_open("/dev/video0", 640, 480);qDebug()<<"camera_open\n";camera_init(camera);qDebug()<<"camera_init\n";camera_start(camera);h264_encoder_init(&camera->encoder,camera->width,camera->height);camera->h264_buf = (unsigned char*)malloc(sizeof(uint8_t) * camera->width * camera->height * 4);//如果未分配空间则会内存出错memset(camera->h264_buf,0,sizeof(uint8_t) * camera->width * camera->height * 4);struct timeval timeout;timeout.tv_sec = 1;timeout.tv_usec = 0;printf("%s %d\n",__FUNCTION__,__LINE__);/* skip 5 frames for booting a cam */for (int i = 0; i < 5; i++) {printf("%s %d\n",__FUNCTION__,__LINE__);camera_frame(camera, timeout);printf("%s %d\n",__FUNCTION__,__LINE__);}#if 1while (1){/* code */camera_frame(camera, timeout);unsigned char* rgb =yuyv2rgb(camera->head.start, camera->width, camera->height);updateVideo2GUI((void*)rgb,(unsigned long)pMediaPlayer,camera->width,camera->height);printf("%s %d\n",__FUNCTION__,__LINE__);}#endif#if 0unsigned char* rgb =yuyv2rgb(camera->head.start, camera->width, camera->height);FILE* out = fopen("result.jpg", "w");jpeg(out, rgb, camera->width, camera->height, 100);updateVideo2GUI((void*)rgb,pMediaPlayer);fclose(out);free(rgb);#endifprintf("%s %d\n",__FUNCTION__,__LINE__);camera_stop(camera);camera_finish(camera);camera_close(camera);}return NULL;
}MediaPlayer::MediaPlayer(QWidget *parent): QMainWindow(parent)
{detachThreadCreate(NULL,(void*)doRecordH264Thread,(void *)this);
//    return 0;
}MediaPlayer::~MediaPlayer()
{}

这篇关于uvc-录制并显示到界面-QT的文章就介绍到这儿,希望我们推荐的文章对编程师们有所帮助!



http://www.chinasem.cn/article/993123

相关文章

Python中构建终端应用界面利器Blessed模块的使用

《Python中构建终端应用界面利器Blessed模块的使用》Blessed库作为一个轻量级且功能强大的解决方案,开始在开发者中赢得口碑,今天,我们就一起来探索一下它是如何让终端UI开发变得轻松而高... 目录一、安装与配置:简单、快速、无障碍二、基本功能:从彩色文本到动态交互1. 显示基本内容2. 创建链

基于Qt开发一个简单的OFD阅读器

《基于Qt开发一个简单的OFD阅读器》这篇文章主要为大家详细介绍了如何使用Qt框架开发一个功能强大且性能优异的OFD阅读器,文中的示例代码讲解详细,有需要的小伙伴可以参考一下... 目录摘要引言一、OFD文件格式解析二、文档结构解析三、页面渲染四、用户交互五、性能优化六、示例代码七、未来发展方向八、结论摘要

电脑显示hdmi无信号怎么办? 电脑显示器无信号的终极解决指南

《电脑显示hdmi无信号怎么办?电脑显示器无信号的终极解决指南》HDMI无信号的问题却让人头疼不已,遇到这种情况该怎么办?针对这种情况,我们可以采取一系列步骤来逐一排查并解决问题,以下是详细的方法... 无论你是试图为笔记本电脑设置多个显示器还是使用外部显示器,都可能会弹出“无HDMI信号”错误。此消息可能

python与QT联合的详细步骤记录

《python与QT联合的详细步骤记录》:本文主要介绍python与QT联合的详细步骤,文章还展示了如何在Python中调用QT的.ui文件来实现GUI界面,并介绍了多窗口的应用,文中通过代码介绍... 目录一、文章简介二、安装pyqt5三、GUI页面设计四、python的使用python文件创建pytho

QT实现TCP客户端自动连接

《QT实现TCP客户端自动连接》这篇文章主要为大家详细介绍了QT中一个TCP客户端自动连接的测试模型,文中的示例代码讲解详细,感兴趣的小伙伴可以跟随小编一起学习一下... 目录版本 1:没有取消按钮 测试效果测试代码版本 2:有取消按钮测试效果测试代码版本 1:没有取消按钮 测试效果缺陷:无法手动停

基于Qt实现系统主题感知功能

《基于Qt实现系统主题感知功能》在现代桌面应用程序开发中,系统主题感知是一项重要的功能,它使得应用程序能够根据用户的系统主题设置(如深色模式或浅色模式)自动调整其外观,Qt作为一个跨平台的C++图形用... 目录【正文开始】一、使用效果二、系统主题感知助手类(SystemThemeHelper)三、实现细节

Qt实现文件的压缩和解压缩操作

《Qt实现文件的压缩和解压缩操作》这篇文章主要为大家详细介绍了如何使用Qt库中的QZipReader和QZipWriter实现文件的压缩和解压缩功能,文中的示例代码简洁易懂,需要的可以参考一下... 目录一、实现方式二、具体步骤1、在.pro文件中添加模块gui-private2、通过QObject方式创建

Qt QWidget实现图片旋转动画

《QtQWidget实现图片旋转动画》这篇文章主要为大家详细介绍了如何使用了Qt和QWidget实现图片旋转动画效果,文中的示例代码讲解详细,感兴趣的小伙伴可以跟随小编一起学习一下... 一、效果展示二、源码分享本例程通过QGraphicsView实现svg格式图片旋转。.hpjavascript

第10章 中断和动态时钟显示

第10章 中断和动态时钟显示 从本章开始,按照书籍的划分,第10章开始就进入保护模式(Protected Mode)部分了,感觉从这里开始难度突然就增加了。 书中介绍了为什么有中断(Interrupt)的设计,中断的几种方式:外部硬件中断、内部中断和软中断。通过中断做了一个会走的时钟和屏幕上输入字符的程序。 我自己理解中断的一些作用: 为了更好的利用处理器的性能。协同快速和慢速设备一起工作

嵌入式QT开发:构建高效智能的嵌入式系统

摘要: 本文深入探讨了嵌入式 QT 相关的各个方面。从 QT 框架的基础架构和核心概念出发,详细阐述了其在嵌入式环境中的优势与特点。文中分析了嵌入式 QT 的开发环境搭建过程,包括交叉编译工具链的配置等关键步骤。进一步探讨了嵌入式 QT 的界面设计与开发,涵盖了从基本控件的使用到复杂界面布局的构建。同时也深入研究了信号与槽机制在嵌入式系统中的应用,以及嵌入式 QT 与硬件设备的交互,包括输入输出设