uvc-录制并显示到界面-QT

2024-05-15 22:38
文章标签 显示 qt 界面 录制 uvc

本文主要是介绍uvc-录制并显示到界面-QT,希望对大家解决编程问题提供一定的参考价值,需要的开发者们随着小编来一起学习吧!

效果:
在这里插入图片描述
代码:

~/code/MediaPlayer$ tree 
.
├── main.cpp
├── mediaplayer.cpp
├── mediaplayer.h
├── MediaPlayer.pro
└── MediaPlayer.pro.user

MediaPlayer.pro

#-------------------------------------------------
#
# Project created by QtCreator 2020-08-17T13:43:30
#
#-------------------------------------------------

QT += core gui

greaterThan(QT_MAJOR_VERSION, 4): QT += widgets

TARGET = MediaPlayer
TEMPLATE = app

# The following define makes your compiler emit warnings if you use
# any feature of Qt which has been marked as deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS

# You can also make your code fail to compile if you use deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0

SOURCES += main.cpp \
    mediaplayer.cpp

HEADERS += mediaplayer.h

# Link against libjpeg (JPEG output), libx264 (H.264 encoding) and
# ALSA/libasound (audio capture).
#LIBS += -L/usr/lib/x86_64-linux-gnu
LIBS += -ljpeg

win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -ljpeg
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -ljpeg
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -ljpeg

INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu

win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -lx264
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -lx264
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -lx264

INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu

win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -lasound
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -lasound
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -lasound

INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu

main.cpp

#include "mediaplayer.h"

#include <QApplication>

// Application entry point: create the Qt application, show the player
// window (which starts the capture thread in its constructor), and run
// the event loop.
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    MediaPlayer w;
    w.show();
    return a.exec();
}

mediaplayer.h

#ifndef MEDIAPLAYER_H
#define MEDIAPLAYER_H#include <QMainWindow>
#include <QDebug>
#include <QMutexLocker>
#include <QPainter>
/** capturing from UVC cam* requires: libjpeg-dev* build: gcc -std=c99 capture.c -ljpeg -o capture*/
extern "C" {
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <errno.h>#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <asm/types.h>
#include <linux/videodev2.h>#include <sys/time.h>
#include <sys/types.h>
#include <unistd.h>#include <jpeglib.h>
#include <x264.h>
#include <linux/videodev2.h>
#include <alsa/asoundlib.h>}
#include <iostream>
using namespace std;
#include <fstream>
class MediaPlayer : public QMainWindow
{Q_OBJECT
private:QMutex m_mutex;QImage m_image;public:MediaPlayer(QWidget *parent = 0);void onRecvFrame( char *data, int width, int height);void paintEvent(QPaintEvent *);void PaintImage(QPainter &painter);~MediaPlayer();
};#endif // MEDIAPLAYER_H

mediaplayer.cpp

#include "mediaplayer.h"
// Print the given tag plus errno and its message, then abort the process.
// Used as a fatal-error handler throughout the capture code.
void quit(const char *msg)
{
    fprintf(stderr, "[%s] %d: %s\n", msg, errno, strerror(errno));
    exit(EXIT_FAILURE);
}

// ioctl wrapper that retries (up to 100 times) when the call is
// interrupted by a signal (EINTR). Returns the ioctl result, or -1.
int xioctl(int fd, int request, void *arg)
{
    for (int i = 0; i < 100; i++) {
        int r = ioctl(fd, request, arg);
        if (r != -1 || errno != EINTR) return r;
    }
    return -1;
}

// One memory-mapped V4L2 capture buffer.
typedef struct {
    uint8_t *start;   // mmap'ed pointer into driver memory
    size_t length;    // buffer size in bytes
} buffer_t;
typedef struct {//add by zykx264_param_t        *param;x264_t              *handle;x264_picture_t      *picture;	//一个视频序列中每帧特点x264_nal_t          *nal;
} X264Encoder;
typedef struct {X264Encoder             encoder;/*add by zyk*/unsigned char       *h264_buf;      //encoded bufferunsigned int encodedLength;int fd;uint32_t width;uint32_t height;size_t buffer_count;buffer_t* buffers;buffer_t head;
} camera_t;camera_t* camera_open(const char * device, uint32_t width, uint32_t height)
{int fd = open(device, O_RDWR | O_NONBLOCK, 0);if (fd == -1) quit("camera_open ");camera_t* camera = (camera_t*)malloc(sizeof (camera_t));camera->fd = fd;camera->width = width;camera->height = height;camera->buffer_count = 0;camera->buffers = NULL;camera->head.length = 0;camera->head.start = NULL;return camera;
}void camera_init(camera_t* camera) {struct v4l2_capability cap;if (xioctl(camera->fd, VIDIOC_QUERYCAP, &cap) == -1) quit("VIDIOC_QUERYCAP");if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) quit("no capture");if (!(cap.capabilities & V4L2_CAP_STREAMING)) quit("no streaming");struct v4l2_cropcap cropcap;memset(&cropcap, 0, sizeof cropcap);cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;if (xioctl(camera->fd, VIDIOC_CROPCAP, &cropcap) == 0) {struct v4l2_crop crop;crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;crop.c = cropcap.defrect;if (xioctl(camera->fd, VIDIOC_S_CROP, &crop) == -1) {// cropping not supported}}struct v4l2_format format;memset(&format, 0, sizeof format);format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;format.fmt.pix.width = camera->width;format.fmt.pix.height = camera->height;format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;format.fmt.pix.field = V4L2_FIELD_NONE;if (xioctl(camera->fd, VIDIOC_S_FMT, &format) == -1) quit("VIDIOC_S_FMT");struct v4l2_requestbuffers req;memset(&req, 0, sizeof req);req.count = 4;req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;req.memory = V4L2_MEMORY_MMAP;if (xioctl(camera->fd, VIDIOC_REQBUFS, &req) == -1) quit("VIDIOC_REQBUFS");camera->buffer_count = req.count;camera->buffers = (buffer_t*)calloc(req.count, sizeof (buffer_t));size_t buf_max = 0;for (size_t i = 0; i < camera->buffer_count; i++) {struct v4l2_buffer buf;memset(&buf, 0, sizeof buf);buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;buf.memory = V4L2_MEMORY_MMAP;buf.index = i;if (xioctl(camera->fd, VIDIOC_QUERYBUF, &buf) == -1)quit("VIDIOC_QUERYBUF");if (buf.length > buf_max) buf_max = buf.length;camera->buffers[i].length = buf.length;camera->buffers[i].start =( uint8_t*)mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,camera->fd, buf.m.offset);if (camera->buffers[i].start == MAP_FAILED) quit("mmap");}camera->head.start =  ( uint8_t*)malloc(buf_max);
}void camera_start(camera_t* camera)
{for (size_t i = 0; i < camera->buffer_count; i++) {struct v4l2_buffer buf;memset(&buf, 0, sizeof buf);buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;buf.memory = V4L2_MEMORY_MMAP;buf.index = i;if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) quit("VIDIOC_QBUF");}enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;if (xioctl(camera->fd, VIDIOC_STREAMON, &type) == -1)quit("VIDIOC_STREAMON");
}void camera_stop(camera_t* camera)
{enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;if (xioctl(camera->fd, VIDIOC_STREAMOFF, &type) == -1)quit("VIDIOC_STREAMOFF");
}void camera_finish(camera_t* camera)
{for (size_t i = 0; i < camera->buffer_count; i++) {munmap(camera->buffers[i].start, camera->buffers[i].length);}free(camera->buffers);camera->buffer_count = 0;camera->buffers = NULL;free(camera->head.start);camera->head.length = 0;camera->head.start = NULL;
}void camera_close(camera_t* camera)
{if (close(camera->fd) == -1) quit("close");free(camera);
}void h264_encoder_init(X264Encoder *encoder,int width,int height)
{encoder->param = (x264_param_t *) malloc(sizeof(x264_param_t));encoder->picture = (x264_picture_t *) malloc(sizeof(x264_picture_t));x264_param_default(encoder->param);x264_param_default_preset(encoder->param, "veryfast", "zerolatency");encoder->param->i_width = width;encoder->param->i_height = height;encoder->param->rc.i_lookahead = 0;	//i帧向前缓冲区encoder->param->i_fps_num = 15;encoder->param->i_fps_den = 1;encoder->param->b_annexb = 1;encoder->param->i_keyint_max=100;encoder->param->i_keyint_min=90;encoder->param->i_bframe=3;encoder->param->b_repeat_headers=1;x264_param_apply_profile(encoder->param, "baseline");	//使用baselineif ((encoder->handle = x264_encoder_open(encoder->param)) == 0) {printf("x264_encoder_open error!\n");return;}x264_picture_alloc(encoder->picture, X264_CSP_I420, encoder->param->i_width,encoder->param->i_height);encoder->picture->img.i_csp = X264_CSP_I420;encoder->picture->img.i_plane = 3;return;}
// Presentation timestamp counter, incremented once per encoded frame.
static int pts_time = 0;

// Convert one packed YUYV (4:2:2) frame to the encoder's I420 picture,
// encode it with x264, and append all resulting NAL units to `out`.
// type selects the frame type: 0 = P, 1 = IDR, 2 = I, anything else = auto.
// Returns the total number of encoded bytes, or -1 on encoder failure.
int h264_compress_frame(X264Encoder *encoder, int type, uint8_t *in, uint8_t *out)
{
    x264_picture_t pic_out;
    int nNal = -1;
    int result = 0;
    uint8_t *p_out = out;
    int i, j;          // fix: signed, to match the signed bounds they compare against
    int base_h;
    char *y = (char *)encoder->picture->img.plane[0];
    char *u = (char *)encoder->picture->img.plane[1];
    char *v = (char *)encoder->picture->img.plane[2];
    int is_u = 1;
    int y_index = 0, u_index = 0, v_index = 0;
    int yuv422_length = 2 * encoder->param->i_width * encoder->param->i_height;

    printf("%s %d\n", __FUNCTION__, __LINE__);
    // Y plane: every other byte of the packed YUYV stream.
    for (i = 0; i < yuv422_length; i += 2) {
        *(y + y_index) = *(in + i);
        y_index++;
    }

    printf("%s %d\n", __FUNCTION__, __LINE__);
    // U/V planes: take chroma from every second row only (4:2:2 -> 4:2:0),
    // alternating U and V bytes within the row.
    for (i = 0; i < encoder->param->i_height; i += 2) {
        base_h = i * encoder->param->i_width * 2;
        for (j = base_h + 1; j < base_h + encoder->param->i_width * 2; j += 2) {
            if (is_u) {
                *(u + u_index) = *(in + j);
                u_index++;
                is_u = 0;
            } else {
                *(v + v_index) = *(in + j);
                v_index++;
                is_u = 1;
            }
        }
    }

    printf("%s %d\n", __FUNCTION__, __LINE__);
    switch (type) {
    case 0:
        encoder->picture->i_type = X264_TYPE_P;
        break;
    case 1:
        encoder->picture->i_type = X264_TYPE_IDR;
        break;
    case 2:
        encoder->picture->i_type = X264_TYPE_I;
        break;
    default:
        encoder->picture->i_type = X264_TYPE_AUTO;
        break;
    }

    printf("%s %d\n", __FUNCTION__, __LINE__);
    encoder->picture->i_pts = pts_time;
    if (x264_encoder_encode(encoder->handle, &(encoder->nal), &nNal,
                            encoder->picture, &pic_out) < 0) {
        printf("x264_encoder_encode error,type:%08x!\n", encoder->picture->img.i_csp);
        return -1;
    }

    printf("%s %d\n", __FUNCTION__, __LINE__);
    // Concatenate every NAL unit into the caller's buffer.
    for (i = 0; i < nNal; i++) {
        memcpy(p_out, encoder->nal[i].p_payload, encoder->nal[i].i_payload);
        p_out += encoder->nal[i].i_payload;
        result += encoder->nal[i].i_payload;
    }

    printf("%s %d\n", __FUNCTION__, __LINE__);
    pts_time = pts_time + 1;
    return result;
}

// Raw H.264 dump file for the encoded bitstream.
ofstream f("binary.h264", ios::binary);

// Encode one captured YUYV frame and append the bitstream to binary.h264.
static void h264_encode_frame(camera_t *cam, uint8_t *yuv_frame, size_t yuv_length)
{
    printf("%s %d\n", __FUNCTION__, __LINE__);
    int encLength = h264_compress_frame(&cam->encoder, -1, yuv_frame, cam->h264_buf);
    if (encLength < 0) {
        // fix: the original stored -1 into the unsigned encodedLength and
        // then wrote ~4 GB of garbage to the dump file on encoder failure.
        return;
    }
    cam->encodedLength = encLength;
    f.write((char *)cam->h264_buf, cam->encodedLength);
    return;
}
// --- ALSA audio-capture state (declared for the audio path; not used by the
// --- video functions below) ---
snd_pcm_t *captureHandle;
int retValue;
char *readBuffer;
snd_pcm_uframes_t frames = 1024;

// Raw PCM dump file for captured audio.
ofstream a("binary.pcm", ios::binary);

// Dequeue the next filled V4L2 buffer, stage its bytes in camera->head,
// then hand the buffer back to the driver.
// Returns TRUE on success, FALSE when dequeue/requeue fails.
int camera_capture(camera_t *camera)
{
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof buf);
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    printf("%s %d\n", __FUNCTION__, __LINE__);
    if (xioctl(camera->fd, VIDIOC_DQBUF, &buf) == -1)
        return FALSE;

    printf("%s %d\n", __FUNCTION__, __LINE__);
    // Copy the frame out so the driver buffer can be requeued immediately.
    memcpy(camera->head.start, camera->buffers[buf.index].start, buf.bytesused);
    camera->head.length = buf.bytesused;

    printf("%s %d\n", __FUNCTION__, __LINE__);
    if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1)
        return FALSE;
    printf("%s %d\n", __FUNCTION__, __LINE__);

//  h264_encode_frame(camera,camera->head.start,camera->head.length);
    return TRUE;
}
// Start streaming on the capture queue (VIDIOC_STREAMON).
// NOTE(review): the return convention here is inverted relative to
// camera_capture()/camera_frame(): this returns TRUE when STREAMON *fails*
// and FALSE when it succeeds. Preserved as-is — confirm no caller depends
// on it before flipping.
int camera_record(camera_t *camera)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl(camera->fd, VIDIOC_STREAMON, &type) < 0) {
        printf("VIDIOC_STREAMON\n");
        return TRUE;
    }
    return FALSE;
}
// Stop streaming on the capture queue (VIDIOC_STREAMOFF).
// NOTE(review): same inverted return convention as camera_record() —
// TRUE means the ioctl failed, FALSE means it succeeded.
int camera_stop_record(camera_t *camera)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl(camera->fd, VIDIOC_STREAMOFF, &type) < 0) {
        printf("VIDIOC_STREAMOFF\n");
        return TRUE;
    }
    return FALSE;
}
int camera_frame(camera_t* camera, struct timeval timeout) {fd_set fds;FD_ZERO(&fds);FD_SET(camera->fd, &fds);int r = select(camera->fd + 1, &fds, 0, 0, &timeout);if (r == -1) quit("select");if (r == 0) return FALSE;return camera_capture(camera);
}void jpeg(FILE* dest, uint8_t* rgb, uint32_t width, uint32_t height, int quality)
{JSAMPARRAY image;image = (JSAMPARRAY)calloc(height, sizeof (JSAMPROW));for (size_t i = 0; i < height; i++) {image[i] = (JSAMPROW)calloc(width * 3, sizeof (JSAMPLE));for (size_t j = 0; j < width; j++) {image[i][j * 3 + 0] = rgb[(i * width + j) * 3 + 0];image[i][j * 3 + 1] = rgb[(i * width + j) * 3 + 1];image[i][j * 3 + 2] = rgb[(i * width + j) * 3 + 2];}}struct jpeg_compress_struct compress;struct jpeg_error_mgr error;compress.err = jpeg_std_error(&error);jpeg_create_compress(&compress);jpeg_stdio_dest(&compress, dest);compress.image_width = width;compress.image_height = height;compress.input_components = 3;compress.in_color_space = JCS_RGB;jpeg_set_defaults(&compress);jpeg_set_quality(&compress, quality, TRUE);jpeg_start_compress(&compress, TRUE);jpeg_write_scanlines(&compress, image, height);jpeg_finish_compress(&compress);jpeg_destroy_compress(&compress);for (size_t i = 0; i < height; i++) {free(image[i]);}free(image);
}int minmax(int min, int v, int max)
{return (v < min) ? min : (max < v) ? max : v;
}uint8_t* yuyv2rgb(uint8_t* yuyv, uint32_t width, uint32_t height)
{uint8_t* rgb = (uint8_t*)calloc(width * height * 3, sizeof (uint8_t));for (size_t i = 0; i < height; i++) {for (size_t j = 0; j < width; j += 2) {size_t index = i * width + j;int y0 = yuyv[index * 2 + 0] << 8;int u = yuyv[index * 2 + 1] - 128;int y1 = yuyv[index * 2 + 2] << 8;int v = yuyv[index * 2 + 3] - 128;rgb[index * 3 + 0] = minmax(0, (y0 + 359 * v) >> 8, 255);rgb[index * 3 + 1] = minmax(0, (y0 + 88 * v - 183 * u) >> 8, 255);rgb[index * 3 + 2] = minmax(0, (y0 + 454 * u) >> 8, 255);rgb[index * 3 + 3] = minmax(0, (y1 + 359 * v) >> 8, 255);rgb[index * 3 + 4] = minmax(0, (y1 + 88 * v - 183 * u) >> 8, 255);rgb[index * 3 + 5] = minmax(0, (y1 + 454 * u) >> 8, 255);}}return rgb;
}
// Create a detached thread running start_routine(arg).
// thread may be NULL when the caller does not need the thread id.
// Returns 0 on success, non-zero on failure.
int detachThreadCreate(pthread_t *thread, void *start_routine, void *arg)
{
    pthread_attr_t attr;
    pthread_t local;
    int ret;

    if (thread == NULL) {
        thread = &local;
    }

    // Initialize the attribute object.
    if (pthread_attr_init(&attr)) {
        printf("pthread_attr_init fail!\n");
        return -1;
    }

    // Detached threads release their own resources on exit and cannot be
    // joined, so no explicit pthread_detach() is required afterwards.
    // fix: the original called pthread_detach() on a pthread_t that was
    // uninitialized whenever the caller passed a non-NULL `thread`
    // (undefined behavior), and its error code overwrote the successful
    // pthread_create() result; it also returned 0 (success) when
    // pthread_attr_setdetachstate() failed.
    if (pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED)) {
        printf("pthread_attr_setdetachstate fail!\n");
        pthread_attr_destroy(&attr);
        return -1;
    }

    ret = pthread_create(thread, &attr, (void *(*)(void *))start_routine, arg);
    if (ret != 0) {
        printf("pthread_create fail!\n");
    }

    pthread_attr_destroy(&attr);
    return ret;
}
void MediaPlayer::onRecvFrame( char *data, int width, int height)
{QMutexLocker locker(&m_mutex);//QImage::Format_RGB888等于24const int imageSize = (((width * 24 + 31) >> 5) << 2) * height;uint8_t *dataTmp = new uint8_t[imageSize];//将数据拷贝到一个临时内存中,因为OnRecvFrame是异步执行的,data可能已经无效。try{memcpy(dataTmp, data, imageSize);}catch (...){delete[] dataTmp;return;}//转化rgb数据为可显示的图像对象。QImage image = QImage(dataTmp, width, height, QImage::Format_RGB888);if (image.isNull()){qDebug()<<"Receive frame error, width:%d, height:%d."<<width<<height;return;}m_image = image.copy(0, 0, width, height);delete[] dataTmp;update();}
// Bridge from the C capture code to the Qt window: forward an RGB frame to
// the MediaPlayer instance whose pointer is carried in userData.
void updateVideo2GUI(void *rgbData, unsigned long userData, int width, int height)
{
    MediaPlayer *window = (MediaPlayer *)userData;
    if (window == NULL)
        return;
    window->onRecvFrame((char *)rgbData, width, height);
}
// Draw the latest frame at the window's top-left corner at native size,
// holding the mutex so the capture thread cannot swap m_image mid-draw.
void MediaPlayer::PaintImage(QPainter &painter)
{
    QMutexLocker locker(&m_mutex);
    const QRect target(0, 0, m_image.width(), m_image.height());
    painter.drawImage(target, m_image);
}
void MediaPlayer::paintEvent(QPaintEvent *)
{QPainter painter(this);if (!m_image.isNull()){PaintImage(painter);}}void* doRecordH264Thread(void* arg)
{MediaPlayer* pMediaPlayer=(MediaPlayer*)arg;if(pMediaPlayer!=NULL){
//        camClient->doLoginAuthentication();qDebug()<<"doRecordH264Thread";camera_t* camera = camera_open("/dev/video0", 640, 480);qDebug()<<"camera_open\n";camera_init(camera);qDebug()<<"camera_init\n";camera_start(camera);h264_encoder_init(&camera->encoder,camera->width,camera->height);camera->h264_buf = (unsigned char*)malloc(sizeof(uint8_t) * camera->width * camera->height * 4);//如果未分配空间则会内存出错memset(camera->h264_buf,0,sizeof(uint8_t) * camera->width * camera->height * 4);struct timeval timeout;timeout.tv_sec = 1;timeout.tv_usec = 0;printf("%s %d\n",__FUNCTION__,__LINE__);/* skip 5 frames for booting a cam */for (int i = 0; i < 5; i++) {printf("%s %d\n",__FUNCTION__,__LINE__);camera_frame(camera, timeout);printf("%s %d\n",__FUNCTION__,__LINE__);}#if 1while (1){/* code */camera_frame(camera, timeout);unsigned char* rgb =yuyv2rgb(camera->head.start, camera->width, camera->height);updateVideo2GUI((void*)rgb,(unsigned long)pMediaPlayer,camera->width,camera->height);printf("%s %d\n",__FUNCTION__,__LINE__);}#endif#if 0unsigned char* rgb =yuyv2rgb(camera->head.start, camera->width, camera->height);FILE* out = fopen("result.jpg", "w");jpeg(out, rgb, camera->width, camera->height, 100);updateVideo2GUI((void*)rgb,pMediaPlayer);fclose(out);free(rgb);#endifprintf("%s %d\n",__FUNCTION__,__LINE__);camera_stop(camera);camera_finish(camera);camera_close(camera);}return NULL;
}MediaPlayer::MediaPlayer(QWidget *parent): QMainWindow(parent)
{detachThreadCreate(NULL,(void*)doRecordH264Thread,(void *)this);
//    return 0;
}MediaPlayer::~MediaPlayer()
{}

这篇关于uvc-录制并显示到界面-QT的文章就介绍到这儿,希望我们推荐的文章对编程师们有所帮助!


原文地址:
本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如若转载,请注明出处:http://www.chinasem.cn/article/993123

相关文章

Python实战之屏幕录制功能的实现

《Python实战之屏幕录制功能的实现》屏幕录制,即屏幕捕获,是指将计算机屏幕上的活动记录下来,生成视频文件,本文主要为大家介绍了如何使用Python实现这一功能,希望对大家有所帮助... 目录屏幕录制原理图像捕获音频捕获编码压缩输出保存完整的屏幕录制工具高级功能实时预览增加水印多平台支持屏幕录制原理屏幕

Qt 中 isHidden 和 isVisible 的区别与使用小结

《Qt中isHidden和isVisible的区别与使用小结》Qt中的isHidden()和isVisible()方法都用于查询组件显示或隐藏状态,然而,它们有很大的区别,了解它们对于正确操... 目录1. 基础概念2. 区别清见3. 实际案例4. 注意事项5. 总结1. 基础概念Qt 中的 isHidd

QT移植到RK3568开发板的方法步骤

《QT移植到RK3568开发板的方法步骤》本文主要介绍了QT移植到RK3568开发板的方法步骤,文中通过图文示例介绍的非常详细,对大家的学习或者工作具有一定的参考学习价值,需要的朋友们下面随着小编来一... 目录前言一、获取SDK1. 安装依赖2. 获取SDK资源包3. SDK工程目录介绍4. 获取补丁包二

Qt把文件夹从A移动到B的实现示例

《Qt把文件夹从A移动到B的实现示例》本文主要介绍了Qt把文件夹从A移动到B的实现示例,文中通过示例代码介绍的非常详细,对大家的学习或者工作具有一定的参考学习价值,需要的朋友们下面随着小编来一起学习学... 目录如何移动一个文件? 如何移动文件夹(包含里面的全部内容):如何删除文件夹:QT 文件复制,移动(

Linux虚拟机不显示IP地址的解决方法(亲测有效)

《Linux虚拟机不显示IP地址的解决方法(亲测有效)》本文主要介绍了通过VMware新装的Linux系统没有IP地址的解决方法,主要步骤包括:关闭虚拟机、打开VM虚拟网络编辑器、还原VMnet8或修... 目录前言步骤0.问题情况1.关闭虚拟机2.打开VM虚拟网络编辑器3.1 方法一:点击还原VM

CSS模拟 html 的 title 属性(鼠标悬浮显示提示文字效果)

《CSS模拟html的title属性(鼠标悬浮显示提示文字效果)》:本文主要介绍了如何使用CSS模拟HTML的title属性,通过鼠标悬浮显示提示文字效果,通过设置`.tipBox`和`.tipBox.tipContent`的样式,实现了提示内容的隐藏和显示,详细内容请阅读本文,希望能对你有所帮助... 效

Qt实现发送HTTP请求的示例详解

《Qt实现发送HTTP请求的示例详解》这篇文章主要为大家详细介绍了如何通过Qt实现发送HTTP请求,文中的示例代码讲解详细,具有一定的借鉴价值,感兴趣的小伙伴可以跟随小编一起学习一下... 目录1、添加network模块2、包含改头文件3、创建网络访问管理器4、创建接口5、创建网络请求对象6、创建一个回复对

Qt 中集成mqtt协议的使用方法

《Qt中集成mqtt协议的使用方法》文章介绍了如何在工程中引入qmqtt库,并通过声明一个单例类来暴露订阅到的主题数据,本文通过实例代码给大家介绍的非常详细,感兴趣的朋友一起看看吧... 目录一,引入qmqtt 库二,使用一,引入qmqtt 库我是将整个头文件/源文件都添加到了工程中进行编译,这样 跨平台

如何设置vim永久显示行号

《如何设置vim永久显示行号》在Linux环境下,vim默认不显示行号,这在程序编译出错时定位错误语句非常不便,通过修改vim配置文件vimrc,可以在每次打开vim时永久显示行号... 目录设置vim永久显示行号1.临时显示行号2.永久显示行号总结设置vim永久显示行号在li

Python中的可视化设计与UI界面实现

《Python中的可视化设计与UI界面实现》本文介绍了如何使用Python创建用户界面(UI),包括使用Tkinter、PyQt、Kivy等库进行基本窗口、动态图表和动画效果的实现,通过示例代码,展示... 目录从像素到界面:python带你玩转UI设计示例:使用Tkinter创建一个简单的窗口绘图魔法:用