Image Processing Library Based on OpenCV

2024-06-17 16:08
Tags: opencv, image processing

This post introduces an OpenCV-based image processing library; hopefully it offers some useful reference for developers working on similar problems.

A record of commonly used image augmentation methods.

It covers:

1. Rotation (2D), plus 3D rotation

2. Random filling (edge occlusion)

3. JPEG compression

4. Gaussian noise

5. Random cropping

6. Contrast adjustment

7. Various blurring and smoothing filters

#pragma once
#ifndef _UTIL_IMG_
#define _UTIL_IMG_

#include <opencv2/opencv.hpp>
#include <time.h>

namespace util {

/* Rotate an image by a fixed angle. theta range [0, 360]. landmarks may be empty. */
cv::Mat get_rotation_img(cv::Mat img, std::vector<cv::Point2f> &landmarks, int theta = 20) {
    cv::Mat mat = img;
    cv::Mat dst;
    cv::Point2f center(img.size().width / 2, img.size().height / 2);
    double angle = theta;
    cv::Mat rot = cv::getRotationMatrix2D(center, angle, 1);
    cv::Rect bbox = cv::RotatedRect(center, mat.size(), angle).boundingRect();
    cv::warpAffine(mat, dst, rot, bbox.size());
    for (int j = 0; j < landmarks.size(); j++) {
        float theta = -3.1415926 / (180 / angle);
        float x1 = landmarks[j].x - rot.at<double>(1, 2);
        float y1 = landmarks[j].y - rot.at<double>(0, 2);
        landmarks[j].x = x1 * cos(theta) - y1 * sin(theta);
        landmarks[j].y = x1 * sin(theta) + y1 * cos(theta);
    }
    return dst;
}

/* Rotate an image by a random angle in [-base_theta, base_theta]. */
cv::Mat get_rotation_img_random(cv::Mat img, std::vector<cv::Point2f> &landmarks, int base_theta = 20) {
    srand(time(NULL));
    cv::Mat mat = img;
    cv::Mat dst;
    cv::Point2f center(img.size().width / 2, img.size().height / 2);
    double angle = (rand() % base_theta) * std::pow((-1), rand() % 2);
    cv::Mat rot = cv::getRotationMatrix2D(center, angle, 1);
    cv::Rect bbox = cv::RotatedRect(center, mat.size(), angle).boundingRect();
    cv::warpAffine(mat, dst, rot, bbox.size());
    for (int j = 0; j < landmarks.size(); j++) {
        float theta = -3.1415926 / (180 / angle);
        float x1 = landmarks[j].x - rot.at<double>(1, 2);
        float y1 = landmarks[j].y - rot.at<double>(0, 2);
        landmarks[j].x = x1 * cos(theta) - y1 * sin(theta);
        landmarks[j].y = x1 * sin(theta) + y1 * cos(theta);
    }
    return dst;
}

/* Fill the ROI rect with value. value range [0, 255]. */
template <typename type>
void mat_filled_value(cv::Mat &img, cv::Rect rect, int value = 0) {
    int rows = img.rows;
    int cols = img.cols;
    if (rect.x < 0 || rect.y < 0 || rect.x + rect.width > cols || rect.y + rect.height > rows) {
        std::cout << "rect value error." << std::endl;
        return;
    }
    int channel = img.channels();
    int offsetx = rect.x * channel;
    int offsetx_ = (rect.x + rect.width) * channel;
    for (size_t i = 0; i < rows; i++) {
        type *ptr = img.ptr<type>(i);
        for (size_t j = 0; j < cols * channel; j = j + channel) {
            if (i >= rect.y && i < rect.y + rect.height && j >= offsetx && j < offsetx_) {
                if (channel == 1) {
                    ptr[j] = value;
                } else if (channel == 3) {
                    ptr[j] = value;
                    ptr[j + 1] = value;
                    ptr[j + 2] = value;
                }
            }
        }
    }
}

/* Generate up to four random border rects. value range [1, img_rows]. */
static std::vector<cv::Rect> generate_random_rect(int widths, int heights, int value = 6) {
    srand(time(NULL));
    std::vector<cv::Rect> rects;
    int x1 = rand() % (heights / value);
    int x2 = rand() % (heights / value);
    int x3 = rand() % (widths / value);
    int x4 = rand() % (widths / value);
    cv::Rect rect[4];
    rect[0] = cv::Rect(0, 0, widths, x1);
    rect[1] = cv::Rect(0, heights - x2, widths, x2);
    rect[2] = cv::Rect(0, 0, x3, heights);
    rect[3] = cv::Rect(widths - x4, 0, x4, heights);
    for (size_t i = 0; i < 4; i++) {
        int x5 = rand() % 20;
        int x6 = rand() % 3;
        if (x5 < x6) {
            rects.push_back(rect[i]);
        }
    }
    return rects;
}

/* Fill randomly chosen border rects with value. */
template <typename type>
cv::Mat mat_filled_value_with_random(cv::Mat img, int value = 0) {
    cv::Mat dst = img.clone();
    std::vector<cv::Rect> rects = generate_random_rect(img.cols, img.rows);
    for (size_t i = 0; i < rects.size(); i++) {
        std::cout << " rect" << rects[i].x << " " << rects[i].y << " "
                  << rects[i].width << " " << rects[i].height << std::endl;
        mat_filled_value<type>(dst, rects[i], value);
    }
    return dst;
}

/* Random JPEG compression. */
cv::Mat encoder_JPEG(cv::Mat img, int value = 40) {
    srand(time(NULL));
    int QF = 100; // JPEG quality factor
    QF = 100 - value + (rand() % (value + 1));
    std::vector<int> compression_params = { cv::IMWRITE_JPEG_QUALITY, QF };
    std::vector<unsigned char> img_jpeg;
    cv::imencode(".jpg", img, img_jpeg, compression_params);
    cv::Mat temp = cv::imdecode(img_jpeg, 1);
    return temp;
}

static void composeExternalMatrix(float yaw, float pitch, float roll,
    float trans_x, float trans_y, float trans_z, cv::Mat& external_matrix)
{
    external_matrix.release();
    external_matrix.create(3, 4, CV_64FC1);
    double sin_yaw = sin((double)yaw * CV_PI / 180);
    double cos_yaw = cos((double)yaw * CV_PI / 180);
    double sin_pitch = sin((double)pitch * CV_PI / 180);
    double cos_pitch = cos((double)pitch * CV_PI / 180);
    double sin_roll = sin((double)roll * CV_PI / 180);
    double cos_roll = cos((double)roll * CV_PI / 180);
    external_matrix.at<double>(0, 0) = cos_pitch * cos_yaw;
    external_matrix.at<double>(0, 1) = -cos_pitch * sin_yaw;
    external_matrix.at<double>(0, 2) = sin_pitch;
    external_matrix.at<double>(1, 0) = cos_roll * sin_yaw + sin_roll * sin_pitch * cos_yaw;
    external_matrix.at<double>(1, 1) = cos_roll * cos_yaw - sin_roll * sin_pitch * sin_yaw;
    external_matrix.at<double>(1, 2) = -sin_roll * cos_pitch;
    external_matrix.at<double>(2, 0) = sin_roll * sin_yaw - cos_roll * sin_pitch * cos_yaw;
    external_matrix.at<double>(2, 1) = sin_roll * cos_yaw + cos_roll * sin_pitch * sin_yaw;
    external_matrix.at<double>(2, 2) = cos_roll * cos_pitch;
    external_matrix.at<double>(0, 3) = trans_x;
    external_matrix.at<double>(1, 3) = trans_y;
    external_matrix.at<double>(2, 3) = trans_z;
}

static cv::Mat Rect2Mat(const cv::Rect& img_rect)
{
    // Homogeneous coordinates of the four corners of the image rect.
    cv::Mat srcCoord(3, 4, CV_64FC1);
    srcCoord.at<double>(0, 0) = img_rect.x;
    srcCoord.at<double>(1, 0) = img_rect.y;
    srcCoord.at<double>(2, 0) = 1;
    srcCoord.at<double>(0, 1) = img_rect.x + img_rect.width;
    srcCoord.at<double>(1, 1) = img_rect.y;
    srcCoord.at<double>(2, 1) = 1;
    srcCoord.at<double>(0, 2) = img_rect.x + img_rect.width;
    srcCoord.at<double>(1, 2) = img_rect.y + img_rect.height;
    srcCoord.at<double>(2, 2) = 1;
    srcCoord.at<double>(0, 3) = img_rect.x;
    srcCoord.at<double>(1, 3) = img_rect.y + img_rect.height;
    srcCoord.at<double>(2, 3) = 1;
    return srcCoord;
}

static void CircumTransImgRect(const cv::Size& img_size, const cv::Mat& transM, cv::Rect_<double>& CircumRect)
{
    // Convert the four corners of the input image to homogeneous coordinates.
    cv::Mat cornersMat = Rect2Mat(cv::Rect(0, 0, img_size.width, img_size.height));
    // Transform them and take the bounding range.
    cv::Mat dstCoord = transM * cornersMat;
    double min_x = std::min(dstCoord.at<double>(0, 0) / dstCoord.at<double>(2, 0), dstCoord.at<double>(0, 3) / dstCoord.at<double>(2, 3));
    double max_x = std::max(dstCoord.at<double>(0, 1) / dstCoord.at<double>(2, 1), dstCoord.at<double>(0, 2) / dstCoord.at<double>(2, 2));
    double min_y = std::min(dstCoord.at<double>(1, 0) / dstCoord.at<double>(2, 0), dstCoord.at<double>(1, 1) / dstCoord.at<double>(2, 1));
    double max_y = std::max(dstCoord.at<double>(1, 2) / dstCoord.at<double>(2, 2), dstCoord.at<double>(1, 3) / dstCoord.at<double>(2, 3));
    CircumRect.x = min_x;
    CircumRect.y = min_y;
    CircumRect.width = max_x - min_x;
    CircumRect.height = max_y - min_y;
}

static void CreateMap(const cv::Size& src_size, const cv::Rect_<double>& dst_rect, const cv::Mat& transMat, cv::Mat& map_x, cv::Mat& map_y)
{
    map_x.create(dst_rect.size(), CV_32FC1);
    map_y.create(dst_rect.size(), CV_32FC1);
    double Z = transMat.at<double>(2, 3);
    cv::Mat invTransMat = transMat.inv();   // inverse matrix
    cv::Mat dst_pos(3, 1, CV_64FC1);        // coordinate on the output image
    dst_pos.at<double>(2, 0) = Z;
    for (int dy = 0; dy < map_x.rows; dy++) {
        dst_pos.at<double>(1, 0) = dst_rect.y + dy;
        for (int dx = 0; dx < map_x.cols; dx++) {
            dst_pos.at<double>(0, 0) = dst_rect.x + dx;
            cv::Mat rMat = -invTransMat(cv::Rect(3, 2, 1, 1)) / (invTransMat(cv::Rect(0, 2, 3, 1)) * dst_pos);
            cv::Mat src_pos = invTransMat(cv::Rect(0, 0, 3, 2)) * dst_pos * rMat + invTransMat(cv::Rect(3, 0, 1, 2));
            map_x.at<float>(dy, dx) = src_pos.at<double>(0, 0) + (float)src_size.width / 2;
            map_y.at<float>(dy, dx) = src_pos.at<double>(1, 0) + (float)src_size.height / 2;
        }
    }
}

/* 3D rotation (yaw / pitch / roll) of an image via perspective projection. */
cv::Mat rotate_image_3D(const cv::Mat& src, float yaw, float pitch, float roll,
    float Z = 1000, int interpolation = cv::INTER_LINEAR, int border_mode = cv::BORDER_CONSTANT,
    const cv::Scalar& border_color = cv::Scalar(0, 0, 0))
{
    cv::Mat dst;
    // rotation matrix
    cv::Mat rotMat_3x4;
    composeExternalMatrix(yaw, pitch, roll, 0, 0, Z, rotMat_3x4);
    cv::Mat rotMat = cv::Mat::eye(4, 4, rotMat_3x4.type());
    rotMat_3x4.copyTo(rotMat(cv::Rect(0, 0, 4, 3)));
    // From 2D coordinates to 3D coordinates.
    // The center of the image is (0,0,0).
    cv::Mat invPerspMat = cv::Mat::zeros(4, 3, CV_64FC1);
    invPerspMat.at<double>(0, 0) = 1;
    invPerspMat.at<double>(1, 1) = 1;
    invPerspMat.at<double>(3, 2) = 1;
    invPerspMat.at<double>(0, 2) = -(double)src.cols / 2;
    invPerspMat.at<double>(1, 2) = -(double)src.rows / 2;
    // Perspective projection from 3D back to 2D coordinates.
    cv::Mat perspMat = cv::Mat::zeros(3, 4, CV_64FC1);
    perspMat.at<double>(0, 0) = Z;
    perspMat.at<double>(1, 1) = Z;
    perspMat.at<double>(2, 2) = 1;
    // Transform the corners to get the coordinate range of the output image.
    cv::Mat transMat = perspMat * rotMat * invPerspMat;
    cv::Rect_<double> CircumRect;
    CircumTransImgRect(src.size(), transMat, CircumRect);
    // Build the map from output pixels back to input pixels.
    cv::Mat map_x, map_y;
    CreateMap(src.size(), CircumRect, rotMat, map_x, map_y);
    cv::remap(src, dst, map_x, map_y, interpolation, border_mode, border_color);
    return dst;
}

/* Box-Muller transform: one sample of standard Gaussian noise. */
static double generateGaussianNoise()
{
#define TWO_PI 6.2831853071795864769252866
    static bool hasSpare = false;
    static double rand1, rand2;
    if (hasSpare) {
        hasSpare = false;
        return sqrt(rand1) * sin(rand2);
    }
    hasSpare = true;
    rand1 = rand() / ((double)RAND_MAX);
    if (rand1 < 1e-100) rand1 = 1e-100;
    rand1 = -2 * log(rand1);
    rand2 = (rand() / ((double)RAND_MAX)) * TWO_PI;
    return sqrt(rand1) * cos(rand2);
}

/* Gaussian noise with value range [1, 255]. */
cv::Mat get_gaussian_noise_img(cv::Mat img, int value = 10) {
    srand(time(NULL));
    value = rand() % value;
    cv::Mat dst = img.clone();
    int channels = dst.channels();
    int nRows = dst.rows;
    int nCols = dst.cols * channels;
    if (dst.isContinuous()) {
        nCols *= nRows;
        nRows = 1;
    }
    int i, j;
    uchar* p;
    for (i = 0; i < nRows; ++i) {
        p = dst.ptr<uchar>(i);
        for (j = 0; j < nCols; ++j) {
            double val = p[j] + generateGaussianNoise() * value;
            if (val < 0) val = 0;
            if (val > 255) val = 255;
            p[j] = (uchar)val;
        }
    }
    return dst;
}

/* Random cropping, then resize back to the input size. */
cv::Mat get_crop_img_same(cv::Mat img, cv::Size crop_size) {
    srand(time(NULL));
    int h_off = 0;
    int w_off = 0;
    int img_height = img.rows;
    int img_width = img.cols;
    if (img_height < crop_size.height) {
        crop_size.height = img_height;
    }
    if (img_width < crop_size.width) {
        crop_size.width = img_width;
    }
    cv::Mat cv_cropped_img = img;
    h_off = rand() % (img_height - crop_size.height + 1);
    w_off = rand() % (img_width - crop_size.width + 1);
    cv::Rect roi(w_off, h_off, crop_size.width, crop_size.height);
    cv_cropped_img = img(roi);
    cv::resize(cv_cropped_img, cv_cropped_img, cv::Size(img_width, img_height));
    return cv_cropped_img;
}

/* Contrast and brightness adjustment: dst(i,j) = img(i,j) * alpha + beta.
   @param value = 0.2, range [0, 1] */
cv::Mat contrast_adjustment(cv::Mat img, float value = 0.2) {
    srand(time(NULL));
    cv::Mat dst;
    cv::RNG rng;
    float alpha = 1, beta = 0;
    float min_alpha = 1 - value, max_alpha = 1 + value;
    alpha = rng.uniform(min_alpha, max_alpha);
    beta = (float)(rand() % 8);
    // flip sign
    if (rand() % 2 != 0) beta = -beta;
    img.convertTo(dst, -1, alpha, beta);
    return dst;
}

/* Bilateral filtering: edge-preserving denoising.
   @param value = 3, range [1, img_rows] */
cv::Mat bilateral_filter(cv::Mat img, int value = 3) {
    int g_nsigmaColorValue = 2 * value + 5;
    int g_nsigmaSpaceValue = 2 * value + 5;
    cv::Mat dst;
    cv::bilateralFilter(img, dst, value, g_nsigmaColorValue, g_nsigmaSpaceValue);
    return dst;
}

/* Motion blur via a running box average along x and y.
   steps_x / steps_y should be >= 2; default 3, range [0, img_rows] */
cv::Mat motion_blur(cv::Mat in, unsigned int steps_x = 3, unsigned int steps_y = 3)
{
    int rows = in.rows;
    int cols = in.cols;
    int channel = in.channels();
    cv::Mat out = in.clone();
    int element = cols * channel;
    if (steps_x != 0) {
        for (size_t i = 0; i < rows; i++) {
            uchar *ptr_in = in.ptr<uchar>(i);
            uchar *ptr_out = out.ptr<uchar>(i);
            float sum_b = 0, sum_g = 0, sum_r = 0;
            for (size_t j = 0; j < element; j += channel) {
                if ((j / channel) < steps_x) {
                    if (channel == 3) {
                        sum_b += ptr_in[j];
                        sum_g += ptr_in[j + 1];
                        sum_r += ptr_in[j + 2];
                        ptr_out[j] = static_cast<uchar>(sum_b / ((j / channel) + 1));
                        ptr_out[j + 1] = static_cast<uchar>(sum_g / ((j / channel) + 1));
                        ptr_out[j + 2] = static_cast<uchar>(sum_r / ((j / channel) + 1));
                    } else if (channel == 1) {
                        sum_b += ptr_in[j];
                        ptr_out[j] = static_cast<uchar>(sum_b / (j + 1));
                    }
                } else {
                    if (channel == 3) {
                        sum_b = sum_b + ptr_in[j] - ptr_in[j - steps_x * channel];
                        sum_g = sum_g + ptr_in[j + 1] - ptr_in[j - steps_x * channel + 1];
                        sum_r = sum_r + ptr_in[j + 2] - ptr_in[j - steps_x * channel + 2];
                        ptr_out[j] = static_cast<uchar>(sum_b / steps_x);
                        ptr_out[j + 1] = static_cast<uchar>(sum_g / steps_x);
                        ptr_out[j + 2] = static_cast<uchar>(sum_r / steps_x);
                    } else if (channel == 1) {
                        sum_b = sum_b + ptr_in[j] - ptr_in[j - steps_x];
                        ptr_out[j] = static_cast<uchar>(sum_b / steps_x);
                    }
                }
            }
        }
    }
    if (steps_y != 0) {
        for (size_t i = 0; i < cols; i++) {
            float sum_b = 0, sum_g = 0, sum_r = 0;
            for (size_t j = 0; j < rows; j++) {
                if (j < steps_y) {
                    if (channel == 3) {
                        sum_b += in.at<cv::Vec3b>(j, i)[0];
                        sum_g += in.at<cv::Vec3b>(j, i)[1];
                        sum_r += in.at<cv::Vec3b>(j, i)[2];
                        out.at<cv::Vec3b>(j, i)[0] = static_cast<uchar>(sum_b / (j + 1));
                        out.at<cv::Vec3b>(j, i)[1] = static_cast<uchar>(sum_g / (j + 1));
                        out.at<cv::Vec3b>(j, i)[2] = static_cast<uchar>(sum_r / (j + 1));
                    } else if (channel == 1) {
                        sum_b += in.at<uchar>(j, i);
                        out.at<uchar>(j, i) = static_cast<uchar>(sum_b / (j + 1));
                    }
                } else {
                    if (channel == 3) {
                        sum_b = sum_b + in.at<cv::Vec3b>(j, i)[0] - in.at<cv::Vec3b>(j - steps_y, i)[0];
                        sum_g = sum_g + in.at<cv::Vec3b>(j, i)[1] - in.at<cv::Vec3b>(j - steps_y, i)[1];
                        sum_r = sum_r + in.at<cv::Vec3b>(j, i)[2] - in.at<cv::Vec3b>(j - steps_y, i)[2];
                        out.at<cv::Vec3b>(j, i)[0] = static_cast<uchar>(sum_b / steps_y);
                        out.at<cv::Vec3b>(j, i)[1] = static_cast<uchar>(sum_g / steps_y);
                        out.at<cv::Vec3b>(j, i)[2] = static_cast<uchar>(sum_r / steps_y);
                    } else if (channel == 1) {
                        sum_b = sum_b + in.at<uchar>(j, i) - in.at<uchar>(j - steps_y, i);
                        out.at<uchar>(j, i) = static_cast<uchar>(sum_b / steps_y);
                    }
                }
            }
        }
    }
    return out;
}

/* Low-pass blur in the frequency domain.
   @param value_quality = 2, range [0, img_rows] */
cv::Mat dft_blur(cv::Mat inputs, const unsigned int value_quality = 2) {
    std::vector<cv::Mat> mergeM;
    std::vector<cv::Mat> splitM;
    cv::split(inputs, splitM);
    int size_m = splitM.size();
    for (size_t j = 0; j < size_m; j++) {
        // Get the optimal DFT size; the fast transform wants sizes that factor nicely.
        int w = cv::getOptimalDFTSize(splitM[j].cols);
        int h = cv::getOptimalDFTSize(splitM[j].rows);
        // Pad the channel into padded.
        cv::Mat padded;
        cv::copyMakeBorder(splitM[j], padded, 0, h - splitM[j].rows, 0, w - splitM[j].cols, cv::BORDER_CONSTANT, cv::Scalar::all(0));
        // Build a two-channel (real, imaginary) image and merge the planes.
        cv::Mat plane[] = { cv::Mat_<float>(padded), cv::Mat_<float>::zeros(padded.size()) };
        cv::Mat complexIm;
        cv::merge(plane, 2, complexIm);
        // Forward DFT in place.
        cv::dft(complexIm, complexIm);
        int rows = complexIm.rows;
        int cols = complexIm.cols;
        int offsetX = rows / (value_quality + 1);
        int offsetY = cols / (value_quality + 1);
        for (size_t i = 0; i < rows; i++) {
            float *ptr = complexIm.ptr<float>(i);
            for (size_t j = 0; j < cols * 2; j = j + 2) {
                // Zero out the high-frequency band (low-pass filter).
                if ((i > offsetX && i < (rows - offsetX)) && (j > 2 * offsetY && j < (2 * cols - 2 * offsetY))) {
                    ptr[j] = 0;
                    ptr[j + 1] = 0;
                }
                //if ((i < offsetX || i > (rows - offsetX)) || (j < 2 * offsetY || j > (2 * cols - 2 * offsetY))) {
                //    ptr[j] = 0;
                //    ptr[j + 1] = 0;
                //}
            }
        }
        // Copy the spectrum and run the inverse transform to recover the image.
        cv::Mat _complexim;
        complexIm.copyTo(_complexim);
        cv::Mat iDft[] = { cv::Mat::zeros(plane[0].size(), CV_32F), cv::Mat::zeros(plane[0].size(), CV_32F) };
        idft(_complexim, _complexim);   // inverse DFT
        split(_complexim, iDft);        // the result is still complex
        cv::magnitude(iDft[0], iDft[1], iDft[0]);
        cv::normalize(iDft[0], iDft[0], 255, 0, CV_MINMAX);
        //cv::imshow("idft", iDft[0]);  // show the inverse transform
        //waitKey(0);
        mergeM.push_back(iDft[0]);
    }
    cv::Mat dst;
    cv::merge(mergeM, dst);
    if (dst.channels() == 3) {
        dst.convertTo(dst, CV_8UC3);
    } else if (dst.channels() == 1) {
        dst.convertTo(dst, CV_8UC1);
    }
    return dst;
}

/* Smooth filtering: GaussianBlur, blur, medianBlur, boxFilter, motion blur,
   DFT blur, or bilateral filter, chosen at random. */
cv::Mat smooth_filter(cv::Mat img, int ksize = 3) {
    srand(time(NULL));
    cv::Mat dst;
    int smooth_type = rand() % 7; // see opencv_util.hpp
    ksize = ksize + 2 * (rand() % 2);
    switch (smooth_type) {
    case 0:
        cv::GaussianBlur(img, dst, cv::Size(ksize, ksize), 0);
        break;
    case 1:
        cv::blur(img, dst, cv::Size(ksize, ksize));
        break;
    case 2:
        cv::medianBlur(img, dst, ksize);
        break;
    case 3:
        cv::boxFilter(img, dst, -1, cv::Size(ksize, ksize));
        break;
    case 4:
        dst = motion_blur(img, ksize * (rand() % 2), ksize * (rand() % 2));
        break;
    case 5:
        dst = dft_blur(img, ksize);
        break;
    case 6:
        dst = bilateral_filter(img, ksize - 1);
        break;
    default:
        cv::GaussianBlur(img, dst, cv::Size(ksize, ksize), 0);
        break;
    }
    return dst;
}

} // namespace util

#endif // _UTIL_IMG_
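
For reference, here is a minimal usage sketch of the utilities above (my own demo, not from the original post): the file name util_img.hpp, the input image test.jpg, and the output file names are placeholders. Note that the functions are defined directly in the header, so they should be included from a single translation unit (or marked inline) to avoid duplicate-symbol link errors.

// Minimal augmentation demo, assuming the header above is saved as "util_img.hpp".
#include "util_img.hpp"

int main() {
    cv::Mat img = cv::imread("test.jpg");   // placeholder input path
    if (img.empty()) return -1;

    std::vector<cv::Point2f> landmarks;      // may stay empty, per the header's comment
    cv::Mat rotated  = util::get_rotation_img_random(img, landmarks, 20);
    cv::Mat jpeg     = util::encoder_JPEG(img, 40);                  // random JPEG re-compression
    cv::Mat noisy    = util::get_gaussian_noise_img(img, 10);        // additive Gaussian noise
    cv::Mat cropped  = util::get_crop_img_same(img, cv::Size(img.cols * 3 / 4, img.rows * 3 / 4));
    cv::Mat contrast = util::contrast_adjustment(img, 0.2f);
    cv::Mat filled   = util::mat_filled_value_with_random<uchar>(img); // random border occlusion
    cv::Mat smoothed = util::smooth_filter(img, 3);                  // one of the blur variants

    cv::imwrite("aug_rotated.jpg", rotated);
    cv::imwrite("aug_smoothed.jpg", smoothed);
    return 0;
}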

 

8. Image mosaic (pixelation)

// Mosaic / pixelation based on the legacy IplImage API.
// getPixel(img, row, col, ch) / setPixel(dst, row, col, ch, val) are helper
// accessors used by the original author; they are not part of OpenCV and are
// not shown in this post.
void Masic(IplImage* img, IplImage* dst, int nSize)
{
    int offset = (nSize - 1) / 2;
    for (int row = offset; row < img->height - offset; row = row + offset) {
        for (int col = offset; col < img->width - offset; col = col + offset) {
            int val0 = getPixel(img, row, col, 0);
            int val1 = getPixel(img, row, col, 1);
            int val2 = getPixel(img, row, col, 2);
            // Spread the block's centre pixel over the whole nSize x nSize block.
            for (int m = -offset; m < offset; m++) {
                for (int n = -offset; n < offset; n++) {
                    setPixel(dst, row + m, col + n, 0, val0);
                    setPixel(dst, row + m, col + n, 1, val1);
                    setPixel(dst, row + m, col + n, 2, val2);
                }
            }
        }
    }
}
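
Since getPixel / setPixel are specific to that snippet, the same pixelation can be sketched with the cv::Mat C++ API roughly as follows (my own sketch, assuming a 3-channel 8-bit image and nSize >= 3; not the original code):

// Rough cv::Mat equivalent of Masic(): every block takes the value of the
// pixel at its centre. Assumes CV_8UC3 input and nSize >= 3.
void masic_mat(const cv::Mat &src, cv::Mat &dst, int nSize)
{
    dst = src.clone();
    int offset = (nSize - 1) / 2;
    for (int row = offset; row < src.rows - offset; row += offset) {
        for (int col = offset; col < src.cols - offset; col += offset) {
            cv::Vec3b center = src.at<cv::Vec3b>(row, col);
            for (int m = -offset; m < offset; m++)
                for (int n = -offset; n < offset; n++)
                    dst.at<cv::Vec3b>(row + m, col + n) = center;
        }
    }
}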

 

That concludes this post on the OpenCV-based image processing library; I hope it is helpful to other developers.



http://www.chinasem.cn/article/1069912
