This article covers some common image-processing tasks on iOS, namely frosted-glass blurring and compression, and is meant as a practical reference for developers who need to solve these problems.
When uploading or otherwise handling images, we often need to resize or compress them first. The category methods declared below cover those cases; a short usage sketch follows the declarations.
/**
 *  Compress an image by re-encoding it as JPEG
 *
 *  @param type compression level (original / big / middle / small)
 *
 *  @return the compressed image
 */
- (UIImage *)compressionImage:(NSInteger)type;
/**
 *  Resize an image and reduce its file size
 *
 *  @param image the source image
 *
 *  @return the scaled and compressed image
 */
+ (UIImage *)scaleAndRotateImage:(UIImage *)image;
/**
 *  Compress an avatar image
 *
 *  @param image the source image
 *
 *  @return the compressed image
 */
+ (UIImage *)scaleUserImage:(UIImage *)image;
/**
 *  Apply a frosted-glass (blur) effect to the image
 *
 *  @param blur blur level, 0.0 ~ 1.0
 *
 *  @return the blurred image
 */
- (UIImage *)blurryImageWithBlurLevel:(CGFloat)blur;
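A quick usage sketch of the API above. The asset name and variable names are hypothetical, and the compression-type constant is assumed to be defined as shown further down:

UIImage *photo = [UIImage imageNamed:@"photo"];            // hypothetical asset name
UIImage *smaller = [photo compressionImage:image_middle];  // JPEG re-encoding at a preset quality
UIImage *resized = [UIImage scaleAndRotateImage:photo];    // resize for upload
UIImage *avatar  = [UIImage scaleUserImage:photo];         // avatar-sized copy
UIImage *frosted = [photo blurryImageWithBlurLevel:0.5f];  // frosted-glass blur, 0.0 ~ 1.0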
Below is the implementation of these methods in a UIImage category:
@implementation UIImage (Category)
/**
 *  Compress an image by re-encoding it as JPEG
 *
 *  @param type compression level (original / big / middle / small)
 *
 *  @return the compressed image
 */
- (UIImage *)compressionImage:(NSInteger)type {
    CGFloat compressionQuality;
    switch (type) {
        case image_original:
        case image_big:
            compressionQuality = 1.0f;
            break;
        case image_middle:
            compressionQuality = 0.8f;
            break;
        case image_small:
            compressionQuality = 0.6f;
            break;
        default:
            compressionQuality = 0;
            break;
    }
    return [UIImage imageWithData:UIImageJPEGRepresentation(self, compressionQuality)];
}
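The constants image_original, image_big, image_middle and image_small used in the switch above are not defined anywhere in the article. A plausible definition, purely as an assumption, would be:

typedef NS_ENUM(NSInteger, ImageCompressionType) {
    image_original = 0,  // keep the source quality
    image_big,           // JPEG quality 1.0
    image_middle,        // JPEG quality 0.8
    image_small          // JPEG quality 0.6
};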
/**
 *  Scale an image proportionally to a target size (aspect fill, centered)
 *
 *  @param sourceImage the source image
 *  @param size        the target size
 *
 *  @return the scaled image
 */
- (UIImage *)imageCompressForSize:(UIImage *)sourceImage targetSize:(CGSize)size {
    UIImage *newImage = nil;
    CGSize imageSize = sourceImage.size;
    CGFloat width = imageSize.width;
    CGFloat height = imageSize.height;
    CGFloat targetWidth = size.width;
    CGFloat targetHeight = size.height;
    CGFloat scaleFactor = 0.0;
    CGFloat scaledWidth = targetWidth;
    CGFloat scaledHeight = targetHeight;
    CGPoint thumbnailPoint = CGPointMake(0.0, 0.0);
    if (CGSizeEqualToSize(imageSize, size) == NO) {
        CGFloat widthFactor = targetWidth / width;
        CGFloat heightFactor = targetHeight / height;
        if (widthFactor > heightFactor) {
            scaleFactor = widthFactor;
        } else {
            scaleFactor = heightFactor;
        }
        scaledWidth = width * scaleFactor;
        scaledHeight = height * scaleFactor;
        // Center the scaled image inside the target rect
        if (widthFactor > heightFactor) {
            thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
        } else if (widthFactor < heightFactor) {
            thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
        }
    }
    UIGraphicsBeginImageContext(size);
    CGRect thumbnailRect = CGRectZero;
    thumbnailRect.origin = thumbnailPoint;
    thumbnailRect.size.width = scaledWidth;
    thumbnailRect.size.height = scaledHeight;
    [sourceImage drawInRect:thumbnailRect];
    newImage = UIGraphicsGetImageFromCurrentImageContext();
    if (newImage == nil) {
        NSLog(@"scale image fail");
    }
    UIGraphicsEndImageContext();
    return newImage;
}
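Note that when the aspect ratios differ, the method above scales to fill the target size and centers the overflow. It is also written to operate on the image passed in rather than on self, so a call site looks roughly like this (asset and variable names are hypothetical):

UIImage *photo = [UIImage imageNamed:@"photo"];  // hypothetical asset
UIImage *thumbnail = [photo imageCompressForSize:photo targetSize:CGSizeMake(200.0f, 200.0f)];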
/**
 *  Resize an image and reduce its file size
 *
 *  @param image the source image
 *
 *  @return the scaled and compressed image
 */
+ (UIImage *)scaleAndRotateImage:(UIImage *)image {
    CGSize imageSize = image.size;
    CGFloat width = imageSize.width;
    CGFloat height = imageSize.height;
    CGFloat thumbSize = 0.8;
    CGFloat tempW = 480;
    // Pick a target width and JPEG quality based on the source width
    if (width == 3264 || width == 2592) {
        tempW = 1024;
        thumbSize = 0.6;
    } else if (width == 2448 || width == 1936) {
        tempW = 720;
        thumbSize = 0.6;
    } else if (width <= 480) {
        tempW = screenWidth;
    } else if (width > 480 && width <= 1024) {
        tempW = width;
    } else if (width > 1024) {
        tempW = 1024;
    }
    CGFloat scaleFactor = 0.0;
    CGPoint thumbPoint = CGPointMake(0.0, 0.0);
    CGFloat widthFactor = tempW / width;
    CGFloat thumbHeight = tempW * (height / width);
    CGFloat heightFactor = thumbHeight / height;
    if (widthFactor > heightFactor) {
        scaleFactor = widthFactor;
    } else {
        scaleFactor = heightFactor;
    }
    CGFloat scaledWidth = width * scaleFactor;
    CGFloat scaledHeight = height * scaleFactor;
    if (widthFactor > heightFactor) {
        thumbPoint.y = (thumbHeight - scaledHeight) * 0.5;
    } else if (widthFactor < heightFactor) {
        thumbPoint.x = (tempW - scaledWidth) * 0.5;
    }
    UIGraphicsBeginImageContext(CGSizeMake(tempW, thumbHeight));
    CGRect thumbRect = CGRectZero;
    thumbRect.origin = thumbPoint;
    thumbRect.size.width = scaledWidth;
    thumbRect.size.height = scaledHeight;
    [image drawInRect:thumbRect];
    UIImage *thumbImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    // Re-encode as JPEG, stepping the quality down until the data is under ~280 KB
    NSData *thumbImageData = UIImageJPEGRepresentation(thumbImage, thumbSize);
    CGFloat compressionQuality = thumbSize;
    while (thumbImageData.length > 280000 && compressionQuality > 0.5) {
        thumbImageData = UIImageJPEGRepresentation(thumbImage, compressionQuality -= 0.1);
    }
    UIImage *aimage = [UIImage imageWithData:thumbImageData];
    return aimage;
}
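The screenWidth symbol used in the width <= 480 branch is not defined in the article; it is presumably a convenience macro along these lines (assumption):

#define screenWidth [UIScreen mainScreen].bounds.size.width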
/**
 *  Compress an avatar image
 *
 *  @param image the source image
 *
 *  @return the compressed image
 */
+ (UIImage *)scaleUserImage:(UIImage *)image {
    CGSize imageSize = image.size;
    CGFloat width = imageSize.width;
    CGFloat height = imageSize.height;
    CGFloat thumbSize = 0.8;
    CGFloat tempW = width;
    if (width > 1024) {
        tempW = 640;
    }
    CGFloat scaleFactor = 0.0;
    CGPoint thumbPoint = CGPointMake(0.0, 0.0);
    CGFloat widthFactor = tempW / width;
    CGFloat thumbHeight = tempW * (height / width);
    CGFloat heightFactor = thumbHeight / height;
    if (widthFactor > heightFactor) {
        scaleFactor = widthFactor;
    } else {
        scaleFactor = heightFactor;
    }
    CGFloat scaledWidth = width * scaleFactor;
    CGFloat scaledHeight = height * scaleFactor;
    if (widthFactor > heightFactor) {
        thumbPoint.y = (thumbHeight - scaledHeight) * 0.5;
    } else if (widthFactor < heightFactor) {
        thumbPoint.x = (tempW - scaledWidth) * 0.5;
    }
    UIGraphicsBeginImageContext(CGSizeMake(tempW, thumbHeight));
    CGRect thumbRect = CGRectZero;
    thumbRect.origin = thumbPoint;
    thumbRect.size.width = scaledWidth;
    thumbRect.size.height = scaledHeight;
    [image drawInRect:thumbRect];
    UIImage *thumbImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    // Re-encode as JPEG, stepping the quality down until the data is under ~280 KB
    NSData *thumbImageData = UIImageJPEGRepresentation(thumbImage, thumbSize);
    CGFloat compressionQuality = thumbSize;
    while (thumbImageData.length > 280000 && compressionQuality > 0.5) {
        thumbImageData = UIImageJPEGRepresentation(thumbImage, compressionQuality -= 0.1);
    }
    UIImage *aimage = [UIImage imageWithData:thumbImageData];
    return aimage;
}
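A hedged sketch of preparing an avatar for upload with the method above; pickedImage and the upload step are placeholders, not part of the original article:

UIImage *avatar = [UIImage scaleUserImage:pickedImage];    // pickedImage: e.g. from UIImagePickerController
NSData *payload = UIImageJPEGRepresentation(avatar, 0.8f); // data to hand to your upload code
NSLog(@"avatar payload: %lu bytes", (unsigned long)payload.length);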
/**
 *  Apply a frosted-glass (blur) effect to the image
 *
 *  @param blur blur level, 0.0 ~ 1.0
 *
 *  @return the blurred image
 */
- (UIImage *)blurryImageWithBlurLevel:(CGFloat)blur {
    // Box kernel size must be odd
    int boxSize = (int)(blur * 40);
    boxSize = boxSize - (boxSize % 2) + 1;
    CGImageRef img = self.CGImage;
    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    void *pixelBuffer;
    // Create a vImage_Buffer with the data from the CGImageRef
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);
    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);
    // Create a vImage_Buffer for the output
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    if (pixelBuffer == NULL) {
        NSLog(@"No pixelbuffer");
    }
    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);
    // Perform the box convolution three times to approximate a Gaussian blur
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend)
        ?: vImageBoxConvolve_ARGB8888(&outBuffer, &inBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend)
        ?: vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }
    // Build a UIImage back from the blurred buffer
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             (CGBitmapInfo)kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];
    // Clean up
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(pixelBuffer);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);
    return returnImage;
}
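A minimal sketch of applying the blur; the asset name and the image view outlet are hypothetical:

UIImage *original = [UIImage imageNamed:@"background"];       // hypothetical asset name
UIImage *frosted = [original blurryImageWithBlurLevel:0.5f];  // blur level 0.0 ~ 1.0
self.backgroundImageView.image = frosted;                     // hypothetical outlet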
Remember to import the frameworks this implementation depends on: the vImage calls require Accelerate, and the UIImage/Core Graphics work is covered by UIKit.
#import <UIKit/UIKit.h>
#import <Accelerate/Accelerate.h>
That wraps up this look at image processing on iOS, frosted-glass blurring and compression included. I hope it proves helpful in your own projects.