Implementing a Frosted-Glass (Blur) Effect on iOS
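The method below blurs a UIImage with vImageBoxConvolve_ARGB8888 from the Accelerate framework: the image's pixel data is copied into a vImage buffer, run through a box filter three times (repeated box passes closely approximate a Gaussian blur), and the result is rebuilt into a UIImage through a Core Graphics bitmap context.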
// Requires the Accelerate framework:
#import <Accelerate/Accelerate.h>

// Blur an image: image is the source, blur is the blur level (0.1 - 2.0).
- (UIImage *)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur {
    // Fall back to a moderate blur if the level is out of range.
    if ((blur < 0.1f) || (blur > 2.0f)) {
        blur = 0.5f;
    }
    // The convolution kernel size must be positive and odd, so scale the
    // blur level up and force boxSize to an odd value.
    int boxSize = (int)(blur * 100);
    boxSize -= (boxSize % 2) + 1;
    NSLog(@"boxSize:%i", boxSize);
    // Image processing is done with the Accelerate framework, which
    // provides C APIs for vector and matrix math, digital signal
    // processing, large-number handling, and image processing.
    CGImageRef img = image.CGImage;
    // vImage input and output buffers.
    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    // Raw pixel storage backing the output buffer.
    void *pixelBuffer;
    // The data provider is an opaque type that supplies Quartz with data;
    // copy the provider's bytes so vImage can read them.
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);
    // Describe the input image: width, height, bytes per row, pixel data.
    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);
    // Allocate the output pixel buffer: bytes per row * image height.
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);
    // A third, intermediate buffer: applying the box filter three times
    // closely approximates a Gaussian blur and smooths boxy artifacts.
    void *pixelBuffer2 = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    vImage_Buffer outBuffer2;
    outBuffer2.data = pixelBuffer2;
    outBuffer2.width = CGImageGetWidth(img);
    outBuffer2.height = CGImageGetHeight(img);
    outBuffer2.rowBytes = CGImageGetBytesPerRow(img);
    // vImageBoxConvolve_ARGB8888 convolves a region of interest within an
    // ARGB8888 source image by an implicit M x N kernel that acts as a box
    // filter. Ping-pong between the buffers for three passes; only the last
    // pass's error code is checked here.
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer2, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    error = vImageBoxConvolve_ARGB8888(&outBuffer2, &inBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error != kvImageNoError) {
        NSLog(@"error from convolution %ld", error);
    }
    // NSLog(@"bits per component: %zu", CGImageGetBitsPerComponent(img));
    // Create a bitmap context in the device RGB color space from the
    // blurred output buffer, 8 bits per component.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             CGImageGetBitmapInfo(image.CGImage));
    // Rebuild a UIImage from the processed bitmap context.
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];
    // Clean up.
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(pixelBuffer);
    free(pixelBuffer2);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);
    return returnImage;
}
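A minimal usage sketch; the asset name "photo.jpg" and the backgroundImageView outlet are hypothetical placeholders:

// Hypothetical call site: blur a bundled image and display it.
UIImage *source = [UIImage imageNamed:@"photo.jpg"]; // placeholder asset name
UIImage *blurred = [self blurryImage:source withBlurLevel:0.8f];
self.backgroundImageView.image = blurred; // hypothetical UIImageView property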
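For comparison: on iOS 8 and later, UIKit provides a native frosted-glass component, UIVisualEffectView. A minimal sketch of overlaying a system blur on an existing view:

// Overlay a system blur ("frosted glass") on an existing view.
UIBlurEffect *effect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];
UIVisualEffectView *effectView = [[UIVisualEffectView alloc] initWithEffect:effect];
effectView.frame = self.view.bounds; // cover the entire view
[self.view addSubview:effectView];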