Three Ways to Implement Frosted-Glass and Image Blur Effects in iOS


App designs often call for blur or frosted-glass effects, and iOS already provides APIs that make them straightforward to implement. Below are three approaches.


Method 1: Blurring with Core Image

#import <CoreImage/CoreImage.h>

- (UIImage *)blurryImage:(UIImage *)image
           withBlurLevel:(CGFloat)blur {
    CIImage *inputImage = [CIImage imageWithCGImage:image.CGImage];
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"
                                  keysAndValues:kCIInputImageKey, inputImage,
                                                @"inputRadius", @(blur),
                                                nil];

    CIImage *outputImage = filter.outputImage;

    // self.context is a CIContext cached on the class (see the usage sketch below)
    CGImageRef outImage = [self.context createCGImage:outputImage
                                             fromRect:[outputImage extent]];
    UIImage *result = [UIImage imageWithCGImage:outImage];
    CGImageRelease(outImage); // createCGImage: follows the Create rule, so release it
    return result;
}
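
The method above reads self.context, which is not defined in the snippet; it is expected to be a CIContext cached on the class. A rough usage sketch under that assumption (the context property, the imageView outlet, and the radius value are illustrative, not part of the original code):

- (void)showBlurredPhoto {
    if (!self.context) {
        // creating a CIContext is expensive, so build it once and reuse it
        self.context = [CIContext contextWithOptions:nil];
    }
    self.imageView.image = [self blurryImage:self.imageView.image
                               withBlurLevel:5.0];
}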

Method 2: Blurring with the vImage API (Accelerate framework)

#import <Accelerate/Accelerate.h>

- (UIImage *)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur {
    // clamp the blur level to [0, 1] and map it to a box-filter size;
    // vImageBoxConvolve requires an odd kernel dimension
    if (blur < 0.f || blur > 1.f) {
        blur = 0.5f;
    }
    int boxSize = (int)(blur * 100);
    boxSize = boxSize - (boxSize % 2) + 1;

    CGImageRef img = image.CGImage;  

    vImage_Buffer inBuffer, outBuffer;  
    vImage_Error error;  

    void *pixelBuffer;  

    CGDataProviderRef inProvider = CGImageGetDataProvider(img);  
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);  

    inBuffer.width = CGImageGetWidth(img);  
    inBuffer.height = CGImageGetHeight(img);  
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);  

    inBuffer.data = (void*)CFDataGetBytePtr(inBitmapData);  

    pixelBuffer = malloc(CGImageGetBytesPerRow(img) *   
                         CGImageGetHeight(img));  

    if(pixelBuffer == NULL)  
        NSLog(@"No pixelbuffer");  

    outBuffer.data = pixelBuffer;  
    outBuffer.width = CGImageGetWidth(img);  
    outBuffer.height = CGImageGetHeight(img);  
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);  

    error = vImageBoxConvolve_ARGB8888(&inBuffer,   
                                       &outBuffer,   
                                       NULL,   
                                       0,   
                                       0,   
                                       boxSize,   
                                       boxSize,   
                                       NULL,   
                                       kvImageEdgeExtend);  


    if (error) {  
        NSLog(@"error from convolution %ld", error);  
    }  

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();  
    CGContextRef ctx = CGBitmapContextCreate(  
                                    outBuffer.data,  
                                    outBuffer.width,  
                                    outBuffer.height,  
                                    8,  
                                    outBuffer.rowBytes,  
                                    colorSpace,  
                                    kCGImageAlphaNoneSkipLast);  
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // clean up
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);

    free(pixelBuffer);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);

    return returnImage;  
}  
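
Box convolution over a full-size photo is CPU-heavy, so callers usually run it off the main thread. A rough usage sketch (the imageView property and the "photo" asset name are assumptions for illustration):

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    UIImage *blurred = [self blurryImage:[UIImage imageNamed:@"photo"]
                           withBlurLevel:0.6f];
    dispatch_async(dispatch_get_main_queue(), ^{
        // UIKit must only be touched on the main thread
        self.imageView.image = blurred;
    });
});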

Method 3: A frosted-glass effect found online (implemented as a UIImage category)

// Internal method containing the core code; it wraps the frosted-glass effect.
// Parameters: blur radius, tint color, saturation delta factor, and an optional mask image.
// Like Method 2, this relies on the Accelerate framework (vImage).
- (UIImage *)imageBluredWithRadius:(CGFloat)blurRadius tintColor:(UIColor *)tintColor saturationDeltaFactor:(CGFloat)saturationDeltaFactor maskImage:(UIImage *)maskImage {
    CGRect imageRect = { CGPointZero, self.size };
    UIImage *effectImage = self;
    BOOL hasBlur = blurRadius > __FLT_EPSILON__;
    BOOL hasSaturationChange = fabs(saturationDeltaFactor - 1.) > __FLT_EPSILON__;
    if (hasBlur || hasSaturationChange) {
        UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
        CGContextRef effectInContext = UIGraphicsGetCurrentContext();
        CGContextScaleCTM(effectInContext, 1.0, -1.0);
        CGContextTranslateCTM(effectInContext, 0, -self.size.height);
        CGContextDrawImage(effectInContext, imageRect, self.CGImage);

        vImage_Buffer effectInBuffer;
        effectInBuffer.data     = CGBitmapContextGetData(effectInContext);
        effectInBuffer.width    = CGBitmapContextGetWidth(effectInContext);
        effectInBuffer.height   = CGBitmapContextGetHeight(effectInContext);
        effectInBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectInContext);

        UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
        CGContextRef effectOutContext = UIGraphicsGetCurrentContext();
        vImage_Buffer effectOutBuffer;
        effectOutBuffer.data     = CGBitmapContextGetData(effectOutContext);
        effectOutBuffer.width    = CGBitmapContextGetWidth(effectOutContext);
        effectOutBuffer.height   = CGBitmapContextGetHeight(effectOutContext);
        effectOutBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectOutContext);

        if (hasBlur) {
            CGFloat inputRadius = blurRadius * [[UIScreen mainScreen] scale];
            NSUInteger radius = floor(inputRadius * 3. * sqrt(2 * M_PI) / 4 + 0.5);
            if (radius % 2 != 1) {
                radius += 1; // force radius to be odd so that the three box-blur methodology works.
            }
            vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, (short)radius, (short)radius, 0, kvImageEdgeExtend);
            vImageBoxConvolve_ARGB8888(&effectOutBuffer, &effectInBuffer, NULL, 0, 0, (short)radius, (short)radius, 0, kvImageEdgeExtend);
            vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, (short)radius, (short)radius, 0, kvImageEdgeExtend);
        }
        BOOL effectImageBuffersAreSwapped = NO;
        if (hasSaturationChange) {
            CGFloat s = saturationDeltaFactor;
            CGFloat floatingPointSaturationMatrix[] = {
                0.0722 + 0.9278 * s,  0.0722 - 0.0722 * s,  0.0722 - 0.0722 * s,  0,
                0.7152 - 0.7152 * s,  0.7152 + 0.2848 * s,  0.7152 - 0.7152 * s,  0,
                0.2126 - 0.2126 * s,  0.2126 - 0.2126 * s,  0.2126 + 0.7873 * s,  0,
                0,                    0,                    0,  1,
            };
            const int32_t divisor = 256;
            NSUInteger matrixSize = sizeof(floatingPointSaturationMatrix)/sizeof(floatingPointSaturationMatrix[0]);
            int16_t saturationMatrix[matrixSize];
            for (NSUInteger i = 0; i < matrixSize; ++i) {
                saturationMatrix[i] = (int16_t)roundf(floatingPointSaturationMatrix[i] * divisor);
            }
            if (hasBlur) {
                vImageMatrixMultiply_ARGB8888(&effectOutBuffer, &effectInBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags);
                effectImageBuffersAreSwapped = YES;
            }
            else {
                vImageMatrixMultiply_ARGB8888(&effectInBuffer, &effectOutBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags);
            }
        }
        if (!effectImageBuffersAreSwapped)
            effectImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        if (effectImageBuffersAreSwapped)
            effectImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
    }

    // open a context for drawing the output image
    UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
    CGContextRef outputContext = UIGraphicsGetCurrentContext();
    CGContextScaleCTM(outputContext, 1.0, -1.0);
    CGContextTranslateCTM(outputContext, 0, -self.size.height);

    // draw the original image as the base layer
    CGContextDrawImage(outputContext, imageRect, self.CGImage);

    // draw the blurred image on top
    if (hasBlur) {
        CGContextSaveGState(outputContext);
        if (maskImage) {
            CGContextClipToMask(outputContext, imageRect, maskImage.CGImage);
        }
        CGContextDrawImage(outputContext, imageRect, effectImage.CGImage);
        CGContextRestoreGState(outputContext);
    }

    // add the tint color overlay
    if (tintColor) {
        CGContextSaveGState(outputContext);
        CGContextSetFillColorWithColor(outputContext, tintColor.CGColor);
        CGContextFillRect(outputContext, imageRect);
        CGContextRestoreGState(outputContext);
    }

    // produce the final image and close the context
    UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return outputImage;
}
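
A rough usage sketch for the category method above. The radius, tint, and saturation values loosely follow the familiar "light" frosted-glass look; the asset name and imageView are assumptions for illustration:

UIImage *source = [UIImage imageNamed:@"photo"];
UIImage *frosted = [source imageBluredWithRadius:30
                                       tintColor:[UIColor colorWithWhite:1.0 alpha:0.3]
                           saturationDeltaFactor:1.8
                                       maskImage:nil];
self.imageView.image = frosted;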



