iOS Frosted Glass Effect

 

New iOS 7 API: Implementing a Frosted Glass Effect (reposted)

 
Original image: [image]
Blurred result: [image]
Implementation: first link Accelerate.framework into your project, then add the following two files.

UIImage+ImageEffects.h

#import <UIKit/UIKit.h>
@interface UIImage (ImageEffects)
- (UIImage *)applyLightEffect;
- (UIImage *)applyExtraLightEffect;
- (UIImage *)applyDarkEffect;
- (UIImage *)applyTintEffectWithColor:(UIColor *)tintColor;
- (UIImage *)applyBlurWithRadius:(CGFloat)blurRadius tintColor:(UIColor *)tintColor saturationDeltaFactor:(CGFloat)saturationDeltaFactor maskImage:(UIImage *)maskImage;
@end

UIImage+ImageEffects.m

#import "UIImage+ImageEffects.h"
#import <Accelerate/Accelerate.h>
#import <float.h>
@implementation UIImage (ImageEffects)
- (UIImage *)applyLightEffect
{
    UIColor *tintColor = [UIColor colorWithWhite:1.0 alpha:0.3];
    return [self applyBlurWithRadius:30 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil];
}

- (UIImage *)applyExtraLightEffect
{
    UIColor *tintColor = [UIColor colorWithWhite:0.97 alpha:0.82];
    return [self applyBlurWithRadius:20 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil];
}

- (UIImage *)applyDarkEffect
{
    UIColor *tintColor = [UIColor colorWithWhite:0.11 alpha:0.73];
    return [self applyBlurWithRadius:20 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil];
}

- (UIImage *)applyTintEffectWithColor:(UIColor *)tintColor
{
    const CGFloat EffectColorAlpha = 0.6;
    UIColor *effectColor = tintColor;
    int componentCount = CGColorGetNumberOfComponents(tintColor.CGColor);
    if (componentCount == 2) {
        CGFloat b;
        if ([tintColor getWhite:&b alpha:NULL]) {
            effectColor = [UIColor colorWithWhite:b alpha:EffectColorAlpha];
        }
    }
    else {
        CGFloat r, g, b;
        if ([tintColor getRed:&r green:&g blue:&b alpha:NULL]) {
            effectColor = [UIColor colorWithRed:r green:g blue:b alpha:EffectColorAlpha];
        }
    }
    return [self applyBlurWithRadius:10 tintColor:effectColor saturationDeltaFactor:-1.0 maskImage:nil];
}
- (UIImage *)applyBlurWithRadius:(CGFloat)blurRadius tintColor:(UIColor *)tintColor saturationDeltaFactor:(CGFloat)saturationDeltaFactor maskImage:(UIImage *)maskImage
{
    // Check pre-conditions.
    if (self.size.width < 1 || self.size.height < 1) {
        NSLog(@"*** error: invalid size: (%.2f x %.2f). Both dimensions must be >= 1: %@", self.size.width, self.size.height, self);
        return nil;
    }
    if (!self.CGImage) {
        NSLog(@"*** error: image must be backed by a CGImage: %@", self);
        return nil;
    }
    if (maskImage && !maskImage.CGImage) {
        NSLog(@"*** error: maskImage must be backed by a CGImage: %@", maskImage);
        return nil;
    }

    CGRect imageRect = { CGPointZero, self.size };
    UIImage *effectImage = self;

    BOOL hasBlur = blurRadius > __FLT_EPSILON__;
    BOOL hasSaturationChange = fabs(saturationDeltaFactor - 1.) > __FLT_EPSILON__;
    if (hasBlur || hasSaturationChange) {
        UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
        CGContextRef effectInContext = UIGraphicsGetCurrentContext();
        CGContextScaleCTM(effectInContext, 1.0, -1.0);
        CGContextTranslateCTM(effectInContext, 0, -self.size.height);
        CGContextDrawImage(effectInContext, imageRect, self.CGImage);

        vImage_Buffer effectInBuffer;
        effectInBuffer.data     = CGBitmapContextGetData(effectInContext);
        effectInBuffer.width    = CGBitmapContextGetWidth(effectInContext);
        effectInBuffer.height   = CGBitmapContextGetHeight(effectInContext);
        effectInBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectInContext);

        UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
        CGContextRef effectOutContext = UIGraphicsGetCurrentContext();
        vImage_Buffer effectOutBuffer;
        effectOutBuffer.data     = CGBitmapContextGetData(effectOutContext);
        effectOutBuffer.width    = CGBitmapContextGetWidth(effectOutContext);
        effectOutBuffer.height   = CGBitmapContextGetHeight(effectOutContext);
        effectOutBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectOutContext);

        if (hasBlur) {
            // A description of how to compute the box kernel width from the Gaussian
            // radius (aka standard deviation) appears in the SVG spec:
            // http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement
            //
            // For larger values of 's' (s >= 2.0), an approximation can be used: Three
            // successive box-blurs build a piece-wise quadratic convolution kernel, which
            // approximates the Gaussian kernel to within roughly 3%.
            //
            // let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)
            //
            // ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.
            //
            CGFloat inputRadius = blurRadius * [[UIScreen mainScreen] scale];
            NSUInteger radius = floor(inputRadius * 3. * sqrt(2 * M_PI) / 4 + 0.5);
            if (radius % 2 != 1) {
                radius += 1; // force radius to be odd so that the three box-blur methodology works.
            }
            vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, radius, radius, 0, kvImageEdgeExtend);
            vImageBoxConvolve_ARGB8888(&effectOutBuffer, &effectInBuffer, NULL, 0, 0, radius, radius, 0, kvImageEdgeExtend);
            vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, radius, radius, 0, kvImageEdgeExtend);
        }

        BOOL effectImageBuffersAreSwapped = NO;
        if (hasSaturationChange) {
            CGFloat s = saturationDeltaFactor;
            CGFloat floatingPointSaturationMatrix[] = {
                0.0722 + 0.9278 * s,  0.0722 - 0.0722 * s,  0.0722 - 0.0722 * s,  0,
                0.7152 - 0.7152 * s,  0.7152 + 0.2848 * s,  0.7152 - 0.7152 * s,  0,
                0.2126 - 0.2126 * s,  0.2126 - 0.2126 * s,  0.2126 + 0.7873 * s,  0,
                                  0,                    0,                    0,  1,
            };
            const int32_t divisor = 256;
            NSUInteger matrixSize = sizeof(floatingPointSaturationMatrix)/sizeof(floatingPointSaturationMatrix[0]);
            int16_t saturationMatrix[matrixSize];
            for (NSUInteger i = 0; i < matrixSize; ++i) {
                saturationMatrix[i] = (int16_t)roundf(floatingPointSaturationMatrix[i] * divisor);
            }
            if (hasBlur) {
                vImageMatrixMultiply_ARGB8888(&effectOutBuffer, &effectInBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags);
                effectImageBuffersAreSwapped = YES;
            }
            else {
                vImageMatrixMultiply_ARGB8888(&effectInBuffer, &effectOutBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags);
            }
        }

        if (!effectImageBuffersAreSwapped)
            effectImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        if (effectImageBuffersAreSwapped)
            effectImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
    }
    // Set up output context.
    UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
    CGContextRef outputContext = UIGraphicsGetCurrentContext();
    CGContextScaleCTM(outputContext, 1.0, -1.0);
    CGContextTranslateCTM(outputContext, 0, -self.size.height);

    // Draw base image.
    CGContextDrawImage(outputContext, imageRect, self.CGImage);

    // Draw effect image.
    if (hasBlur) {
        CGContextSaveGState(outputContext);
        if (maskImage) {
            CGContextClipToMask(outputContext, imageRect, maskImage.CGImage);
        }
        CGContextDrawImage(outputContext, imageRect, effectImage.CGImage);
        CGContextRestoreGState(outputContext);
    }

    // Add in color tint.
    if (tintColor) {
        CGContextSaveGState(outputContext);
        CGContextSetFillColorWithColor(outputContext, tintColor.CGColor);
        CGContextFillRect(outputContext, imageRect);
        CGContextRestoreGState(outputContext);
    }

    // Output image is ready.
    UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return outputImage;
}
@end

Usage:

UIImageView *me = [[UIImageView alloc] initWithFrame:CGRectMake(10, 480, 614, 381)];
[me setImage:[[UIImage imageNamed:@"me.png"] applyBlurWithRadius:5
                                                       tintColor:[UIColor colorWithWhite:1 alpha:0.2]
                                           saturationDeltaFactor:1.8
                                                       maskImage:nil]];
[self.view addSubview:me];
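In practice you will often reach for one of the convenience wrappers (applyLightEffect, applyDarkEffect, and so on) instead of spelling out the full applyBlurWithRadius: call, and since the blur runs on the CPU via vImage it can be worth moving it off the main thread for large images. Below is a minimal sketch of that idea; the method name showBlurredImage, the sourceImage/imageView variables, and the background-queue dispatch are my own illustrative assumptions, not part of the original post.

// Hypothetical helper inside a view controller: blur on a background queue, display on the main queue.
- (void)showBlurredImage
{
    UIImage *sourceImage = [UIImage imageNamed:@"me.png"];
    UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(10, 480, 614, 381)];
    [self.view addSubview:imageView];

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // The vImage box-blur passes are CPU-bound, so keep them off the main thread.
        UIImage *blurred = [sourceImage applyLightEffect]; // convenience wrapper from the category above
        dispatch_async(dispatch_get_main_queue(), ^{
            // UIKit views must only be touched on the main thread.
            imageView.image = blurred;
        });
    });
}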

OK, it's that easy!

 
