iOS8之上已经有相应的控件了,所以这个不考虑。但是iOS7怎么实现局部毛玻璃效果。我也知道github有相应的框架。但是看了几个都是全部图片毛玻璃效果,还有的就是文件太大。就算使用了也是得不偿失,请问大神方法
iOS 7 情况下使用图片的局部毛玻璃效果
你试试 ImageEffect 这个库。http://stackoverflow.com/questions/11601166/iphone-sdk-frosted-glass-ios-7-blur-effect
// Blurs the receiver with a Gaussian-like blur approximated by three box
// convolutions (Accelerate/vImage), usable on iOS 7.
//
// blurPercent: blur strength in [0, 1]; values outside the range fall back
//              to 0.5 (same fallback as the original implementation).
// Returns: a new blurred UIImage, or self if a pixel buffer could not be
//          allocated. Note: the JPEG round-trip below discards alpha.
- (UIImage *)blurPercent:(CGFloat)blurPercent
{
    // Out-of-range input falls back to medium strength.
    if (blurPercent < 0.f || blurPercent > 1.f) {
        blurPercent = 0.5f;
    }

    // Re-encode as JPEG and decode again so the bitmap layout is predictable
    // for the ARGB8888 convolve below (this drops any alpha channel).
    NSData *imageData = UIImageJPEGRepresentation(self, 1); // convert to jpeg
    UIImage *destImage = [UIImage imageWithData:imageData];

    // vImageBoxConvolve requires an odd kernel size; map [0,1] -> {1,3,...,41}.
    int boxSize = (int)(blurPercent * 40);
    boxSize = boxSize - (boxSize % 2) + 1;

    CGImageRef img = destImage.CGImage;

    vImage_Buffer inBuffer, outBuffer, outBuffer2;
    vImage_Error error;

    // Create the input vImage_Buffer from the CGImage's pixel data.
    // CFDataGetBytePtr returns memory that must be treated as read-only,
    // so the convolution passes below never write back into inBuffer.
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);
    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    size_t bufferSize = CGImageGetBytesPerRow(img) * CGImageGetHeight(img);

    // Output buffer plus an intermediate buffer for the ping-pong passes.
    // Bail out (returning the unblurred receiver) instead of dereferencing
    // NULL when either allocation fails.
    void *pixelBuffer = malloc(bufferSize);
    void *pixelBuffer2 = malloc(bufferSize);
    if (pixelBuffer == NULL || pixelBuffer2 == NULL) {
        NSLog(@"No pixelbuffer");
        free(pixelBuffer);   // free(NULL) is a no-op, so this is safe
        free(pixelBuffer2);
        CFRelease(inBitmapData);
        return self;
    }

    outBuffer.data = pixelBuffer;
    outBuffer.width = inBuffer.width;
    outBuffer.height = inBuffer.height;
    outBuffer.rowBytes = inBuffer.rowBytes;

    outBuffer2.data = pixelBuffer2;
    outBuffer2.width = inBuffer.width;
    outBuffer2.height = inBuffer.height;
    outBuffer2.rowBytes = inBuffer.rowBytes;

    // Three box-convolution passes approximate a Gaussian blur. The passes
    // ping-pong between the two owned buffers; the original code's second
    // pass wrote into the CFData-backed inBuffer, which is undefined behavior.
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", (long)error);
    }
    error = vImageBoxConvolve_ARGB8888(&outBuffer, &outBuffer2, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", (long)error);
    }
    error = vImageBoxConvolve_ARGB8888(&outBuffer2, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", (long)error);
    }

    // Wrap the final pixels (in outBuffer) in a CGImage / UIImage.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             (CGBitmapInfo)kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up. CGBitmapContextCreateImage copied the pixels, so the raw
    // buffers can be freed before the returned image is used.
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(pixelBuffer);
    free(pixelBuffer2);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);

    return returnImage;
}