GPImage => a category on UIImage
Some utility methods for UIImage: scaling, cropping, rotation, orientation fixing, rounded corners and borders, and alpha handling.
======================================================
//
// GPImage.h
//
// Created by Gaurav D. Sharma & Piyush Kashyap
// Date 11/06/12.
//
#import <UIKit/UIKit.h>
@interface UIImage (Gaurav)
// --- CS_Extensions
- (UIImage *)imageAtRect:(CGRect)rect;
- (UIImage *)imageByScalingProportionallyToMinimumSize:(CGSize)targetSize;
- (UIImage *)imageByScalingProportionallyToSize:(CGSize)targetSize;
- (UIImage *)imageByScalingToSize:(CGSize)targetSize;
- (UIImage *)imageRotatedByRadians:(CGFloat)radians;
- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees;
- (UIImage *)fixOrientation;
// --- RoundedCorner & Border
- (UIImage *)imageWithBorderFromImage;
- (UIImage *)roundedCornerImage:(NSInteger)cornerSize borderSize:(NSInteger)borderSize;
- (UIImage *) normalize;
// --- UIImageAdditions
/*
* Alternative to using imageNamed:, which caches
* images and doesn't clear the cache.
*/
+ (UIImage *)newImageFromResource:(NSString *)filename;
/*
* Creates an image from the contents of a URL
*/
+ (UIImage*)imageWithContentsOfURL:(NSURL*)url;
/*
* Scales the image to the given size
*/
- (UIImage*)scaleToSize:(CGSize)size;
/*
* Scales and crops the image to the given size
* Automatically chooses whether to crop the width or the height based on the aspect ratios
* Sides of the image will be cropped so the result is centered
*/
- (UIImage*)scaleAndCropToSize:(CGSize)size;
/*
* Scales the height and crops the width to the size
* Sides of the image will be cropped so the result is centered
*/
- (UIImage*)scaleHeightAndCropWidthToSize:(CGSize)size;
/*
* Scales the width and crops the height to the size
* Top and bottom of the image will be cropped so the result is centered
*/
- (UIImage*)scaleWidthAndCropHeightToSize:(CGSize)size;
/*
* Scales image to the size, crops to the offset
* Provide offset based on scaled size, not original size
*
* Example:
* Image is 640x480, scaling to 480x320
* This will then scale to 480x360
*
* If you want to vertically center the image, pass an offset of CGPointMake(0.0f, -20.0f):
* the top 20px and the bottom 20px are clipped, giving the desired 480x320.
* (A usage sketch follows this header.)
*/
- (UIImage*)scaleToSize:(CGSize)size withOffset:(CGPoint)offset;
// --- Alpha
- (BOOL)hasAlpha;
- (UIImage *)imageWithAlpha;
- (UIImage *)transparentBorderImage:(NSUInteger)borderSize;
@end
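Before the implementation, a quick usage sketch of the methods declared above. It is only illustrative: the resource name "photo.jpg" and the helper GPImageDemo are placeholders, not part of the category.

#import "GPImage.h"
// Hypothetical demo of the scaling, cropping and rounded-corner methods.
static void GPImageDemo(void)
{
    // Loads the file straight from the bundle, bypassing the imageNamed: cache.
    UIImage *original = [UIImage newImageFromResource:@"photo.jpg"];
    // Aspect-fit into a 100x100 box; the result is centered on a 100x100 canvas.
    UIImage *thumb = [original imageByScalingProportionallyToSize:CGSizeMake(100.0f, 100.0f)];
    // Aspect-fill and center-crop to exactly 480x320.
    UIImage *cover = [original scaleAndCropToSize:CGSizeMake(480.0f, 320.0f)];
    // The worked example from the header comment: a 640x480 source is scaled
    // to 480x360, then the (0, -20) offset clips 20px from the top and bottom,
    // leaving a vertically centered 480x320 result.
    UIImage *centered = [original scaleToSize:CGSizeMake(480.0f, 320.0f) withOffset:CGPointMake(0.0f, -20.0f)];
    // Rounded corners (8pt) plus a 2pt transparent border.
    UIImage *rounded = [cover roundedCornerImage:8 borderSize:2];
    NSLog(@"thumb %@ cover %@ centered %@ rounded %@",
          NSStringFromCGSize(thumb.size), NSStringFromCGSize(cover.size),
          NSStringFromCGSize(centered.size), NSStringFromCGSize(rounded.size));
}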
======================================================
//
// GPImage.m
//
// Created by Gaurav D. Sharma & Piyush Kashyap
// Date 11/06/12.
//
#import "GPImage.h"
@interface UIImage ()
// --- RoundedCorner
- (void)addRoundedRect2Path:(CGRect)rect context:(CGContextRef)context ovalWidth:(CGFloat)ovalWidth ovalHeight:(CGFloat)ovalHeight;
// --- Alpha
- (CGImageRef)newBorderMask:(NSUInteger)borderSize size:(CGSize)size;
@end
@implementation UIImage (Gaurav)
#pragma mark - CS_Extensions
static CGFloat DegreesToRadians(CGFloat degrees) { return degrees * M_PI / 180; }
static CGFloat RadiansToDegrees(CGFloat radians) { return radians * 180 / M_PI; }
-(UIImage *)imageAtRect:(CGRect)rect
{
CGImageRef imageRef = CGImageCreateWithImageInRect([self CGImage], rect);
UIImage* subImage = [UIImage imageWithCGImage: imageRef];
CGImageRelease(imageRef);
return subImage;
}
- (UIImage *)imageByScalingProportionallyToMinimumSize:(CGSize)targetSize {
UIImage *sourceImage = self;
UIImage *newImage = nil;
CGSize imageSize = sourceImage.size;
CGFloat width = imageSize.width;
CGFloat height = imageSize.height;
CGFloat targetWidth = targetSize.width;
CGFloat targetHeight = targetSize.height;
CGFloat scaleFactor = 0.0;
CGFloat scaledWidth = targetWidth;
CGFloat scaledHeight = targetHeight;
CGPoint thumbnailPoint = CGPointMake(0.0,0.0);
if (CGSizeEqualToSize(imageSize, targetSize) == NO) {
CGFloat widthFactor = targetWidth / width;
CGFloat heightFactor = targetHeight / height;
if (widthFactor > heightFactor)
scaleFactor = widthFactor;
else
scaleFactor = heightFactor;
scaledWidth = width * scaleFactor;
scaledHeight = height * scaleFactor;
// center the image
if (widthFactor > heightFactor) {
thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
} else if (widthFactor < heightFactor) {
thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
}
}
// this is actually the interesting part:
UIGraphicsBeginImageContext(targetSize);
CGRect thumbnailRect = CGRectZero;
thumbnailRect.origin = thumbnailPoint;
thumbnailRect.size.width = scaledWidth;
thumbnailRect.size.height = scaledHeight;
[sourceImage drawInRect:thumbnailRect];
newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
if(newImage == nil) NSLog(@"could not scale image");
return newImage ;
}
- (UIImage *)imageByScalingProportionallyToSize:(CGSize)targetSize {
UIImage *sourceImage = self;
UIImage *newImage = nil;
CGSize imageSize = sourceImage.size;
CGFloat width = imageSize.width;
CGFloat height = imageSize.height;
CGFloat targetWidth = targetSize.width;
CGFloat targetHeight = targetSize.height;
CGFloat scaleFactor = 0.0;
CGFloat scaledWidth = targetWidth;
CGFloat scaledHeight = targetHeight;
CGPoint thumbnailPoint = CGPointMake(0.0,0.0);
if (CGSizeEqualToSize(imageSize, targetSize) == NO) {
CGFloat widthFactor = targetWidth / width;
CGFloat heightFactor = targetHeight / height;
if (widthFactor < heightFactor)
scaleFactor = widthFactor;
else
scaleFactor = heightFactor;
scaledWidth = width * scaleFactor;
scaledHeight = height * scaleFactor;
// center the image
if (widthFactor < heightFactor) {
thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
} else if (widthFactor > heightFactor) {
thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
}
}
// this is actually the interesting part:
UIGraphicsBeginImageContext(targetSize);
CGRect thumbnailRect = CGRectZero;
thumbnailRect.origin = thumbnailPoint;
thumbnailRect.size.width = scaledWidth;
thumbnailRect.size.height = scaledHeight;
[sourceImage drawInRect:thumbnailRect];
newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
if(newImage == nil) NSLog(@"could not scale image");
return newImage ;
}
- (UIImage *)imageByScalingToSize:(CGSize)targetSize {
UIImage *sourceImage = self;
UIImage *newImage = nil;
CGFloat targetWidth = targetSize.width;
CGFloat targetHeight = targetSize.height;
CGFloat scaledWidth = targetWidth;
CGFloat scaledHeight = targetHeight;
CGPoint thumbnailPoint = CGPointMake(0.0,0.0);
// this is actually the interesting part:
UIGraphicsBeginImageContext(targetSize);
CGRect thumbnailRect = CGRectZero;
thumbnailRect.origin = thumbnailPoint;
thumbnailRect.size.width = scaledWidth;
thumbnailRect.size.height = scaledHeight;
[sourceImage drawInRect:thumbnailRect];
newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
if(newImage == nil) NSLog(@"could not scale image");
return newImage ;
}
- (UIImage *)imageRotatedByRadians:(CGFloat)radians
{
return [self imageRotatedByDegrees:RadiansToDegrees(radians)];
}
- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees
{
// calculate the size of the rotated view's containing box for our drawing space
UIView *rotatedViewBox = [[UIView alloc] initWithFrame:CGRectMake(0,0,self.size.width, self.size.height)];
CGAffineTransform t = CGAffineTransformMakeRotation(DegreesToRadians(degrees));
rotatedViewBox.transform = t;
CGSize rotatedSize = rotatedViewBox.frame.size;
// Create the bitmap context
UIGraphicsBeginImageContext(rotatedSize);
CGContextRef bitmap = UIGraphicsGetCurrentContext();
// Move the origin to the middle of the image so we will rotate and scale around the center.
CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2);
// Rotate the image context
CGContextRotateCTM(bitmap, DegreesToRadians(degrees));
// Now, draw the rotated/scaled image into the context
CGContextScaleCTM(bitmap, 1.0, -1.0);
CGContextDrawImage(bitmap, CGRectMake(-self.size.width / 2, -self.size.height / 2, self.size.width, self.size.height), [self CGImage]);
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return newImage;
}
- (UIImage *)fixOrientation {
// No-op if the orientation is already correct
if (self.imageOrientation == UIImageOrientationUp) return self;
// We need to calculate the proper transformation to make the image upright.
// We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
CGAffineTransform transform = CGAffineTransformIdentity;
switch (self.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, self.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
case UIImageOrientationUp:
case UIImageOrientationUpMirrored:
break;
}
switch (self.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, self.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationUp:
case UIImageOrientationDown:
case UIImageOrientationLeft:
case UIImageOrientationRight:
break;
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,
CGImageGetBitsPerComponent(self.CGImage), 0,
CGImageGetColorSpace(self.CGImage),
CGImageGetBitmapInfo(self.CGImage));
CGContextConcatCTM(ctx, transform);
switch (self.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Grr...
CGContextDrawImage(ctx, CGRectMake(0,0,self.size.height,self.size.width), self.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,self.size.width,self.size.height), self.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
#pragma mark - RoundedCorner
- (UIImage *)imageWithBorderFromImage
{
CGSize size = [self size];
UIGraphicsBeginImageContext(size);
CGRect rect = CGRectMake(0, 0, size.width, size.height);
[self drawInRect:rect blendMode:kCGBlendModeNormal alpha:1.0];
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetRGBStrokeColor(context, 1.0, 0.5, 1.0, 1.0);
CGContextStrokeRect(context, rect);
UIImage *borderedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return borderedImage;
}
// Creates a copy of this image with rounded corners
// If borderSize is non-zero, a transparent border of the given size will also be added
// Original author: Björn Sållarp. Used with permission. See: http://blog.sallarp.com/iphone-uiimage-round-corners/
- (UIImage *)roundedCornerImage:(NSInteger)cornerSize borderSize:(NSInteger)borderSize {
// If the image does not have an alpha layer, add one
UIImage *image = [self imageWithAlpha];
// Build a context that's the same dimensions as the new size
CGContextRef context = CGBitmapContextCreate(NULL,
image.size.width,
image.size.height,
CGImageGetBitsPerComponent(image.CGImage),
0,
CGImageGetColorSpace(image.CGImage),
CGImageGetBitmapInfo(image.CGImage));
// Create a clipping path with rounded corners
CGContextBeginPath(context);
[self addRoundedRect2Path:CGRectMake(borderSize, borderSize, image.size.width - borderSize * 2, image.size.height - borderSize * 2)
context:context
ovalWidth:cornerSize
ovalHeight:cornerSize];
CGContextClosePath(context);
CGContextClip(context);
// Draw the image to the context; the clipping path will make anything outside the rounded rect transparent
CGContextDrawImage(context, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage);
// Create a CGImage from the context
CGImageRef clippedImage = CGBitmapContextCreateImage(context);
CGContextRelease(context);
// Create a UIImage from the CGImage
UIImage *roundedImage = [UIImage imageWithCGImage:clippedImage];
CGImageRelease(clippedImage);
return roundedImage;
}
- (UIImage *) normalize {
CGColorSpaceRef genericColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef thumbBitmapCtxt = CGBitmapContextCreate(NULL,
self.size.width,
self.size.height,
8, 0, // a bytesPerRow of 0 lets CoreGraphics choose a valid value
genericColorSpace,
kCGImageAlphaPremultipliedFirst);
CGColorSpaceRelease(genericColorSpace);
CGContextSetInterpolationQuality(thumbBitmapCtxt, kCGInterpolationDefault);
CGRect destRect = CGRectMake(0, 0, self.size.width, self.size.height);
CGContextDrawImage(thumbBitmapCtxt, destRect, self.CGImage);
CGImageRef tmpThumbImage = CGBitmapContextCreateImage(thumbBitmapCtxt);
CGContextRelease(thumbBitmapCtxt);
UIImage *result = [UIImage imageWithCGImage:tmpThumbImage];
CGImageRelease(tmpThumbImage);
return result;
}
#pragma mark helper method
// Adds a rectangular path to the given context and rounds its corners by the given extents
// Original author: Björn Sållarp. Used with permission. See: http://blog.sallarp.com/iphone-uiimage-round-corners/
- (void)addRoundedRect2Path:(CGRect)rect context:(CGContextRef)context ovalWidth:(CGFloat)ovalWidth ovalHeight:(CGFloat)ovalHeight {
if (ovalWidth == 0 || ovalHeight == 0) {
CGContextAddRect(context, rect);
return;
}
CGContextSaveGState(context);
CGContextTranslateCTM(context, CGRectGetMinX(rect), CGRectGetMinY(rect));
CGContextScaleCTM(context, ovalWidth, ovalHeight);
CGFloat fw = CGRectGetWidth(rect) / ovalWidth;
CGFloat fh = CGRectGetHeight(rect) / ovalHeight;
CGContextMoveToPoint(context, fw, fh/2);
CGContextAddArcToPoint(context, fw, fh, fw/2, fh, 1);
CGContextAddArcToPoint(context, 0, fh, 0, fh/2, 1);
CGContextAddArcToPoint(context, 0, 0, fw/2, 0, 1);
CGContextAddArcToPoint(context, fw, 0, fw, fh/2, 1);
CGContextClosePath(context);
CGContextRestoreGState(context);
}
#pragma mark - UIImageAdditions
+ (UIImage *)newImageFromResource:(NSString *)filename {
NSString *imageFile = [NSString stringWithFormat:@"%@/%@", [[NSBundle mainBundle] resourcePath], filename];
return [[UIImage alloc] initWithContentsOfFile:imageFile];
}
+ (UIImage*)imageWithContentsOfURL:(NSURL*)url {
NSError* error = nil;
NSData* data = [NSData dataWithContentsOfURL:url options:0 error:&error];
if(!data) {
return nil;
}
return [UIImage imageWithData:data];
}
- (UIImage*)scaleToSize:(CGSize)size {
UIGraphicsBeginImageContext(size);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(context, 0.0, size.height);
CGContextScaleCTM(context, 1.0, -1.0);
CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, size.width, size.height), self.CGImage);
UIImage* scaledImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return scaledImage;
}
- (UIImage*)scaleAndCropToSize:(CGSize)size {
if(size.height > size.width) {
if(self.size.height > self.size.width) {
if((self.size.width / self.size.height) >= (size.width / size.height)) {
return [self scaleHeightAndCropWidthToSize:size];
} else {
return [self scaleWidthAndCropHeightToSize:size];
}
} else {
return [self scaleHeightAndCropWidthToSize:size];
}
} else {
if(self.size.width > self.size.height) {
if((self.size.height / self.size.width) >= (size.height / size.width)) {
return [self scaleWidthAndCropHeightToSize:size];
} else {
return [self scaleHeightAndCropWidthToSize:size];
}
} else {
return [self scaleWidthAndCropHeightToSize:size];
}
}
}
- (UIImage*)scaleHeightAndCropWidthToSize:(CGSize)size {
float newWidth = (self.size.width * size.height) / self.size.height;
return [self scaleToSize:size withOffset:CGPointMake((newWidth - size.width) / 2, 0.0f)];
}
- (UIImage*)scaleWidthAndCropHeightToSize:(CGSize)size {
float newHeight = (self.size.height * size.width) / self.size.width;
return [self scaleToSize:size withOffset:CGPointMake(0, (newHeight - size.height) / 2)];
}
- (UIImage*)scaleToSize:(CGSize)size withOffset:(CGPoint)offset {
UIImage* scaledImage = [self scaleToSize:CGSizeMake(size.width + (offset.x * -2), size.height + (offset.y * -2))];
UIGraphicsBeginImageContext(size);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(context, 0.0, size.height);
CGContextScaleCTM(context, 1.0, -1.0);
CGRect croppedRect;
croppedRect.size = size;
croppedRect.origin = CGPointZero;
CGContextClipToRect(context, croppedRect);
CGRect drawRect;
drawRect.origin = offset;
drawRect.size = scaledImage.size;
CGContextDrawImage(context, drawRect, scaledImage.CGImage);
UIImage* croppedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return croppedImage;
}
#pragma mark - Alpha
// Returns true if the image has an alpha layer
- (BOOL)hasAlpha {
CGImageAlphaInfo alpha = CGImageGetAlphaInfo(self.CGImage);
return (alpha == kCGImageAlphaFirst ||
alpha == kCGImageAlphaLast ||
alpha == kCGImageAlphaPremultipliedFirst ||
alpha == kCGImageAlphaPremultipliedLast);
}
// Returns a copy of the given image, adding an alpha channel if it doesn't already have one
- (UIImage *)imageWithAlpha {
if ([self hasAlpha]) {
return self;
}
CGImageRef imageRef = self.CGImage;
size_t width = CGImageGetWidth(imageRef);
size_t height = CGImageGetHeight(imageRef);
// The bitsPerComponent and bitmapInfo values are hard-coded to prevent an "unsupported parameter combination" error
CGContextRef offscreenContext = CGBitmapContextCreate(NULL,
width,
height,
8,
0,
CGImageGetColorSpace(imageRef),
kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
// Draw the image into the context and retrieve the new image, which will now have an alpha layer
CGContextDrawImage(offscreenContext, CGRectMake(0, 0, width, height), imageRef);
CGImageRef imageRefWithAlpha = CGBitmapContextCreateImage(offscreenContext);
UIImage *imageWithAlpha = [UIImage imageWithCGImage:imageRefWithAlpha];
// Clean up
CGContextRelease(offscreenContext);
CGImageRelease(imageRefWithAlpha);
return imageWithAlpha;
}
// Returns a copy of the image with a transparent border of the given size added around its edges.
// If the image has no alpha layer, one will be added to it.
- (UIImage *)transparentBorderImage:(NSUInteger)borderSize {
// If the image does not have an alpha layer, add one
UIImage *image = [self imageWithAlpha];
CGRect newRect = CGRectMake(0, 0, image.size.width + borderSize * 2, image.size.height + borderSize * 2);
// Build a context that's the same dimensions as the new size
CGContextRef bitmap = CGBitmapContextCreate(NULL,
newRect.size.width,
newRect.size.height,
CGImageGetBitsPerComponent(image.CGImage),
0,
CGImageGetColorSpace(image.CGImage),
CGImageGetBitmapInfo(image.CGImage));
// Draw the image (the alpha-backed copy) in the center of the context, leaving a gap around the edges
CGRect imageLocation = CGRectMake(borderSize, borderSize, image.size.width, image.size.height);
CGContextDrawImage(bitmap, imageLocation, image.CGImage);
CGImageRef borderImageRef = CGBitmapContextCreateImage(bitmap);
// Create a mask to make the border transparent, and combine it with the image
CGImageRef maskImageRef = [self newBorderMask:borderSize size:newRect.size];
CGImageRef transparentBorderImageRef = CGImageCreateWithMask(borderImageRef, maskImageRef);
UIImage *transparentBorderImage = [UIImage imageWithCGImage:transparentBorderImageRef];
// Clean up
CGContextRelease(bitmap);
CGImageRelease(borderImageRef);
CGImageRelease(maskImageRef);
CGImageRelease(transparentBorderImageRef);
return transparentBorderImage;
}
#pragma mark Private helper methods
// Creates a mask that makes the outer edges transparent and everything else opaque
// The size must include the entire mask (opaque part + transparent border)
// The caller is responsible for releasing the returned reference by calling CGImageRelease
- (CGImageRef)newBorderMask:(NSUInteger)borderSize size:(CGSize)size {
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
// Build a context that's the same dimensions as the new size
CGContextRef maskContext = CGBitmapContextCreate(NULL,
size.width,
size.height,
8, // 8-bit grayscale
0,
colorSpace,
kCGBitmapByteOrderDefault | kCGImageAlphaNone);
// Start with a mask that's entirely transparent
CGContextSetFillColorWithColor(maskContext, [UIColor blackColor].CGColor);
CGContextFillRect(maskContext, CGRectMake(0, 0, size.width, size.height));
// Make the inner part (within the border) opaque
CGContextSetFillColorWithColor(maskContext, [UIColor whiteColor].CGColor);
CGContextFillRect(maskContext, CGRectMake(borderSize, borderSize, size.width - borderSize * 2, size.height - borderSize * 2));
// Get an image of the context
CGImageRef maskImageRef = CGBitmapContextCreateImage(maskContext);
// Clean up
CGContextRelease(maskContext);
CGColorSpaceRelease(colorSpace);
return maskImageRef;
}
@end
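One caveat worth calling out: imageWithContentsOfURL: downloads synchronously, so it should not be called on the main thread. Below is a minimal sketch of how it might be wrapped; the function name GPLoadRemoteImage and the UIImageView parameter are illustrative only.

#import "GPImage.h"
// Rough sketch: fetch a remote image off the main thread, fix its EXIF
// orientation, then hand it to an image view back on the main queue.
void GPLoadRemoteImage(NSURL *url, UIImageView *imageView)
{
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        UIImage *remote = [UIImage imageWithContentsOfURL:url]; // blocking download
        UIImage *upright = [remote fixOrientation];             // bake the orientation into the pixels
        dispatch_async(dispatch_get_main_queue(), ^{
            imageView.image = upright;                          // UIKit work stays on the main thread
        });
    });
}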