UIImage彩色置灰
第一种(问题:透明部分转换成了黑色)
/// Approach 1: draw into a grayscale-colorspace bitmap context.
/// Known limitation: a device-gray context has no alpha channel, so any
/// transparent area of the source comes back black.
/// @param sourceImage The image to desaturate.
/// @return A grayscale copy, or nil if the bitmap context cannot be created.
+ (UIImage *)grayImage:(UIImage *)sourceImage {
size_t width = sourceImage.size.width;
size_t height = sourceImage.size.height;
uint32_t bitmapInfo = kCGBitmapByteOrderDefault; // kCGImageAlphaNone;
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
// 8 bits per component; bytesPerRow == 0 lets Core Graphics pick the stride.
CGContextRef context = CGBitmapContextCreate(nil, width, height, 8, 0,
colorSpace, bitmapInfo);
CGColorSpaceRelease(colorSpace);
if (context == NULL) {
return nil;
}
CGContextDrawImage(context, CGRectMake(0, 0, width, height),
sourceImage.CGImage);
// Fix: CGBitmapContextCreateImage follows the Create rule — the returned
// CGImageRef was previously leaked because it was never released.
CGImageRef cgImage = CGBitmapContextCreateImage(context);
CGContextRelease(context);
if (cgImage == NULL) {
return nil;
}
// Fix: carry the source scale/orientation over so Retina images keep
// their point size and rotation.
UIImage *grayImage = [UIImage imageWithCGImage:cgImage
scale:sourceImage.scale
orientation:sourceImage.imageOrientation];
CGImageRelease(cgImage);
return grayImage;
}
第二种(问题:性能上可能不是很好)
// 实现 1
/// Approach 2, variant 1: desaturate with Core Image (CIColorControls,
/// saturation = 0). The result is CIImage-backed: fine for display in a
/// UIImageView, but its `CGImage` property is nil, so it cannot feed
/// further CG-based processing.
/// @param image The image to desaturate.
/// @return A grayscale, CIImage-backed UIImage.
+ (UIImage *)grayscaleImage:(UIImage *)image {
CIImage *ciImage = [[CIImage alloc] initWithImage:image];
CIImage *grayscale = [ciImage imageByApplyingFilter:@"CIColorControls"
withInputParameters:@{kCIInputSaturationKey : @0.0}];
// Fix: the plain imageWithCIImage: variant always uses scale 1.0 and
// orientation "up", so Retina images came back at the wrong point size
// and rotated images lost their orientation.
return [UIImage imageWithCIImage:grayscale
scale:image.scale
orientation:image.imageOrientation];
}
// 实现 2
/// Approach 2, variant 2: render the desaturated CIImage back into a
/// CGImage so the result is CGImage-backed (usable for further processing).
/// @param image The image to desaturate.
/// @return A grayscale UIImage, or nil if Core Image fails to render.
+ (UIImage *)convertImageToGrayScale:(UIImage *)image {
// Fix: a CIContext is expensive to create; build it once and reuse it
// instead of allocating a new one on every call (this was the flagged
// performance problem with this approach).
static CIContext *context;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
context = [CIContext contextWithOptions:nil];
});
CIImage *inputImage = [CIImage imageWithCGImage:image.CGImage];
CIFilter *filter = [CIFilter filterWithName:@"CIColorControls"];
[filter setValue:inputImage forKey:kCIInputImageKey];
[filter setValue:@0.0 forKey:kCIInputSaturationKey];
CIImage *outputImage = filter.outputImage;
CGImageRef cgImageRef = [context createCGImage:outputImage fromRect:outputImage.extent];
if (cgImageRef == NULL) {
return nil;
}
// Fix: keep the original scale/orientation so Retina images are not
// returned at double their point size.
UIImage *result = [UIImage imageWithCGImage:cgImageRef
scale:image.scale
orientation:image.imageOrientation];
CGImageRelease(cgImageRef);
return result;
}
第三种(符合需求)
注意:如果不乘以 sourceImg.scale,图片会模糊
- size_t width = sourceImg.size.width * sourceImg.scale;
- size_t height = sourceImg.size.height * sourceImg.scale;
// Byte offsets of each component inside one 32-bit pixel produced by a
// bitmap context created with
// kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast:
// the RGBA components are stored little-endian, i.e. in memory order
// A, B, G, R — which is exactly what these indices encode.
// NS_ENUM over a bare C typedef enum gives an explicit underlying type and
// better compiler diagnostics; names and values are unchanged.
typedef NS_ENUM(uint8_t, PIXELS) {
ALPHA = 0,
BLUE = 1,
GREEN = 2,
RED = 3
};
/// Approach 3 (preserves transparency): draw into an RGBA bitmap, then
/// rewrite each pixel's RGB with the luma value, leaving alpha untouched.
/// @param sourceImg The image to desaturate.
/// @return A grayscale copy with alpha preserved, or nil on failure.
+ (UIImage *)convertToGrayscale:(UIImage *)sourceImg {
// Multiply by scale so Retina images are processed at their full pixel
// resolution; otherwise the result is blurry.
size_t width = sourceImg.size.width * sourceImg.scale;
size_t height = sourceImg.size.height * sourceImg.scale;
if (width == 0 || height == 0) {
return nil;
}
// The pixels the context paints into; calloc zeroes the buffer so any
// transparency in the source is preserved.
uint32_t *pixels = (uint32_t *)calloc(width * height, sizeof(uint32_t));
if (pixels == NULL) {
// Fix: the original dereferenced the buffer without a malloc check.
return nil;
}
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pixels, width, height, 8,
width * sizeof(uint32_t), colorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if (context == NULL) {
// Fix: the original drew into a possibly-NULL context and leaked pixels.
free(pixels);
return nil;
}
// Paint the bitmap into our context, which fills in the pixels array.
CGContextDrawImage(context, CGRectMake(0, 0, width, height), sourceImg.CGImage);
for (size_t y = 0; y < height; y++) {
for (size_t x = 0; x < width; x++) {
uint8_t *rgbaPixel = (uint8_t *)&pixels[y * width + x];
// Luma conversion:
// https://en.wikipedia.org/wiki/Grayscale#Converting_color_to_grayscale
uint32_t gray = 0.299 * rgbaPixel[RED] + 0.587 * rgbaPixel[GREEN] + 0.114 * rgbaPixel[BLUE];
rgbaPixel[RED] = gray;
rgbaPixel[GREEN] = gray;
rgbaPixel[BLUE] = gray;
}
}
CGImageRef image = CGBitmapContextCreateImage(context);
CGContextRelease(context);
free(pixels);
if (image == NULL) {
return nil;
}
// Fix: the pixel dimensions were multiplied by scale above, so a plain
// imageWithCGImage: would return an image scale× too large in points;
// pass the scale (and orientation) back explicitly.
UIImage *resultImg = [UIImage imageWithCGImage:image
scale:sourceImg.scale
orientation:sourceImg.imageOrientation];
CGImageRelease(image);
return resultImg;
}
图像像素颜色反转(OpenCV)
/// Demo: invert an image's colors pixel by pixel via OpenCV's C API and
/// display both the original and the inverted result.
- (void)viewDidLoad {
[super viewDidLoad];
UIImage *mImage = [UIImage imageNamed:@"001.jpg"];
self.imageView.image = mImage;
IplImage *image = [self convertToIplImage:mImage];
if (image == NULL) {
// Fix: guard against conversion failure before dereferencing.
return;
}
// Invert each channel: v -> 255 - v.
CvScalar cs;
for (int i = 0; i < image->height; i++) {
for (int j = 0; j < image->width; j++) {
cs = cvGet2D(image, i, j);
cs.val[0] = 255 - cs.val[0];
cs.val[1] = 255 - cs.val[1];
cs.val[2] = 255 - cs.val[2];
cvSet2D(image, i, j, cs);
}
}
self.imageView1.image = [self convertToUIImage:image];
// Fix: the IplImage was leaked before. convertToUIImage: copies the
// pixel buffer into an NSData, so releasing here is safe.
cvReleaseImage(&image);
}
/// Converts a UIImage into a 3-channel BGR IplImage (OpenCV's native order).
/// @param image The source image.
/// @return A newly allocated IplImage the caller must cvReleaseImage, or NULL.
- (IplImage *)convertToIplImage:(UIImage *)image {
CGImageRef imageRef = image.CGImage;
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// 4-channel RGBA staging buffer that Core Graphics renders into.
IplImage *iplImage = cvCreateImage(cvSize(image.size.width, image.size.height), IPL_DEPTH_8U, 4);
CGContextRef contextRef = CGBitmapContextCreate(iplImage->imageData, iplImage->width, iplImage->height, iplImage->depth, iplImage->widthStep, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
CGColorSpaceRelease(colorSpace);
if (contextRef == NULL) {
// Fix: the original drew into a possibly-NULL context and leaked iplImage.
cvReleaseImage(&iplImage);
return NULL;
}
CGContextDrawImage(contextRef, CGRectMake(0, 0, image.size.width, image.size.height), imageRef);
CGContextRelease(contextRef);
IplImage *ret = cvCreateImage(cvGetSize(iplImage), IPL_DEPTH_8U, 3);
// Fix: the staging buffer has 4 channels (RGBA), so the conversion code
// must be CV_RGBA2BGR. CV_RGB2BGR requires a 3-channel source and makes
// cvCvtColor fail an assertion at runtime.
cvCvtColor(iplImage, ret, CV_RGBA2BGR);
cvReleaseImage(&iplImage);
return ret;
}
/// Converts a 3-channel BGR IplImage back into a UIImage.
/// Note: the input is converted to RGB in place, i.e. the caller's
/// IplImage is mutated by this call.
/// @param image The OpenCV image to convert.
/// @return A UIImage backed by a copy of the pixel data.
- (UIImage *)convertToUIImage:(IplImage *)image {
// OpenCV stores pixels as BGR; CGImageCreate below expects RGB.
cvCvtColor(image, image, CV_BGR2RGB);
// Copy the raw pixel buffer so the CGImage owns an independent backing store.
NSData *pixelData = [NSData dataWithBytes:image->imageData length:image->imageSize];
CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData((CFDataRef)pixelData);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
size_t bitsPerComponent = image->depth;
size_t bitsPerPixel = image->depth * image->nChannels;
CGImageRef cgImage = CGImageCreate(image->width,
image->height,
bitsPerComponent,
bitsPerPixel,
image->widthStep, // bytes per row, including any row padding
rgbColorSpace,
kCGImageAlphaNone | kCGBitmapByteOrderDefault,
dataProvider,
NULL,
false,
kCGRenderingIntentDefault);
UIImage *result = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);
CGDataProviderRelease(dataProvider);
CGColorSpaceRelease(rgbColorSpace);
return result;
}