I am trying to implement some image filters, such as a Polaroid effect, on the iPhone. I searched for how to filter an existing UIImage into a Polaroid style and came across this stackoverflow link. Taking the answer there as a starting point, I looped through each pixel of the image, took the RGB values, and converted them to HSV; up to this point I have been successful. So this is what I have done (anyone is free to point out any mistakes..)
double minRGB(double r, double g, double b) {
    // Smallest of the three channel values.
    if (r < g) {
        if (r < b) {
            return r;
        } else {
            return b;
        }
    } else {
        if (g < b) {
            return g;
        } else {
            return b;
        }
    }
}
double maxRGB(double r, double g, double b) {
    // Largest of the three channel values.
    if (r > g) {
        if (r > b) {
            return r;
        } else {
            return b;
        }
    } else {
        if (g > b) {
            return g;
        } else {
            return b;
        }
    }
}
void rgbToHsv(double redIn, double greenIn, double blueIn, double *hue, double *saturation, double *value) {
    double min, max, delta;
    min = minRGB(redIn, greenIn, blueIn);
    max = maxRGB(redIn, greenIn, blueIn);
    *value = max;
    delta = max - min;
    if (max != 0) {
        *saturation = delta / max;
    } else {
        // Black: saturation is 0 and hue is undefined.
        *saturation = 0;
        *hue = -1.0;
        return;
    }
    if (delta == 0) {
        // Grey: all channels equal, so avoid dividing by zero below; hue is arbitrary.
        *hue = 0;
        return;
    }
    if (redIn == max) {
        *hue = (greenIn - blueIn) / delta;       // between yellow and magenta
    } else if (greenIn == max) {
        *hue = 2 + (blueIn - redIn) / delta;     // between cyan and yellow
    } else {
        *hue = 4 + (redIn - greenIn) / delta;    // between magenta and cyan
    }
    *hue *= 60.0;                                // degrees
    if (*hue < 0) {
        *hue += 360.0;
    }
}
void hsvToRgb(double h, double s, double v, double *r, double *g, double *b) {
    int i;
    double f, p, q, t;
    if (s == 0) {
        // achromatic (grey)
        *r = *g = *b = v;
        return;
    }
    h /= 60;        // sector 0 to 5
    i = floor(h);
    f = h - i;      // fractional part of h
    p = v * (1 - s);
    q = v * (1 - s * f);
    t = v * (1 - s * (1 - f));
    switch (i) {
        case 0:
            *r = v;
            *g = t;
            *b = p;
            break;
        case 1:
            *r = q;
            *g = v;
            *b = p;
            break;
        case 2:
            *r = p;
            *g = v;
            *b = t;
            break;
        case 3:
            *r = p;
            *g = q;
            *b = v;
            break;
        case 4:
            *r = t;
            *g = p;
            *b = v;
            break;
        default:    // case 5
            *r = v;
            *g = p;
            *b = q;
            break;
    }
}
- (UIImage *)makeImagePolaroid:(UIImage *)myImage {
    CGImageRef originalImage = [myImage CGImage];
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef bitmapContext = CGBitmapContextCreate(NULL,
                                                       CGImageGetWidth(originalImage),
                                                       CGImageGetHeight(originalImage),
                                                       8,
                                                       CGImageGetWidth(originalImage) * 4,
                                                       colorSpace,
                                                       kCGImageAlphaPremultipliedLast);
    CGColorSpaceRelease(colorSpace);
    CGContextDrawImage(bitmapContext,
                       CGRectMake(0, 0, CGBitmapContextGetWidth(bitmapContext), CGBitmapContextGetHeight(bitmapContext)),
                       originalImage);

    UInt8 *data = CGBitmapContextGetData(bitmapContext);
    int numComponents = 4;    // RGBA
    int bytesInContext = CGBitmapContextGetHeight(bitmapContext) * CGBitmapContextGetBytesPerRow(bitmapContext);

    double redIn, greenIn, blueIn, alphaIn;
    double hue, saturation, value;

    for (int i = 0; i < bytesInContext; i += numComponents) {
        redIn   = (double)data[i] / 255.0;
        greenIn = (double)data[i + 1] / 255.0;
        blueIn  = (double)data[i + 2] / 255.0;
        alphaIn = (double)data[i + 3] / 255.0;

        rgbToHsv(redIn, greenIn, blueIn, &hue, &saturation, &value);

        // These factors are just my current guesses at a polaroid look.
        hue = hue * 0.7;
        if (hue > 360) {
            hue = 360;
        }
        saturation = saturation * 1.3;
        if (saturation > 1.0) {
            saturation = 1.0;
        }
        value = value * 0.8;
        if (value > 1.0) {
            value = 1.0;
        }

        hsvToRgb(hue, saturation, value, &redIn, &greenIn, &blueIn);

        data[i]     = redIn * 255.0;
        data[i + 1] = greenIn * 255.0;
        data[i + 2] = blueIn * 255.0;
    }

    CGImageRef outImage = CGBitmapContextCreateImage(bitmapContext);
    UIImage *result = [UIImage imageWithCGImage:outImage];
    CGImageRelease(outImage);
    CGContextRelease(bitmapContext);
    return result;
}
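For completeness, this is roughly how I call it; the image name and the image view are just placeholders from my test project:

UIImage *original = [UIImage imageNamed:@"test.jpg"];   // placeholder asset name
UIImage *filtered = [self makeImagePolaroid:original];
self.imageView.image = filtered;                        // imageView is a placeholder outlet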
Now, my knowledge of image processing is very childish (not even amateurish). I read this and tried adjusting the saturation and hue to see if I could get a polaroid effect. I think I am missing something, because I got every effect on earth other than a polaroid (which is to say, I haven't got anything).
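To rule out bugs in the conversion itself, a quick round-trip spot check on a couple of hand-picked colours comes back with the expected values, so I suspect the problem is in my adjustment factors rather than the conversion:

double h, s, v, r, g, b;
rgbToHsv(1.0, 0.0, 0.0, &h, &s, &v);            // pure red: expect h = 0, s = 1, v = 1
hsvToRgb(h, s, v, &r, &g, &b);
NSLog(@"red round-trip: %f %f %f", r, g, b);    // expect 1.0 0.0 0.0

rgbToHsv(0.5, 0.5, 0.5, &h, &s, &v);            // mid grey: expect s = 0, v = 0.5
hsvToRgb(h, s, v, &r, &g, &b);
NSLog(@"grey round-trip: %f %f %f", r, g, b);   // expect 0.5 0.5 0.5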
- Is there any document on the net (or a book) that explains image filtering from a programmer's point of view (and not a designer's point of view, without Photoshop screenshots)?
- What hue, saturation, and value changes do I have to make to a pixel so that the image looks like a polaroid?
- And third, am I on the right track?
Thanks in advance..