I'm trying to get the ARGB components of an image from a CGBitmapContext with the following code:
-(id) initWithImage:(UIImage *)image // create a bitmap context from the UIImage and use 'pixelData' as the backing buffer
{
    CGContextRef context = NULL;
    CGColorSpaceRef colorSpace;
    int bitmapByteCount;
    int bitmapBytesPerRow;

    bitmapBytesPerRow = (image.size.width * 4);
    bitmapByteCount = (bitmapBytesPerRow * image.size.height);

    colorSpace = CGColorSpaceCreateDeviceRGB();

    pixelData = malloc(bitmapByteCount); // unsigned char *pixelData is declared in the header file

    context = CGBitmapContextCreate(pixelData,
                                    image.size.width,
                                    image.size.height,
                                    8, // bits per component
                                    bitmapBytesPerRow,
                                    colorSpace,
                                    kCGImageAlphaPremultipliedFirst);

    CGColorSpaceRelease(colorSpace);

    CGContextDrawImage(context, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage);

    pixelData = CGBitmapContextGetData(context);

    return self;
}
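For reference, the relevant declarations in my header look roughly like this (reconstructed from memory, so the class name is approximate; pixelData is the buffer used above and width is the ivar used in the accessor below):

@interface MyViewController : UIViewController {
    unsigned char *pixelData; // buffer passed to CGBitmapContextCreate
    int width;                // image width used for pixel indexing
}
@end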
-(float) alphaAtX:(int)x y:(int)y // get the alpha component via the 'pixelData' pointer
{
    return pixelData[(y * width + x) * 4 + 3]; // +0 for red, +1 for green, +2 for blue, +3 for alpha
}
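I read the other components the same way, with accessors like these (method names are from memory; they follow the same +0/+1/+2 offsets as the comment above):

-(float) redAtX:(int)x y:(int)y
{
    return pixelData[(y * width + x) * 4];     // +0, which I assume is red
}

-(float) greenAtX:(int)x y:(int)y
{
    return pixelData[(y * width + x) * 4 + 1]; // +1, which I assume is green
}

-(float) blueAtX:(int)x y:(int)y
{
    return pixelData[(y * width + x) * 4 + 2]; // +2, which I assume is blue
}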
-(void) viewDidLoad
{
    [super viewDidLoad];

    UIImage *img = [UIImage imageNamed:@"MacDrive.png"]; // load the image
    [self initWithImage:img];                            // create the bitmap context from it
    float alpha = [self alphaAtX:20 y:20];               // read the alpha component
}
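This is roughly how I check the values in the console (simplified; I've tried several different coordinates):

NSLog(@"alpha at (20,20): %f", [self alphaAtX:20 y:20]);
NSLog(@"red at (20,20):   %f", [self redAtX:20 y:20]);
// ...and the same for green and blue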
When I try to read the red/green/blue values, they always come out as 240, and alpha is always 255.

So I suspect something is wrong with the pointer: it doesn't give me back the correct ARGB data. Any ideas about what's wrong with the code?