Basically I have an image that gets drawn while the touch moves. When I toggle on a button called eraser, I want to detect whether the context I have drawn into is NOT black at the position of the touch. Is there any way to do this?
- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        hue = 0.0;
        [self initContext:frame.size];
        framsize = frame.size;
    }
    return self;
}
-(void)clear{
    // Release the old context and free the old buffer before creating new
    // ones, otherwise every call to clear leaks them.
    if (cacheContext) CGContextRelease(cacheContext);
    if (cacheBitmap) free(cacheBitmap);
    cacheContext = NULL;
    cacheBitmap = NULL;
    [self initContext:framsize];
    [self setNeedsDisplay];
}
- (BOOL) initContext:(CGSize)size {
    float scaleFactor = [[UIScreen mainScreen] scale];
    int bitmapByteCount;
    int bitmapBytesPerRow;

    // Declare the number of bytes per row. Each pixel in the bitmap in this
    // example is represented by 4 bytes; 8 bits each of red, green, blue, and
    // alpha.
    bitmapBytesPerRow = (size.width * 4);
    bitmapByteCount = (bitmapBytesPerRow * size.height) * scaleFactor * scaleFactor;

    // Allocate memory for image data. This is the destination in memory
    // where any drawing to the bitmap context will be rendered.
    cacheBitmap = malloc(bitmapByteCount);
    if (cacheBitmap == NULL) {
        return NO;
    }

    CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    cacheContext = CGBitmapContextCreate(cacheBitmap, size.width * scaleFactor, size.height * scaleFactor, 8, bitmapBytesPerRow * scaleFactor, colorSpace, bitmapInfo);
    CGContextScaleCTM(cacheContext, scaleFactor, scaleFactor);
    CGColorSpaceRelease(colorSpace);

    // Zero out the freshly malloc'ed buffer. Filling with an alpha of 0.0 does
    // not clear uninitialized memory, and since the CTM is already scaled the
    // rect is specified in points, with width and height in the right order.
    CGContextClearRect(cacheContext, (CGRect){CGPointZero, size});
    return YES;
}
//-(float) alphaAtX:(int)x y:(int)y //get alpha component using the pointer 'pixelData'
//{
// float scaleFactor = [[UIScreen mainScreen] scale];
// return pixelData[(y * 320*scaleFactor + x) *4 + 3]; //+0 for red, +1 for green, +2 for blue, +3 for alpha
//}
-(UIColor *) colorOfPoint:(CGPoint)point
{
    unsigned char pixel[4] = {0};
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Render the 1x1 region of the layer around 'point' into a tiny RGBA bitmap.
    CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaPremultipliedLast);
    CGContextTranslateCTM(context, -point.x, -point.y);
    [self.layer renderInContext:context];
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIColor *color = [UIColor colorWithRed:pixel[0] / 255.0
                                     green:pixel[1] / 255.0
                                      blue:pixel[2] / 255.0
                                     alpha:pixel[3] / 255.0];
    return color;
}
- (void) drawToCache:(CGPoint)currentpos andLastPos:(CGPoint)pos Color:(UIColor *)colors Thickness:(float)thickness{
    hue += 0.005;
    if (hue > 1.0) hue = 0.0;

    UIColor *color;
    if (!colors) {
        color = [UIColor colorWithHue:hue saturation:0.7 brightness:1.0 alpha:1.0];
    } else {
        color = colors;
    }

    CGContextSetStrokeColorWithColor(cacheContext, [color CGColor]);
    CGContextSetLineCap(cacheContext, kCGLineCapRound);
    CGContextSetLineWidth(cacheContext, 6 + thickness);

    // Stroke a segment from the previous touch position to the current one.
    CGPoint lastPoint = pos;
    CGPoint newPoint = currentpos;
    CGContextMoveToPoint(cacheContext, lastPoint.x, lastPoint.y);
    CGContextAddLineToPoint(cacheContext, newPoint.x, newPoint.y);
    CGContextStrokePath(cacheContext);

    // Only invalidate the small region around the new segment.
    CGRect dirtyPoint1 = CGRectMake(lastPoint.x - 10, lastPoint.y - 10, 20, 20);
    CGRect dirtyPoint2 = CGRectMake(newPoint.x - 10, newPoint.y - 10, 20, 20);
    [self setNeedsDisplayInRect:CGRectUnion(dirtyPoint1, dirtyPoint2)];
}
- (void) drawRect:(CGRect)rect {
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGImageRef cacheImage = CGBitmapContextCreateImage(cacheContext);
    CGContextDrawImage(context, self.bounds, cacheImage);
    CGImageRelease(cacheImage);
}
@end
AFAIK there is no easy way to get the color of a point in a CGContext, but you can use the approach described here: http://www.markj.net/iphone-uiimage-pixel-color/
I have not tested this code, but I hope it works or is easy to fix:
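A minimal sketch of that idea, wired into the touch handling of the view above: it reuses the colorOfPoint: method already shown in the question, and the eraserEnabled flag and lastTouchLocation ivar are assumptions of mine for illustration, not part of the original code.

// Hypothetical touch handler for the same view class. It assumes an
// 'eraserEnabled' BOOL and a 'lastTouchLocation' CGPoint ivar exist.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGPoint location = [[touches anyObject] locationInView:self];

    if (eraserEnabled) {
        CGFloat red = 0, green = 0, blue = 0, alpha = 0;
        [[self colorOfPoint:location] getRed:&red green:&green blue:&blue alpha:&alpha];

        // "Not black" = the pixel is visible and at least one colour channel
        // is non-zero, i.e. something other than black was drawn here.
        BOOL notBlack = (alpha > 0) && (red > 0 || green > 0 || blue > 0);
        if (notBlack) {
            // Erase however your app erases, e.g. paint the background colour
            // back over the stroke (black is assumed here for illustration).
            [self drawToCache:location andLastPos:lastTouchLocation Color:[UIColor blackColor] Thickness:10];
        }
    } else {
        [self drawToCache:location andLastPos:lastTouchLocation Color:nil Thickness:0];
    }

    lastTouchLocation = location;
}

Note that colorOfPoint: returns premultiplied component values, but that does not matter for a simple "is it black?" test: any non-zero colour channel means something other than black sits at that point.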