I can read the barcode, but I can't capture a snapshot of the screen: my getScreenImage function produces a white image. How can I take a screenshot that includes the camera preview I see on screen? Thank you.
@interface igViewController () <AVCaptureMetadataOutputObjectsDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureSession *_session;
    AVCaptureDevice *_device;
    AVCaptureDeviceInput *_input;
    AVCaptureMetadataOutput *_output;
    AVCaptureStillImageOutput *_stillImageOutput; // used by the edited code further down
    AVCaptureVideoPreviewLayer *_prevLayer;
    UIView *_highlightView;
    UILabel *_label;
    UIImage *img;
    NSString *detectionString; // shared between the delegate callback and getScreenImage
}
@end
- (void)viewDidLoad
{
    [super viewDidLoad];

    _highlightView = [[UIView alloc] init];
    _highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin;
    _highlightView.layer.borderColor = [UIColor greenColor].CGColor;
    _highlightView.layer.borderWidth = 3;
    [self.view addSubview:_highlightView];

    _label = [[UILabel alloc] init];
    _label.frame = CGRectMake(0, self.view.bounds.size.height - 100, self.view.bounds.size.width, 100);
    _label.autoresizingMask = UIViewAutoresizingFlexibleTopMargin;
    _label.backgroundColor = [UIColor colorWithWhite:0.15 alpha:0.65];
    _label.textColor = [UIColor whiteColor];
    _label.textAlignment = NSTextAlignmentCenter;
    //[_label addObserver:self forKeyPath:@"text" options:NSKeyValueObservingOptionNew context:NULL];
    [self.view addSubview:_label];

    _session = [[AVCaptureSession alloc] init];
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;

    _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
    if (_input) {
        [_session addInput:_input];
    } else {
        NSLog(@"Error: %@", error);
    }

    _output = [[AVCaptureMetadataOutput alloc] init];
    [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [_session addOutput:_output];
    _output.metadataObjectTypes = [_output availableMetadataObjectTypes];

    _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _prevLayer.frame = self.view.bounds;
    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:_prevLayer];

    [_session startRunning];

    [self.view bringSubviewToFront:_highlightView];
    [self.view bringSubviewToFront:_label];
}
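The edited version further down also needs the still-image output created and configured somewhere; roughly like this in viewDidLoad (the JPEG output settings here are an assumption, not something from my original code):

_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
_stillImageOutput.outputSettings = @{ AVVideoCodecKey : AVVideoCodecJPEG };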
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];

    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type])
            {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }

        _highlightView.frame = highlightViewRect;

        if (detectionString != nil)
        {
            NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil]; // note: not used anywhere below
            [self getScreenImage];
            _label.text = detectionString;
            [self performSelector:@selector(changeText) withObject:nil afterDelay:2.0];
            break;
        }
    }

    if (detectionString != nil) {
        [MainViewController setResultTexts:detectionString img:img];
        detectionString = nil;
        [self dismissViewControllerAnimated:YES completion:nil];
        [_session stopRunning];
    }
}
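As an aside, the inner type-matching loop could probably be dropped by restricting the metadata output to just these barcode types when the session is configured (this has to be set after [_session addOutput:_output]); a sketch with the same list:

_output.metadataObjectTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code,
                                AVMetadataObjectTypeCode39Mod43Code, AVMetadataObjectTypeEAN13Code,
                                AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code,
                                AVMetadataObjectTypeCode128Code, AVMetadataObjectTypePDF417Code,
                                AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];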
// UIGetScreenImage is a private API; declared here so it can be called directly.
CGImageRef UIGetScreenImage(void);

- (void)getScreenImage {
    if (detectionString != nil) {
        CGImageRef screen = UIGetScreenImage();
        img = [UIImage imageWithCGImage:screen];
        CGImageRelease(screen);
    }
}
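A public-API alternative to UIGetScreenImage for snapshotting the view hierarchy would be drawViewHierarchyInRect:afterScreenUpdates: (iOS 7+), roughly like the sketch below, although I am not sure the AVCaptureVideoPreviewLayer content will show up in that snapshot either:

UIGraphicsBeginImageContextWithOptions(self.view.bounds.size, NO, 0.0);
[self.view drawViewHierarchyInRect:self.view.bounds afterScreenUpdates:NO];
UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
img = snapshot;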
Edit:
This works, but the output swap on the session needs to happen more quickly. Because the session is reconfigured to capture the still image, the preview disappears for a moment, and the screenshot I get looks washed out, as if it were at 50% opacity. This link is where I got help. How can I get the screenshot at full opacity now? (See also the sketch after the captureNow code at the end.)
    _highlightView.frame = highlightViewRect;

    if (detectionString != nil)
    {
        [_session removeOutput:_output];          // <-- changed
        [_session addOutput:_stillImageOutput];   // <-- changed
        _label.text = detectionString;
        [self captureNow];                        // <-- changed
        [self performSelector:@selector(changeText) withObject:nil afterDelay:1.0];
        break;
    }
- (void)captureNow {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in _stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection)
        {
            break;
        }
    }

    NSLog(@"about to request a capture from: %@", _stillImageOutput);
    [_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        // Note: this completion handler runs asynchronously, so img may still be nil
        // when code that runs right after captureNow reads it.
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        img = [[UIImage alloc] initWithData:imageData];
    }];
}
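Here is a sketch of what I think should avoid the preview dropping out: keep both outputs attached to the session from the start (I believe a metadata output and a still-image output can coexist on one session), so nothing has to be removed or added at detection time; and if the swap really is needed, batch it between beginConfiguration and commitConfiguration. This is an assumption on my part, not something I have fully verified:

// Option 1: in viewDidLoad, right after [_session addOutput:_output],
// attach the still-image output as well so no reconfiguration is needed later.
if ([_session canAddOutput:_stillImageOutput]) {
    [_session addOutput:_stillImageOutput];
}

// Option 2: if the outputs must be swapped at detection time,
// batch the change so the session applies it in one step.
[_session beginConfiguration];
[_session removeOutput:_output];
[_session addOutput:_stillImageOutput];
[_session commitConfiguration];

Since captureStillImageAsynchronouslyFromConnection: returns before the photo is ready, it would probably also be safer to call setResultTexts:img: and dismiss the controller from inside the completion handler rather than immediately after captureNow.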