
iOS AVCaptureVideoPreviewLayer capture current image


Question:

Once the default iPhone Camera app takes a photo, a preview appears and the image animates to the camera roll button. I am trying to replicate this animation.
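This is how I set up the capture session: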

session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetPhoto;

CALayer *viewLayer = self.vImagePreview.layer;
NSLog(@"viewLayer = %@", viewLayer);

captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = CGRectMake(0, 0, 322, 425);
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];

device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (input) {
    [session addInput:input];
} else {
    // Handle the error appropriately.
    NSLog(@"ERROR: trying to open camera: %@", error);
}

_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
[session addOutput:_stillImageOutput];

// Start the session so the preview layer begins rendering frames.
[session startRunning];

This is how I take an image:

AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections)
{
    for (AVCaptureInputPort *port in [connection inputPorts])
    {
        if ([[port mediaType] isEqual:AVMediaTypeVideo] )
        {
            videoConnection = connection;
            break;
        }
    }
    if (videoConnection) { break; }
}
NSLog(@"about to request a capture from: %@", _stillImageOutput);
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
    CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
    if (exifAttachments) {
        // Do something with the attachments.
        NSLog(@"attachments: %@", exifAttachments);
    } else {
        NSLog(@"no attachments");
    }

    NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
    UIImage *image = [[UIImage alloc] initWithData:imageData];
    UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
}];
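As an aside (not part of the original code): on iOS 5 and later, AVCaptureOutput provides connectionWithMediaType:, so the nested connection-finding loop above can be collapsed into a single call:

AVCaptureConnection *videoConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];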

This captures the full-resolution photo, but to replicate the animation I also need the image currently shown in the AVCaptureVideoPreviewLayer. How do I get the image that the AVCaptureVideoPreviewLayer is currently displaying?

Answer 1:

The way to do this is to add a second output, an AVCaptureVideoDataOutput, to the session:

frameOutput = [[AVCaptureVideoDataOutput alloc] init];
frameOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
[frameOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
// Add the new output to the session so frames start flowing.
[session addOutput:frameOutput];
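For the callback to fire, the class receiving it must also declare conformance to AVCaptureVideoDataOutputSampleBufferDelegate. A minimal sketch, where CameraViewController is a placeholder class name, not from the original answer:

@interface CameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
// Keep a strong reference so the output outlives the setup method.
@property (nonatomic, strong) AVCaptureVideoDataOutput *frameOutput;
@end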

Once that is in place, the delegate callback receives every frame, and you can convert the latest sample buffer into a UIImage:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Create a UIImage from the sample buffer data
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    // rotateUIImage is a helper that compensates for the sensor orientation.
    rotatedScreenImage = rotateUIImage(image, 90.0);
}
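The imageFromSampleBuffer: helper below is essentially the standard CMSampleBuffer-to-UIImage conversion from Apple's AVFoundation sample code: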
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}

The image returned there is exactly what the preview layer is showing on screen.
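To close the loop on the original question: once rotatedScreenImage holds the latest preview frame, the fly-to-camera-roll effect can be approximated with a plain UIView animation. This is a minimal sketch: vImagePreview comes from the question's code, while cameraRollButton and the timing values are assumptions, not part of the original answer.

- (void)animatePreviewToCameraRoll {
    // Snapshot view that starts where the live preview is shown.
    UIImageView *snapshot = [[UIImageView alloc] initWithImage:rotatedScreenImage];
    snapshot.frame = self.vImagePreview.frame;
    snapshot.contentMode = UIViewContentModeScaleAspectFill;
    snapshot.clipsToBounds = YES;
    [self.view addSubview:snapshot];

    // Shrink the snapshot into the camera roll button, then remove it.
    [UIView animateWithDuration:0.4
                     animations:^{
                         snapshot.frame = self.cameraRollButton.frame; // assumed outlet
                         snapshot.alpha = 0.8;
                     }
                     completion:^(BOOL finished) {
                         [snapshot removeFromSuperview];
                     }];
}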