AVPlayerLayer get Image to UIImageView Buffer

Posted 2019-04-23 05:58

Question:

I tried [playerLayer renderInContext:UIGraphicsGetCurrentContext()]; but it only renders a black background. So instead I keep grabbing the current player item as a thumbnail image, but the result does not look like video playback, just a stream of still images. Is there any other way to do this?

Answer 1:

Try this code to set up the reader:

// Set up the reader
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:urlvalue options:nil];
    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
            if ([tracks count] == 1) {
                AVAssetTrack *videoTrack = [tracks objectAtIndex:0];

                NSError *error = nil;
                _movieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
                if (error)
                    NSLog(@"%@", error.localizedDescription);

                // Request BGRA output so the pixel data matches the bitmap
                // context created in -readNextMovieFrame below.
                NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
                NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
                NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];

                [_movieReader addOutput:[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                                                   outputSettings:videoSettings]];
                [_movieReader startReading];
            }
        });
    }];
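
The question notes that pulling frames one at a time looks like a slideshow rather than video, so the frames have to be requested at roughly the display rate. One way to do that (a minimal sketch, not part of the original answer) is to drive -readNextMovieFrame, shown further below, from a CADisplayLink once the reader has started; _displayLink, videoImageView and videoImage are assumed ivars/outlets here:

    // Sketch only: pull a frame on every screen refresh and show it.
    // _displayLink, videoImageView and videoImage are assumed ivars,
    // not part of the original answer.
    - (void)startDisplayLink {
        _displayLink = [CADisplayLink displayLinkWithTarget:self
                                                   selector:@selector(displayLinkFired:)];
        [_displayLink addToRunLoop:[NSRunLoop mainRunLoop]
                           forMode:NSRunLoopCommonModes];
    }

    - (void)displayLinkFired:(CADisplayLink *)link {
        if (_movieReader.status == AVAssetReaderStatusReading) {
            [self readNextMovieFrame];          // defined below
            videoImageView.image = videoImage;  // show the latest decoded frame
        } else {
            [link invalidate];                  // reader finished or failed
        }
    }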

To get the next movie frame:

- (void)readNextMovieFrame {

    if (_movieReader.status == AVAssetReaderStatusReading) {

        AVAssetReaderTrackOutput *output = [_movieReader.outputs objectAtIndex:0];
        CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
        if (sampleBuffer) {
            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

            // Lock the image buffer before reading its base address
            CVPixelBufferLockBaseAddress(imageBuffer, 0);

            // Get information about the image
            uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
            size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            size_t height = CVPixelBufferGetHeight(imageBuffer);

            /* Create a CGImageRef from the CVImageBufferRef (BGRA layout) */
            CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
            CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                            kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
            CGImageRef newImage = CGBitmapContextCreateImage(newContext);

            /* Release the drawing components and unlock the buffer only after
               the CGImage has been created from it */
            CGContextRelease(newContext);
            CGColorSpaceRelease(colorSpace);
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

            /* To display the result on a custom layer instead:
               self.customLayer.contents = (id)newImage; */

            /* Display the result in the image view (the image is rotated so
               that the video is displayed with the correct orientation) */
            UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
            CGImageRelease(newImage);

            UIGraphicsBeginImageContext(image.size);
            [image drawAtPoint:CGPointMake(0, 0)];
            videoImage = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();

            // Optionally dump each frame to disk for debugging
            NSLog(@"readNextMovieFrame==%d", frameCount);
            NSString *filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
            NSString *pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
            [UIImagePNGRepresentation(videoImage) writeToFile:pngPath atomically:YES];
            frameCount++;

            CFRelease(sampleBuffer);
        }
    }
}
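
With the display-link approach above, frames appear at the screen refresh rate rather than the video's native frame rate. If the pacing looks wrong, a rough sketch of one fix (assuming a _playbackStartTime ivar recorded with CACurrentMediaTime() when reading starts, which is not part of the original answer) is to read each frame's presentation timestamp before releasing the sample buffer and delay the image-view update accordingly:

    // Sketch only: how long to wait before displaying this frame.
    // _playbackStartTime is an assumed CFTimeInterval ivar set when
    // [_movieReader startReading] is called; not part of the original answer.
    - (NSTimeInterval)delayForSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        NSTimeInterval frameTime = CMTimeGetSeconds(pts);
        NSTimeInterval elapsed = CACurrentMediaTime() - _playbackStartTime;
        return MAX(0.0, frameTime - elapsed);
    }

You would call this inside readNextMovieFrame before CFRelease(sampleBuffer) and postpone assigning videoImage to the image view by the returned interval (for example with dispatch_after), so frames advance at the clip's own rate.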