I have the following code that attempts to get a screenshot of a video file from NSData. I can confirm the NSData is valid and not nil; however, both dataString and movieURL are coming back nil.
- (UIImage *)imageFromMovie:(NSData *)movieData {
    // set up the movie player
    NSString *dataString = [[NSString alloc] initWithData:movieData encoding:NSUTF8StringEncoding];
    NSURL *movieURL = [NSURL URLWithString:dataString];
    // get the thumbnail
    AVURLAsset *asset1 = [[AVURLAsset alloc] initWithURL:movieURL options:nil];
    AVAssetImageGenerator *generate1 = [[AVAssetImageGenerator alloc] initWithAsset:asset1];
    generate1.appliesPreferredTrackTransform = YES;
    NSError *err = NULL;
    CMTime time = CMTimeMake(1, 2);
    CGImageRef oneRef = [generate1 copyCGImageAtTime:time actualTime:NULL error:&err];
    UIImage *one = [[UIImage alloc] initWithCGImage:oneRef];
    return one;
}
EDIT: Here's a look at where and how I'm getting the NSData from the UIImagePickerController:
if ([mediaType isEqualToString:@"ALAssetTypeVideo"]) {
    ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
    [assetLibrary assetForURL:[[info objectAtIndex:x] valueForKey:UIImagePickerControllerReferenceURL] resultBlock:^(ALAsset *asset) {
        ALAssetRepresentation *rep = [asset defaultRepresentation];
        unsigned long DataSize = (unsigned long)[rep size];
        Byte *buffer = (Byte *)malloc(DataSize);
        NSUInteger buffered = [rep getBytes:buffer fromOffset:0 length:DataSize error:nil];
        // here's the NSData
        NSData *data = [NSData dataWithBytesNoCopy:buffer length:buffered freeWhenDone:YES];
    } failureBlock:^(NSError *err) {
        NSLog(@"Error: %@", [err localizedDescription]);
    }];
}
Possibly you have a problem with the encoding. NSString's instance method -initWithData:encoding: returns nil if the data is not valid for the given encoding (https://developer.apple.com/library/mac/documentation/Cocoa/Reference/Foundation/Classes/NSString_Class/#//apple_ref/occ/instm/NSString/initWithData:encoding:). Try passing the correct encoding to -initWithData:encoding:. Keep in mind, though, that movie data is binary, so it is very unlikely to be valid in any string encoding at all.
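If all you really have is the raw NSData, no string encoding will rescue it; movie bytes aren't text. A more workable route is to write the data to a temporary file and hand AVFoundation a file URL. Here's a minimal sketch under that assumption (the temp.mov file name and the 0.5-second capture time are arbitrary choices, not anything from your code):
- (UIImage *)imageFromMovieData:(NSData *)movieData {
    // Write the raw movie bytes to a temporary file so AVFoundation can read them.
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"temp.mov"];
    NSURL *movieURL = [NSURL fileURLWithPath:path];
    if (![movieData writeToURL:movieURL atomically:YES]) {
        return nil; // could not write the temporary file
    }
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:movieURL options:nil];
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;
    NSError *error = nil;
    CGImageRef imageRef = [generator copyCGImageAtTime:CMTimeMake(1, 2) actualTime:NULL error:&error];
    if (imageRef == NULL) {
        NSLog(@"Thumbnail generation failed: %@", error);
        return nil;
    }
    UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
    CGImageRelease(imageRef); // copyCGImageAtTime: follows the Create Rule
    return image;
}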
You are trying to convert the raw movie data itself into an NSURL; that's why you are getting a nil URL. An NSURL describes where a resource lives, it does not contain the resource's bytes.
In your implementation, you can get the thumbnail in the following way:
AVURLAsset *asset1 = [[AVURLAsset alloc] initWithURL:[[info objectAtIndex:x] valueForKey:UIImagePickerControllerReferenceURL] options:nil];
AVAssetImageGenerator *generate1 = [[AVAssetImageGenerator alloc] initWithAsset:asset1];
generate1.appliesPreferredTrackTransform = YES;
NSError *err = NULL;
CMTime time = CMTimeMake(1, 2);
CGImageRef oneRef = [generate1 copyCGImageAtTime:time actualTime:NULL error:&err];
UIImage *one = [[UIImage alloc] initWithCGImage:oneRef];
CGImageRelease(oneRef); // release the copied image to avoid leaking it
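Note that CMTimeMake(1, 2) means a value of 1 over a timescale of 2, i.e. the frame nearest 0.5 seconds into the movie. By default the generator may hand back a nearby keyframe rather than that exact frame; if that matters to you, you can zero out the tolerances. A small sketch reusing the variables above (the 600 timescale is just a common convention for video, not anything your code requires):
generate1.requestedTimeToleranceBefore = kCMTimeZero;
generate1.requestedTimeToleranceAfter = kCMTimeZero;
CMTime half = CMTimeMakeWithSeconds(0.5, 600);
CGImageRef exactRef = [generate1 copyCGImageAtTime:half actualTime:NULL error:&err];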
Download my sample project before reading this answer:
https://drive.google.com/open?id=0B_exgT43OZJOWl9HMDJCR0cyTW8
I know it's been a really long time since you posted this question; but I found it, I can answer it, and I'm reasonably confident you still need an answer, unless you already used the sample code from the Apple Developer Connection site that does what you're asking. I base that solely on one fact: this is hard to figure out.
Nonetheless, I have a basic, working project that addresses your question. Before looking at it, check out a video I made of it running on my iPhone 6s Plus:
https://www.youtube.com/embed/GiF-FFKvy5M?rel=0&controls=0&showinfo=0
As you can see, the poster frame for every asset in my iPhone's video collection is displayed in a UICollectionViewCell. In the UICollectionViewController (or whichever object serves as your UICollectionView's data source):
void (^renderThumbnail)(NSIndexPath *, CustomCell *) = ^(NSIndexPath *indexPath, CustomCell *cell) {
    [[PHImageManager defaultManager] requestAVAssetForVideo:AppDelegate.assetsFetchResults[indexPath.section] options:nil resultHandler:^(AVAsset * _Nullable asset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
        cell.asset = [asset copy];
        cell.frameTime = [NSValue valueWithCMTime:kCMTimeZero];
    }];
};
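One caveat: if some of the videos live only in iCloud, requesting the AVAsset with nil options can come back with a nil asset. Allowing network access is worth a try; this is my own suggestion, not something the sample project does:
PHVideoRequestOptions *videoOptions = [[PHVideoRequestOptions alloc] init];
videoOptions.networkAccessAllowed = YES; // permit a download from iCloud if the asset isn't local
videoOptions.deliveryMode = PHVideoRequestOptionsDeliveryModeFastFormat; // a lower-quality rendition is fine for a thumbnail
// Pass videoOptions in place of the nil options: argument above.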
- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath {
    PHAsset *phAsset = AppDelegate.assetsFetchResults[indexPath.section];
    CustomCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:CellReuseIdentifier forIndexPath:indexPath];
    cell.representedAssetIdentifier = phAsset.localIdentifier;

    CGFloat hue = (CGFloat)indexPath.section / 5;
    cell.backgroundColor = [UIColor colorWithHue:hue saturation:1.0f brightness:0.5f alpha:1.0f];

    if ([cell.representedAssetIdentifier isEqualToString:phAsset.localIdentifier]) {
        NSPurgeableData *data = [self.thumbnailCache objectForKey:phAsset.localIdentifier];
        [data beginContentAccess];
        UIImage *image = [UIImage imageWithData:data];
        if (image != nil) {
            cell.contentView.layer.contents = (__bridge id)image.CGImage;
            NSLog(@"Cached image found");
        } else {
            renderThumbnail(indexPath, cell);
        }
        [data endContentAccess];
        [data discardContentIfPossible];
    }

    // Request an image for the asset from the PHCachingImageManager.
    /*[AppDelegate.imageManager requestImageForAsset:phAsset
                                         targetSize:cell.contentView.bounds.size
                                        contentMode:PHImageContentModeAspectFill
                                            options:nil
                                      resultHandler:^(UIImage *result, NSDictionary *info) {
        // Set the cell's thumbnail image if it's still showing the same asset.
        if ([cell.representedAssetIdentifier isEqualToString:phAsset.localIdentifier]) {
            cell.thumbnailImage = result;
        }
    }];*/

    return cell;
}
In the UICollectionViewCell subclass:
@implementation CustomCell

- (void)prepareForReuse {
    [super prepareForReuse];
    _asset = nil;
    _frameTime = nil;
    _thumbnailImage = nil;
    [self.contentView.layer setContents:nil];
    [[self contentView] setContentMode:UIViewContentModeScaleAspectFit];
    [[self contentView] setClipsToBounds:YES];
}

- (void)dealloc {
}

- (void)setAsset:(AVAsset *)asset {
    _asset = asset;
}

- (void)setFrameTime:(NSValue *)frameTime {
    _frameTime = frameTime;
    // Generate the poster frame off the main queue.
    dispatch_queue_t concurrentQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
    dispatch_async(concurrentQueue, ^{
        AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:_asset];
        imageGenerator.appliesPreferredTrackTransform = YES;
        imageGenerator.requestedTimeToleranceAfter = kCMTimeZero;
        imageGenerator.requestedTimeToleranceBefore = kCMTimeZero;
        [imageGenerator generateCGImagesAsynchronouslyForTimes:@[frameTime] completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
            // Hop back to the main queue before touching the view's layer.
            dispatch_sync(dispatch_get_main_queue(), ^{
                self.thumbnailImage = [UIImage imageWithCGImage:image scale:25.0 orientation:UIImageOrientationUp];
            });
        }];
    });
}

- (void)setThumbnailImage:(UIImage *)thumbnailImage {
    _thumbnailImage = thumbnailImage;
    self.contentView.layer.contents = (__bridge id)_thumbnailImage.CGImage;
}

@end
The NSCache is set up like this:
self.thumbnailCache = [[NSCache alloc] init];
self.thumbnailCache.name = @"Thumbnail Cache";
self.thumbnailCache.delegate = self;
self.thumbnailCache.evictsObjectsWithDiscardedContent = true;
self.thumbnailCache.countLimit = AppDelegate.assetsFetchResults.count;
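cellForItemAtIndexPath: above reads thumbnails out of thumbnailCache, but the code shown never writes to it; presumably that happens once a thumbnail has been generated. A minimal sketch of what that write might look like (thumbnailImage and phAsset here are stand-ins for whatever image and asset you have in hand at that point):
// Store a freshly generated thumbnail as purgeable data, keyed by the asset's identifier.
NSData *jpegData = UIImageJPEGRepresentation(thumbnailImage, 0.8);
NSPurgeableData *purgeable = [NSPurgeableData dataWithData:jpegData];
[self.thumbnailCache setObject:purgeable forKey:phAsset.localIdentifier];
[purgeable endContentAccess]; // NSPurgeableData starts with an access count of 1; balance it so the cache can purge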
The PHAssets were acquired this way:
- (PHFetchResult *)assetsFetchResults {
    __block PHFetchResult *i = self->_assetsFetchResults;
    if (!i) {
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeSmartAlbumVideos options:nil];
            self->_assetCollection = smartAlbums.firstObject;
            if (![self->_assetCollection isKindOfClass:[PHAssetCollection class]]) self->_assetCollection = nil;

            PHFetchOptions *allPhotosOptions = [[PHFetchOptions alloc] init];
            allPhotosOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];
            i = [PHAsset fetchAssetsInAssetCollection:self->_assetCollection options:allPhotosOptions];
            self->_assetsFetchResults = i;
        });
    }
    return i;
}
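One last assumption worth flagging: none of this returns any assets unless the app already has permission to read the photo library. If you're adapting the project, request authorization before the first fetch; a minimal sketch (the reloadData call assumes a UICollectionViewController):
[PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
    if (status == PHAuthorizationStatusAuthorized) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.collectionView reloadData]; // safe to fetch and display now
        });
    }
}];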