In Objective-C, there is the Photos Framework (a.k.a. PhotoKit), which lets iOS developers access the photo library on iPhone and iPad and retrieve pictures and videos along with their metadata.
How would Mac developers perform a similar task?
It seems PhotoKit is only available on iOS 8.0 and later. Is there an equivalent of the Photos Framework for Mac OS X?
The Media Library Framework is the place to go.
Usage:
@import MediaLibrary;

- (void)awakeFromNib
{
    NSDictionary *options = @{
        MLMediaLoadSourceTypesKey: @(MLMediaSourceTypeImage),
        MLMediaLoadIncludeSourcesKey: @[MLMediaSourcePhotosIdentifier]
    };

    MLMediaLibrary *mediaLibrary = [[MLMediaLibrary alloc] initWithOptions:options];
    self.mediaLibrary = mediaLibrary;

    [mediaLibrary addObserver:self
                   forKeyPath:@"mediaSources"
                      options:0
                      context:(__bridge void *)@"mediaLibraryLoaded"];

    [mediaLibrary mediaSources]; // returns nil and starts asynchronous loading
}
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context
{
    if (context == (__bridge void *)@"mediaLibraryLoaded") {
        // The media library is loaded now, we can access mediaSources
        MLMediaSource *mediaSource = [self.mediaLibrary.mediaSources objectForKey:@"com.apple.Photos"];
    }
}
The concept behind the framework is that you request an attribute of an object, which initially returns nil (or an empty collection) and kicks off asynchronous loading. You subscribe to that attribute with a key-value observer and wait until it has loaded. Then you retrieve the next child by the same principle, and so on.
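Note that the snippet stores the library in a property (self.mediaLibrary): the MLMediaLibrary object has to stay alive while it loads asynchronously, otherwise the observer never fires. A minimal sketch of the interface the example assumes (the class name here is just a placeholder):

@import Cocoa;
@import MediaLibrary;

// Hypothetical controller class; any object that owns the library works.
@interface PhotoBrowserController : NSObject

// Keep a strong reference so the library isn't deallocated
// before its asynchronous loading has finished.
@property (nonatomic, strong) MLMediaLibrary *mediaLibrary;

@end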
Based on Pierre F's answer, I extended the code to print the URLs of all photos:
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification
{
    NSDictionary *options = @{
        MLMediaLoadSourceTypesKey: @(MLMediaSourceTypeImage),
        MLMediaLoadIncludeSourcesKey: @[MLMediaSourcePhotosIdentifier]
    };

    self.mediaLibrary = [[MLMediaLibrary alloc] initWithOptions:options];

    [self.mediaLibrary addObserver:self
                        forKeyPath:@"mediaSources"
                           options:0
                           context:(__bridge void *)@"mediaLibraryLoaded"];

    // Returns nil and starts the asynchronous loading of mediaSources.
    [self.mediaLibrary.mediaSources objectForKey:MLMediaSourcePhotosIdentifier];
}
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context
{
    MLMediaSource *mediaSource = [self.mediaLibrary.mediaSources objectForKey:MLMediaSourcePhotosIdentifier];

    if (context == (__bridge void *)@"mediaLibraryLoaded")
    {
        // The media sources are loaded; now load the root media group of the Photos source.
        [mediaSource addObserver:self
                      forKeyPath:@"rootMediaGroup"
                         options:0
                         context:(__bridge void *)@"rootMediaGroupLoaded"];
        [mediaSource rootMediaGroup];
    }
    else if (context == (__bridge void *)@"rootMediaGroupLoaded")
    {
        // The group hierarchy is available; find the "all photos" album and load its media objects.
        MLMediaGroup *albums = [mediaSource mediaGroupForIdentifier:@"TopLevelAlbums"];
        for (MLMediaGroup *album in albums.childGroups)
        {
            NSString *albumIdentifier = [album.attributes objectForKey:@"identifier"];
            if ([albumIdentifier isEqualToString:@"allPhotosAlbum"])
            {
                self.allPhotosAlbum = album;
                [album addObserver:self
                        forKeyPath:@"mediaObjects"
                           options:0
                           context:(__bridge void *)@"mediaObjects"];
                [album mediaObjects];
                break;
            }
        }
    }
    else if (context == (__bridge void *)@"mediaObjects")
    {
        // The media objects are loaded; print the path of every photo.
        NSArray *mediaObjects = self.allPhotosAlbum.mediaObjects;
        for (MLMediaObject *mediaObject in mediaObjects)
        {
            NSURL *url = mediaObject.URL;
            NSLog(@"%@", url.path);
        }
    }
}
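Each of these observations only needs to fire once, so it is good practice to also remove the observer in the matching branch once its key path has loaded. A minimal sketch of that cleanup, using the same context strings as above:

// In the "mediaLibraryLoaded" branch, once mediaSources is available:
[self.mediaLibrary removeObserver:self
                       forKeyPath:@"mediaSources"
                          context:(__bridge void *)@"mediaLibraryLoaded"];

// In the "rootMediaGroupLoaded" branch, once the group hierarchy is available:
[mediaSource removeObserver:self
                 forKeyPath:@"rootMediaGroup"
                    context:(__bridge void *)@"rootMediaGroupLoaded"];

// In the "mediaObjects" branch, once the album's media objects have arrived:
[self.allPhotosAlbum removeObserver:self
                         forKeyPath:@"mediaObjects"
                            context:(__bridge void *)@"mediaObjects"];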