Merge audio files on iPhone

Posted 2019-01-07 19:37

Question:

I want to merge a .caf file and an .mp3 file into a single .mp3 file on the iPhone, or alternatively convert both to .aac and then merge them. How can I do this? (Like a karaoke app, I want to mix my voice and the backing music together.)

Answer 1:

You need to decode both files to LPCM (plain old ints), add/mix them together, then re-encode. The iPhone SDK does not support MP3 encoding, so you should re-encode to AAC.

The Apple sample code iPhoneExtAudioFileConvertTest would be a good place to start.
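For the mixing step itself, a minimal sketch might look like the following (this assumes both files have already been decoded to interleaved 16-bit LPCM buffers of the same length, channel count and sample rate; the function and parameter names are only illustrative):

#include <stdint.h>
#include <stddef.h>

// Mix two decoded LPCM buffers sample by sample, clamping the sum so it
// does not wrap around. Both buffers are assumed to be interleaved 16-bit
// PCM with identical channel count and sample rate.
static void MixPCMBuffers(const int16_t *voiceSamples,
                          const int16_t *musicSamples,
                          int16_t *mixedSamples,
                          size_t sampleCount)
{
    for (size_t i = 0; i < sampleCount; i++) {
        int32_t sum = (int32_t)voiceSamples[i] + (int32_t)musicSamples[i];
        if (sum > INT16_MAX) sum = INT16_MAX;   // clamp positive overflow
        if (sum < INT16_MIN) sum = INT16_MIN;   // clamp negative overflow
        mixedSamples[i] = (int16_t)sum;
    }
}

The mixed buffer can then be written out through ExtAudioFile configured with an AAC destination format, which is essentially what the conversion half of that sample project demonstrates.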



Answer 2:

You can use this method:

- (BOOL)combineVoices1
{
    NSError *error = nil;


    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,    NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];


    // Create an AVMutableComposition. It will hold our multiple AVMutableCompositionTrack objects.
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack setPreferredVolume:0.8];
    NSString *soundOne = [[NSBundle mainBundle] pathForResource:@"test1" ofType:@"caf"];
    NSURL *url = [NSURL fileURLWithPath:soundOne];
    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:&error];

    AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack1 setPreferredVolume:0.3];
    NSString *soundOne1 = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"caf"];
    NSURL *url1 = [NSURL fileURLWithPath:soundOne1];
    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
    AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:kCMTimeZero error:&error];


    AVMutableCompositionTrack *compositionAudioTrack2 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack2 setPreferredVolume:1.0];
    NSString *soundOne2 = [[NSBundle mainBundle] pathForResource:@"song" ofType:@"caf"];
    NSURL *url2 = [NSURL fileURLWithPath:soundOne2];
    AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:url2 options:nil];
    AVAssetTrack *clipAudioTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:clipAudioTrack2 atTime:kCMTimeZero error:&error];



    AVAssetExportSession *exportSession = [AVAssetExportSession
                                           exportSessionWithAsset:composition
                                           presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) return NO;

    NSString *soundOneNew = [documentsDirectory stringByAppendingPathComponent:@"combined10.m4a"];
    //NSLog(@"Output file path - %@",soundOneNew);

    // configure export session  output with all our parameters
    exportSession.outputURL = [NSURL fileURLWithPath:soundOneNew]; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A; // output file type

    // perform the export
    [exportSession exportAsynchronouslyWithCompletionHandler:^{

        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            NSLog(@"AVAssetExportSessionStatusCompleted");
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // a failure may happen because of an event out of your control
            // for example, an interruption like a phone call coming in
            // make sure and handle this case appropriately
            NSLog(@"AVAssetExportSessionStatusFailed");
        } else {
            NSLog(@"Export Session Status: %d", exportSession.status);
        }
    }];


    return YES;


}
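Note that setPreferredVolume: may not give you the relative levels you expect in the exported file; the explicit way to control per-track volume with AVAssetExportSession is to attach an AVAudioMix before starting the export. A rough sketch, inserted before the exportAsynchronouslyWithCompletionHandler: call above (the variable names and volume values are only illustrative):

    AVMutableAudioMixInputParameters *track1Params =
        [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack1];
    [track1Params setVolume:0.3 atTime:kCMTimeZero];   // voice track level

    AVMutableAudioMixInputParameters *track2Params =
        [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack2];
    [track2Params setVolume:1.0 atTime:kCMTimeZero];   // music track level

    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = @[track1Params, track2Params];
    exportSession.audioMix = audioMix;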


Answer 3:

You can use the SDAVAssetExportSession library with the following code to export multiple files to AAC:

-(void)mergeAudioFiles
{
    NSFileManager *fm = [[NSFileManager alloc] init];
    NSError *error;
    NSArray *filesNames = ...; // NSArray of the source file paths to merge
    NSString *filePath = @"Dest File Name";
    NSString *pathToSave = [NSString stringWithFormat:@"%@%@", filePath, @".m4a"];

    CMTime startTime = kCMTimeZero;
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    float audioEndTime = 0;
    for (NSString *fileName in filesNames) {
        NSURL *audioUrl = [NSURL fileURLWithPath:fileName];
        AVURLAsset *audioasset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
        CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, audioasset.duration);
        AVAssetTrack *audioAssetTrack = [[audioasset tracksWithMediaType:AVMediaTypeAudio] lastObject];
        // Append each file at the current end of the composition track.
        [compositionAudioTrack insertTimeRange:timeRange ofTrack:audioAssetTrack atTime:startTime error:&error];
        startTime = CMTimeAdd(startTime, timeRange.duration);
        audioEndTime += CMTimeGetSeconds(audioasset.duration);
    }

    NSURL *exportUrl = [NSURL fileURLWithPath:pathToSave];

    float audioStartTime=0;
    CMTime startTime1 = CMTimeMake((int)(floor(audioStartTime * 100)), 100);
    CMTime stopTime = CMTimeMake((int)(ceil(audioEndTime * 100)), 100);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime1, stopTime);


    SDAVAssetExportSession *encoder = [[SDAVAssetExportSession alloc] initWithAsset:composition];
    encoder.outputFileType = AVFileTypeAppleM4A;
    encoder.outputURL = exportUrl;
    encoder.audioSettings = @{
        AVFormatIDKey: @(kAudioFormatMPEG4AAC),
        AVNumberOfChannelsKey: @2,
        AVSampleRateKey: @44100,
        AVEncoderBitRateKey: @128000,
    };
    encoder.timeRange = exportTimeRange;
    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
    NSLog(@"Starting Audio Marge");
    [encoder exportAsynchronouslyWithCompletionHandler:^
    {
        if (encoder.status == AVAssetExportSessionStatusCompleted)
        {
            NSLog(@"Audio Merge succeeded");
            NSError *err = nil;
            BOOL result = [fm moveItemAtPath:pathToSave toPath:filePath error:&err];
            if (!result) {
                NSLog(@"Error: %@", err);
            }
            NSLog(@"Audio Copied");
        } else if (encoder.status == AVAssetExportSessionStatusCancelled) {
            NSLog(@"Audio export cancelled");
        } else {
            NSLog(@"Audio export failed with error: %@ (%ld)", encoder.error.localizedDescription, (long)encoder.error.code);
        }
        dispatch_semaphore_signal(semaphore);
    }];
   NSLog(@"Audio Wait to Finish");
   dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
  //cleanup
   for (NSString *fileName in filesNames) {
       [fm removeItemAtPath:fileName error:&error];
   }
   NSLog(@"Audio Marge Finished");
}


Answer 4:

Code for merging (concatenating) two audio files:

-(void)mergeTwoAudioFile
{

    NSError * error;


    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);

    NSString *documentsDirectory = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"Music_Directory"];


    //first audio path

    NSString *firstPath= [documentsDirectory stringByAppendingPathComponent:@"audio01.m4a"];

    NSURL *audioUrl1 = [NSURL fileURLWithPath:firstPath];


    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVURLAsset *audioasset1 = [[AVURLAsset alloc] initWithURL:audioUrl1 options:nil];
    AVAssetTrack *audioAssetTrack1 = [[audioasset1 tracksWithMediaType:AVMediaTypeAudio] lastObject];

    AVMutableCompositionTrack *audioCompositionTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime tempTime1 = mixComposition.duration;

    [audioCompositionTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioasset1.duration) ofTrack:audioAssetTrack1 atTime:tempTime1 error:&error];



    //========second audio

    NSString *secondPath = [documentsDirectory stringByAppendingPathComponent:@"audio02.m4a"];
    NSURL *audioUrl2 = [NSURL fileURLWithPath:secondPath];

    AVURLAsset *audioasset2 = [[AVURLAsset alloc] initWithURL:audioUrl2 options:nil];
    AVAssetTrack *audioAssetTrack2 = [[audioasset2 tracksWithMediaType:AVMediaTypeAudio] lastObject];

    AVMutableCompositionTrack *audioCompositionTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    // mixComposition.duration is now the length of the first clip, so the
    // second clip is appended after it rather than overlaid.
    CMTime tempTime2 = mixComposition.duration;

    [audioCompositionTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioasset2.duration) ofTrack:audioAssetTrack2 atTime:tempTime2 error:&error];


    NSString *pathToSave = [documentsDirectory stringByAppendingPathComponent:@"finalTest.m4a"];

    NSURL *movieUrl = [NSURL fileURLWithPath:pathToSave];

    AVAssetExportSession *exporter =[[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetAppleM4A];
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.outputURL = movieUrl;

    exporter.outputFileType = AVFileTypeAppleM4A;



    //====================================================================

    float audioStartTime=0;

    CMTime assetTime1 = [audioasset1 duration];
    Float64 duration1 = CMTimeGetSeconds(assetTime1);

    CMTime assetTime2 = [audioasset2 duration];
    Float64 duration2 = CMTimeGetSeconds(assetTime2);

    float audioEndTime=duration1+duration2;


    CMTime startTime = CMTimeMake((int)(floor(audioStartTime * 100)), 100);

    CMTime stopTime = CMTimeMake((int)(ceil(audioEndTime * 100)), 100);

    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);


    exporter.timeRange =exportTimeRange;
    //====================================================================

    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        NSString* message;
        switch (exporter.status) {
            case AVAssetExportSessionStatusFailed:
                message = [NSString stringWithFormat:@"Export failed. Error: %@", exporter.error.description];
                NSLog(@"%@", message);
                break;
            case AVAssetExportSessionStatusCompleted: 
                message = [NSString stringWithFormat:@"Export completed"];
                NSLog(@"%@", message);

                break;
            case AVAssetExportSessionStatusCancelled:
                message = [NSString stringWithFormat:@"Export cancelled!"];
                NSLog(@"%@", message);
                break;
            default:
                NSLog(@"Export unhandled status: %ld", (long)exporter.status);
                break;
        }
    }];
}