2011-10-05 15 views
5

Baktım ve bir cevap aradım, ancak bulamadım. Benzer sorular çok soruldu, ama hiçbiri cevap alamamış. AVAudioRecorder kullanarak ses kaydeden bir uygulamam var. Şimdi iki veya daha fazla kaydı, e-postayla gönderilebilecek tek bir dosyada birleştirmek istiyorum. Bunun nasıl yapılabileceği konusunda herhangi bir ipucu var mı? iPhone'da iki .caf dosyası nasıl birleştirilir?

(This answer, Audio Service Queues (Ses Servis Kuyrukları) denilen bir şeyin kullanılmasını öneriyor, ancak bu konuda hiçbir şey bilmiyorum.)

+0

Ya da sırayla çalınacak ses dosyalarını bir dizi içinde tutmanın bir yolu varsa, o da yararlı olur. – Snowman

cevap

8

Düşündüğün kadar kolay değil. iAmRingtones uygulamasını oluştururken tam olarak yapmak istediğin şeyi yapmak için AVFoundation framework'ünü kullandım. Ses dosyalarından AVAsset'ler oluşturmak ve bir AVExportSession kurmak gerekiyordu. Sonuç harikaydı, ama kesinlikle biraz çalışma gerektirdi. Aşağıdaki yöntemi (her ses parçası için bir kez olmak üzere) iki kez çağırdık:

// Adds the audio file at assetURL to the given composition: creates a new
// audio track, registers a volume setting for it in audioMixParams (an
// ivar consumed later when building the AVMutableAudioMix), and inserts
// the file's full duration at the start of the new track.
- (void) setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition *)composition { 

    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil]; 

    // One new (initially empty) composition track per source file. 
    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 

    // Guard against assets with no audio track: objectAtIndex:0 on an 
    // empty array would raise an exception. 
    NSArray *audioTracks = [songAsset tracksWithMediaType:AVMediaTypeAudio]; 
    if ([audioTracks count] == 0) { 
        NSLog(@"No audio track found in asset at %@", assetURL); 
        return; 
    } 
    AVAssetTrack *sourceAudioTrack = [audioTracks objectAtIndex:0]; 

    NSError *error = nil; 

    CMTime startTime = CMTimeMakeWithSeconds(0, 1); 
    CMTime trackDuration = songAsset.duration; 
    CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration); 

    // Set this clip's playback volume (0.0–1.0) for the eventual audio mix. 
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track]; 
    [trackMix setVolume:0.8f atTime:startTime]; 
    [audioMixParams addObject:trackMix]; 

    // Insert the whole source file at the head of the new track; report 
    // failures instead of silently discarding the return value. 
    if (![track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error]) { 
        NSLog(@"Failed to insert audio from %@: %@", assetURL, error); 
    } 
} 

Yukarıdaki yöntem şu şekilde çağrılır:

// Builds a composition from two audio files, applies a volume mix, and 
// exports the result asynchronously as an .m4a in the Documents directory. 
// NOTE(review): pathToYourAudioFile1/2, audioMixParams, progressView, 
// util and myDeleteFile() are declared elsewhere in this file/project. 
- (void) exportAudio { 

    AVMutableComposition *composition = [AVMutableComposition composition]; 
    // MRC: if exportAudio can run more than once, consider releasing the 
    // previous audioMixParams value before replacing it. 
    audioMixParams = [[NSMutableArray alloc] init]; 

    //Add Audio Tracks to Composition 
    NSString *URLPath1 = pathToYourAudioFile1; 
    NSURL *assetURL1 = [NSURL fileURLWithPath:URLPath1]; 
    [self setUpAndAddAudioAtPath:assetURL1 toComposition:composition]; 

    NSString *URLPath2 = pathToYourAudioFile2; 
    NSURL *assetURL2 = [NSURL fileURLWithPath:URLPath2]; 
    [self setUpAndAddAudioAtPath:assetURL2 toComposition:composition]; 

    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix]; 
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams]; 

    //If you need to query what formats you can export to, here's a way to find out 
    NSLog (@"compatible presets for songAsset: %@", 
      [AVAssetExportSession exportPresetsCompatibleWithAsset:composition]); 

    // autorelease: the completion block and the timer's userInfo both 
    // retain the exporter, so it stays alive for the export (fixes an 
    // MRC leak — the original alloc/init was never balanced). 
    AVAssetExportSession *exporter = [[[AVAssetExportSession alloc] 
            initWithAsset: composition 
            presetName: AVAssetExportPresetAppleM4A] autorelease]; 
    exporter.audioMix = audioMix; 
    exporter.outputFileType = AVFileTypeAppleM4A;  // i.e. @"com.apple.m4a-audio" 
    NSString *fileName = @"someFilename"; 
    NSString *exportFile = [[util getDocumentsDirectory] stringByAppendingFormat: @"/%@.m4a", fileName];  

    // set up export: remove any stale file at the destination first 
    myDeleteFile(exportFile); 
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile]; 
    exporter.outputURL = exportURL; 

    // do the export; the handler fires on an arbitrary queue when done 
    [exporter exportAsynchronouslyWithCompletionHandler:^{ 
      AVAssetExportSessionStatus exportStatus = exporter.status; 
      switch (exportStatus) { 
       case AVAssetExportSessionStatusFailed: { 
        // Braces are required: a declaration cannot immediately follow 
        // a case label in C/Objective-C (original did not compile). 
        NSError *exportError = exporter.error; 
        NSLog (@"AVAssetExportSessionStatusFailed: %@", exportError); 
        break; 
       } 
       case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break; 
       case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break; 
       case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break; 
       case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break; 
       case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break; 
       default: NSLog (@"didn't get export status"); break; 
      } 
    }]; 

    // start up the export progress bar 
    progressView.hidden = NO; 
    progressView.progress = 0.0; 
    [NSTimer scheduledTimerWithTimeInterval:0.1 
           target:self 
           selector:@selector (updateExportProgress:) 
           userInfo:exporter 
           repeats:YES]; 

} 
+0

Bunu .m4a dosyası yerine .caf dosyası olarak nasıl dışa aktarabilirim? – Newbie

+0

exporter.outputFileType = AVFileTypeCoreAudioFormat; //.caf – Underdog

1

İki .wav dosyasının nasıl birleştirileceğini bilen varsa paylaşabilir. Aşağıda, uygulamamızdaki dışa aktarma (birleştirme) işlevini aşağı yukarı nasıl oluşturduğumuz ve recordingsArray dizisini nasıl kullandığımız görülüyor:

# pragma mark mergeRecording 

// Concatenates every recording in recordingsArray into one composition 
// (see buildSequenceComposition:) and exports it asynchronously as an 
// .m4a file in the temporary directory. Progress/completion handling is 
// driven by updateProgress:, which reschedules itself while exporting. 
- (void) mergeRecording 
{ 
     AVMutableComposition *composition = [AVMutableComposition composition]; 
     [self buildSequenceComposition:composition]; //given Below 

     NSLog (@"compatible presets for songAsset: %@",[AVAssetExportSession exportPresetsCompatibleWithAsset:composition]); 

     // autorelease: the completion block and the timer's userInfo both 
     // retain the exporter for as long as it is needed (MRC leak fix). 
     AVAssetExportSession *exporter = [[[AVAssetExportSession alloc] initWithAsset: composition presetName: AVAssetExportPresetAppleM4A] autorelease]; 
     exporter.outputFileType = AVFileTypeAppleM4A;  // i.e. @"com.apple.m4a-audio" 

     //File Name 

     NSString *recordingFileName = [self setRecordingFileName]; 
     self.recordingTimeLbl.text = @"00:00:00"; 
     NSString *exportFile = [NSTemporaryDirectory() stringByAppendingFormat: @"/%@.m4a", recordingFileName]; 

     // set up export: remove any stale file at the destination. 
     // (The original also read the just-deleted file into an alloc'd, 
     // leaked NSData and logged its length — pointless, so removed.) 
     [[NSFileManager defaultManager] removeItemAtPath:exportFile error:NULL]; 
     NSURL *exportURL = [NSURL fileURLWithPath:exportFile]; 
     exporter.outputURL = exportURL; 

     [exporter exportAsynchronouslyWithCompletionHandler:^{ 
      AVAssetExportSessionStatus exportStatus = exporter.status; 
      switch (exportStatus) { 
       case AVAssetExportSessionStatusFailed: 
        NSLog (@"AVAssetExportSessionStatusFailed:"); 
        break; 

       case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break; 
       case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break; 
       case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break; 
       case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break; 
       case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break; 
       default: NSLog (@"didn't get export status"); break; 
      } 
     }]; 

     // Kick off progress polling; updateProgress: reschedules itself via 
     // performSelector while the session is exporting, so repeats:NO fits. 
     [NSTimer scheduledTimerWithTimeInterval:0.1 target:self selector:@selector (updateProgress:) userInfo:exporter repeats:NO]; 
} 


// Builds a timestamped recording file name of the form 
// "<base>AND<dd-MM-yyyy>AND<hour>-<minute>-<second>" (base omitted when 
// empty). Despite the `set` prefix this is a factory, not a setter — 
// renaming would break existing callers, so the name is kept. 
- (NSString *) setRecordingFileName 
{ 
    NSDate *todaysDate = [NSDate date]; 

    NSDateFormatter *dateFormat = [[NSDateFormatter alloc] init]; 
    [dateFormat setDateFormat:@"dd-MM-yyyy"]; 
    NSString *dateString = [dateFormat stringFromDate:todaysDate]; 
    [dateFormat release]; // MRC: formatter was previously leaked 

    NSCalendar *gregorian = [[NSCalendar alloc] initWithCalendarIdentifier:NSGregorianCalendar]; 
    NSDateComponents *dateComponents = [gregorian components:(NSHourCalendarUnit | NSMinuteCalendarUnit | NSSecondCalendarUnit) fromDate:todaysDate]; 
    NSInteger hour = [dateComponents hour]; 
    NSInteger minute = [dateComponents minute]; 
    NSInteger second = [dateComponents second]; 
    [gregorian release]; 

    // %ld with (long) casts: NSInteger is 64-bit on modern targets, so the 
    // original %i specifiers were wrong. Build the "h-m-s" part once 
    // instead of three nested stringWithFormat: calls. 
    NSString *timeString = [NSString stringWithFormat:@"%ld-%ld-%ld", (long)hour, (long)minute, (long)second]; 
    NSLog(@"Date: %@ \n Time : %@", dateString, timeString); 

    NSString *recordingFileName = @"Any Name"; 
    if(recordingFileName.length > 0) 
    { 
      recordingFileName = [NSString stringWithFormat:@"%@AND%@AND%@", recordingFileName, dateString, timeString]; 
    } 
    else 
    { 
      recordingFileName = [NSString stringWithFormat:@"%@AND%@", dateString, timeString]; 
    } 
    return recordingFileName; 
} 


// Polls the export session and reacts to its state. Invoked two ways: 
// by the NSTimer from mergeRecording (session in userInfo) and by the 
// performSelector below (session passed directly). On completion the 
// source recordings are deleted and the merged file is stored in the DB; 
// on failure/cancel the user is alerted. 
- (void)updateProgress:(id)timer 
{ 
    // Fix: `session` was previously uninitialized — if the argument were 
    // neither an NSTimer nor a session, reading it was undefined behavior. 
    AVAssetExportSession *session = nil; 
    if([timer isKindOfClass:[NSTimer class]]) 
     session = (AVAssetExportSession *)[timer userInfo]; 
    else if([timer isKindOfClass:[AVAssetExportSession class]]) 
     session = timer; 

    if (session.status == AVAssetExportSessionStatusExporting) 
    { 
     // Still running: poll again shortly. UITrackingRunLoopMode keeps 
     // polling alive while the user is scrolling. 
     NSArray *modes = [[[NSArray alloc] initWithObjects:NSDefaultRunLoopMode, UITrackingRunLoopMode, nil] autorelease]; 
     [self performSelector:@selector(updateProgress:) withObject:session afterDelay:0.5 inModes:modes]; 

    } 
    else if(session.status == AVAssetExportSessionStatusCompleted) 
    { 
     NSLog(@"Exporting Ended"); 
     NSURL *exportURL = session.outputURL; 
     // dataWithContentsOfURL: is autoreleased; the alloc/init version leaked. 
     NSData *soundData = [NSData dataWithContentsOfURL:exportURL]; 
     // %lu/(unsigned long): NSUInteger is not %i on 64-bit targets. 
     NSLog(@"Length %lu \n Path %@", (unsigned long)soundData.length, exportURL); 

     [self.activityIndicator stopAnimating]; 
     self.activityIndicator.hidden = YES; 
     NSLog(@"Merging Complete"); 

     // The merged file now contains all the audio; delete the source clips. 
     for(NSURL *recordingPathUrl in recordingsArray) 
     { 
       BOOL removed = [[NSFileManager defaultManager] removeItemAtPath:recordingPathUrl.relativePath error:NULL]; 
       if (removed) 
       { 
        NSLog(@"File Removed at Path %@",recordingPathUrl.relativePath); 
       } 
       else 
       { 
        NSLog(@"Problem During Removal of Recording At Path %@",recordingPathUrl.relativePath); 
       } 
     } 

     NSString *exportFile = [NSString stringWithFormat:@"%@",exportURL]; 
     NSString *recordingFileName = [self setRecordingFileName]; 
     BOOL isInserted = [[DbFile sharedDatabase] insertRecordingDataIntoTable:recordingFileName recordingPath:exportFile]; 

     if(isInserted) 
     { 
      NSLog(@"Recording Inserted In Database"); 
     } 
     else 
     { 
      // Fix: the failure branch previously logged the success message. 
      NSLog(@"Recording Insert Into Database Failed"); 
     } 

     if([timer isKindOfClass:[NSTimer class]]) 
      [timer invalidate]; 

    } 
    else if(session.status == AVAssetExportSessionStatusFailed) 
    { 
      [self.activityIndicator stopAnimating]; 
      NSLog(@"Recording Export Failed"); 

      UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Failed" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles: nil]; 
      [alertView show]; 
      [alertView release]; 

      if([timer isKindOfClass:[NSTimer class]]) 
       [timer invalidate]; 

    } 
    else if(session.status == AVAssetExportSessionStatusCancelled) 
    { 
      [self.activityIndicator stopAnimating]; 
      NSLog(@"Recording Export Cancelled"); 

      UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Cancelled" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles: nil]; 
      [alertView show]; 
      [alertView release]; 
      if([timer isKindOfClass:[NSTimer class]]) 
       [timer invalidate]; 
    } 
} 


// Appends every recording in recordingsArray (an array of NSURLs), in 
// order, to a single audio track of `composition` — i.e. back-to-back 
// concatenation with no overlap. 
- (void) buildSequenceComposition:(AVMutableComposition *)composition 
{ 
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio 
                     preferredTrackID:kCMPersistentTrackID_Invalid]; 
    CMTime nextClipStartTime = kCMTimeZero; 

    for(NSURL *recordingURL in recordingsArray) 
    { 
     AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:recordingURL options:nil]; 

     // Skip files with no audio track; objectAtIndex:0 would throw. 
     NSArray *audioTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio]; 
     if ([audioTracks count] == 0) 
     { 
      NSLog(@"Skipping %@: no audio track", recordingURL); 
      [audioAsset release]; 
      continue; 
     } 

     CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [audioAsset duration]); 
     AVAssetTrack *sourceAudioTrack = [audioTracks objectAtIndex:0]; 

     // Report insertion failures instead of passing error:nil. 
     NSError *error = nil; 
     if (![audioTrack insertTimeRange:timeRangeInAsset ofTrack:sourceAudioTrack atTime:nextClipStartTime error:&error]) 
     { 
      NSLog(@"Failed to append %@: %@", recordingURL, error); 
     } 
     nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration); 

     [audioAsset release]; // MRC: asset was previously leaked each iteration 
    } 
}