2011-12-14 54 views
1

我想将两个 .wav 录音文件合并在一起,希望有人能帮我弄清楚如何实现。我尝试过直接拼接两个文件的数据,但文件头(header)出了问题。在 iPhone 上使用 Objective-C,能否像这样把两个 .wav 波形文件合并?

我是这样进行合并的:

// Concatenate the raw bytes of two recorded .wav files and write the result
// to a temporary file, then delete the second source file.
// NOTE(review): plain byte concatenation is NOT a valid WAV merge — the
// second file's 44-byte RIFF header lands in the middle of the audio data,
// and the first header's length fields are never updated, which is exactly
// the "header problem" described above.
NSMutableData *datas = [[NSMutableData alloc] init]; // was [NSMutableData alloc] — never initialized

NSData *data1 = [NSData dataWithContentsOfFile:[recordedTmpFile1 path]];
NSData *data2 = [NSData dataWithContentsOfFile:[recordedTmpFile2 path]];

// -length returns NSUInteger; %d is the wrong specifier on 64-bit — cast and
// use %lu.
NSLog(@"file1 size : %lu", (unsigned long)[data1 length]);
NSLog(@"file2 size : %lu", (unsigned long)[data2 length]);

[datas appendData:data1];
[datas appendData:data2];

NSLog(@"file3 size : %lu", (unsigned long)[datas length]);

// [NSURL alloc] without init is useless (and leaks under MRC); build the URL
// directly from the factory method.
NSURL *combinedPath =
    [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"1_20111215.wav"]];

[[NSFileManager defaultManager] createFileAtPath:[combinedPath path]
                                        contents:datas
                                      attributes:nil];

NSFileManager *fm = [NSFileManager defaultManager];
[fm removeItemAtPath:[recordedTmpFile2 path] error:nil];
+0

你想如何结合它是什么问题?你可以说得更详细点吗 ? – StackFlowed 2011-12-14 13:31:09

回答

10

我已经实现了两个 .wav 录音文件的合并,代码如下:

// Merge two recorded .wav files (RecordingFile.wav + tempRecordingFile.wav):
// strip each file's 44-byte canonical PCM WAV header, concatenate the raw
// sample data, and prepend a freshly built header whose RIFF and data-chunk
// sizes cover the combined payload. The merged file replaces the original.
// Assumes both inputs are 16-bit PCM, 2 channels, 11025 Hz, with a plain
// 44-byte header and no extra chunks — TODO(review): confirm against the
// recorder's actual AVAudioRecorder settings.
//
// [NSURL alloc] without init was useless (and leaks under MRC); assign the
// factory-built URLs directly.
NSURL *originalFileName =
    [NSURL fileURLWithPath:[appDelegate.RecordingPath stringByAppendingPathComponent:@"RecordingFile.wav"]];
NSLog(@"LocalRecoding Path :%@", originalFileName);

NSURL *temporaryFileName =
    [NSURL fileURLWithPath:[appDelegate.RecordingPath stringByAppendingPathComponent:@"tempRecordingFile.wav"]];

long totalAudioLen = 0;
long totalDataLen = 0;
long longSampleRate = 11025;         // sample rate in Hz
int channels = 2;
long byteRate = 16 * 11025 * channels / 8; // bytes/sec = bitsPerSample * rate * channels / 8

NSData *wav1Data = [NSData dataWithContentsOfFile:[originalFileName path]];
NSData *wav2Data = [NSData dataWithContentsOfFile:[temporaryFileName path]];

// Both files must at least contain a full 44-byte header before we can strip it.
if ([wav1Data length] > 44 && [wav2Data length] > 44)
{
    NSUInteger wav1DataSize = [wav1Data length] - 44;
    NSLog(@"WAV I:%lu", (unsigned long)wav1DataSize);
    NSUInteger wav2DataSize = [wav2Data length] - 44;
    NSLog(@"WAV II:%lu", (unsigned long)wav2DataSize);

    // Raw PCM payloads with the 44-byte headers removed.
    NSData *Wave1 = [wav1Data subdataWithRange:NSMakeRange(44, wav1DataSize)];
    NSData *Wave2 = [wav2Data subdataWithRange:NSMakeRange(44, wav2DataSize)];

    NSLog(@"WAV 1:%lu", (unsigned long)[Wave1 length]);
    NSLog(@"WAV 2:%lu", (unsigned long)[Wave2 length]);

    totalAudioLen = [Wave1 length] + [Wave2 length];
    totalDataLen = totalAudioLen + 44; // payload + new header

    // Build a canonical 44-byte PCM WAV header on the stack (the original
    // malloc'd this buffer and never freed it). All multi-byte fields are
    // little-endian per the RIFF spec.
    Byte header[44];
    header[0] = 'R'; // RIFF/WAVE header
    header[1] = 'I';
    header[2] = 'F';
    header[3] = 'F';
    header[4] = (Byte)(totalDataLen & 0xff);
    header[5] = (Byte)((totalDataLen >> 8) & 0xff);
    header[6] = (Byte)((totalDataLen >> 16) & 0xff);
    header[7] = (Byte)((totalDataLen >> 24) & 0xff);
    header[8] = 'W';
    header[9] = 'A';
    header[10] = 'V';
    header[11] = 'E';
    header[12] = 'f'; // 'fmt ' chunk
    header[13] = 'm';
    header[14] = 't';
    header[15] = ' ';
    header[16] = 16; // size of 'fmt ' chunk (16 for PCM)
    header[17] = 0;
    header[18] = 0;
    header[19] = 0;
    header[20] = 1; // audio format = 1 (linear PCM)
    header[21] = 0;
    header[22] = (Byte)channels;
    header[23] = 0;
    header[24] = (Byte)(longSampleRate & 0xff);
    header[25] = (Byte)((longSampleRate >> 8) & 0xff);
    header[26] = (Byte)((longSampleRate >> 16) & 0xff);
    header[27] = (Byte)((longSampleRate >> 24) & 0xff);
    header[28] = (Byte)(byteRate & 0xff);
    header[29] = (Byte)((byteRate >> 8) & 0xff);
    header[30] = (Byte)((byteRate >> 16) & 0xff);
    header[31] = (Byte)((byteRate >> 24) & 0xff);
    header[32] = (Byte)(2 * 8 / 8); // block align = channels * bitsPerSample / 8
    header[33] = 0;
    header[34] = 16; // bits per sample
    header[35] = 0;
    header[36] = 'd'; // 'data' chunk
    header[37] = 'a';
    header[38] = 't';
    header[39] = 'a';
    header[40] = (Byte)(totalAudioLen & 0xff);
    header[41] = (Byte)((totalAudioLen >> 8) & 0xff);
    header[42] = (Byte)((totalAudioLen >> 16) & 0xff);
    header[43] = (Byte)((totalAudioLen >> 24) & 0xff);

    NSData *headerData = [NSData dataWithBytes:header length:44];

    // Merge the sound data of the original file with the temp file and create
    // a new sound file with the updated header.
    // (The original used [NSMutableData alloc] without init — appending to an
    // uninitialized object.)
    NSMutableData *soundFileData = [[NSMutableData alloc] initWithCapacity:totalDataLen];
    [soundFileData appendData:headerData];
    [soundFileData appendData:Wave1];
    [soundFileData appendData:Wave2];

    // Delete the original file.
    NSFileManager *fm = [NSFileManager defaultManager];
    [fm removeItemAtPath:[originalFileName path] error:nil];

    // Create a new file with the merged audio under the original file name.
    // NOTE: lastPathComponent was previously passed to stringWithFormat: as
    // the *format* string — a format-string bug if the name ever contained
    // a '%'. Use the component directly.
    NSURL *mergedFilePath =
        [NSURL fileURLWithPath:[appDelegate.RecordingPath
                                   stringByAppendingPathComponent:[originalFileName lastPathComponent]]];

    [[NSFileManager defaultManager] createFileAtPath:[mergedFilePath path]
                                            contents:soundFileData
                                          attributes:nil];

    NSLog(@"COMBINED FILE PATH :%@", mergedFilePath);
}
3

此代码会去掉 CAF 文件头,只返回包含 PCM 数据的 NSData。

/// Returns the raw PCM payload of a CAF file at the given path, i.e. every
/// byte after the 'data' chunk's header (4-byte chunk type + 8-byte chunk
/// size + 4-byte edit count).
/// @param path Filesystem path to the CAF file.
/// @return The PCM bytes, or nil if the file cannot be read, contains no
///         'data' chunk, or is truncated.
- (NSData *)stripCAFHeader:(NSString *)path
{   // NOTE: the opening brace was missing in the original snippet
    NSFileManager *fileMgr = [NSFileManager defaultManager];

    // Load the whole file in.
    NSData *dataBuffer = [fileMgr contentsAtPath:path];
    if (dataBuffer == nil) {
        return nil; // unreadable / missing file
    }

    // Locate the start of the 'data' chunk.
    NSData *searchString = [NSData dataWithBytes:"data" length:4];
    NSUInteger dataWordStart =
        [dataBuffer rangeOfData:searchString
                        options:0
                          range:NSMakeRange(0, [dataBuffer length])].location;
    if (dataWordStart == NSNotFound) {
        return nil; // no 'data' chunk — not a CAF file we can strip
    }

    // Skip 4 bytes for the DATA word, 8 bytes for the data length, and
    // 4 bytes for the edit count.
    NSUInteger payloadStart = dataWordStart + 4 + 8 + 4;
    if (payloadStart > [dataBuffer length]) {
        return nil; // truncated file
    }

    // Copy out everything after the chunk header.
    NSRange dataRange = NSMakeRange(payloadStart, [dataBuffer length] - payloadStart);
    return [dataBuffer subdataWithRange:dataRange];
}
+0

我添加了这段代码但添加数据后它不工作,它的大小增加但只有第一个文件被播放第二个记录没有播放 – Selwyn 2011-12-15 09:12:04

+0

我也想更新第一个文件的头文件,以便将总长度添加 – Selwyn 2011-12-15 09:23:00

0

我刚在自己的项目中实现了这个功能。我使用的是 AVMutableComposition 方法,需要 #import <AVFoundation/AVFoundation.h>。我在应用的文档目录中有两个 .wav 文件,并将它们合并为一个新的 .wav 文件。

// Merge two audio assets back-to-back with AVMutableComposition and export
// the result as Documents/Combined.wav.
// NOTE(review): both assets are loaded from the same audioFilePath here —
// presumably the second should point at the second recording; confirm.
NSError *error = nil; // was passed as &error without ever being declared

AVAsset *audio1 = [AVURLAsset URLAssetWithURL:audioFilePath options:nil];
AVAsset *audio2 = [AVURLAsset URLAssetWithURL:audioFilePath options:nil];

AVMutableComposition *composition = [[AVMutableComposition alloc] init];

// One audio track holding both recordings in sequence: audio1 at time zero,
// audio2 starting where audio1 ends.
AVMutableCompositionTrack *audioCombinedTrack =
    [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                             preferredTrackID:kCMPersistentTrackID_Invalid];
[audioCombinedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [audio1 duration])
                            ofTrack:[audio1.tracks firstObject] // nil-safe vs objectAtIndex:0
                             atTime:kCMTimeZero
                              error:&error];
[audioCombinedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [audio2 duration])
                            ofTrack:[audio2.tracks firstObject]
                             atTime:[audio1 duration]
                              error:&error];

AVAssetExportSession *exportSession =
    [[AVAssetExportSession alloc] initWithAsset:composition
                                     presetName:AVAssetExportPresetPassthrough];

NSString *exportPath =
    [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0]
        stringByAppendingPathComponent:@"Combined.wav"];
NSURL *exportURL = [NSURL fileURLWithPath:exportPath];

exportSession.outputURL = exportURL;
exportSession.outputFileType = AVFileTypeWAVE;

[exportSession exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Exporting. status is %ld", (long)exportSession.status);
    switch (exportSession.status) {
        // The original fell through from Failed into Completed, so failures
        // logged "export done". Handle them separately.
        case AVAssetExportSessionStatusFailed:
            NSLog(@"export failed: %@", exportSession.error);
            break;
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"export done");
            break;
        default:
            break;
    }
}];