2011-03-04 76 views
3

我试图在 iOS 应用中让音频与视频一起录制。视频部分工作正常,但没有任何音频被写入文件 —— AVAssetWriter 无法写入音频。

这是初始化设置代码(设备本身的音频硬件是正常工作的):

// --- Capture session setup -------------------------------------------------
// Builds the shared AVCaptureSession: the camera input is attached now; the
// microphone input/output are created here but only added to the session when
// recording starts (see the start/stop toggle). Video frames (32BGRA, <=30fps)
// are delivered to `self`; audio buffers go to a dedicated
// AudioOutputBufferDelegate on its own serial queue.
session = [[AVCaptureSession alloc] init];
    menu->session = session;
    menu_open = NO;
    session.sessionPreset = AVCaptureSessionPresetMedium;
    camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    menu->camera = camera;
    [session beginConfiguration];
    // FIX: check the result of lockForConfiguration: — mutating device
    // properties without holding the lock raises an exception.
    if ([camera lockForConfiguration:nil]) {
        if ([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
            camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
        }
        if ([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
            camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
        }
        if ([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
            camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
        }
        if ([camera hasTorch] && [camera isTorchModeSupported:AVCaptureTorchModeOn]) {
            [camera setTorchMode:AVCaptureTorchModeOn];
        }
        [camera unlockForConfiguration];
    }
    [session commitConfiguration];
    AVCaptureDeviceInput * camera_input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
    [session addInput:camera_input];
    // Retain the autoreleased input: it is added to / removed from the session
    // repeatedly as recording starts and stops, so it must outlive this scope.
    microphone_input = [[AVCaptureDeviceInput deviceInputWithDevice:microphone error:nil] retain];
    AVCaptureVideoDataOutput * output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    output.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [session addOutput:output];
    output.minFrameDuration = CMTimeMake(1,30);
    // Distinct queue labels so the two queues are distinguishable in
    // Instruments / crash logs (both were "MY QUEUE" before).
    dispatch_queue_t video_queue = dispatch_queue_create("capture.video.queue", NULL);
    [output setSampleBufferDelegate:self queue:video_queue];
    dispatch_release(video_queue);
    // FIX: the original did [[[AVCaptureAudioDataOutput alloc] init] retain].
    // alloc already returns an owned (+1) reference, so the extra retain
    // leaked the output. A plain alloc/init gives the intended ownership.
    audio_output = [[AVCaptureAudioDataOutput alloc] init];
    dispatch_queue_t audio_queue = dispatch_queue_create("capture.audio.queue", NULL);
    // Plain alloc/init (+1) replaces the original autorelease-then-retain
    // dance; the net ownership is identical but the intent is explicit.
    // (The delegate is intentionally kept alive for the session's lifetime.)
    AudioOutputBufferDelegate * special_delegate = [[AudioOutputBufferDelegate alloc] init];
    special_delegate->normal_delegate = self;
    [audio_output setSampleBufferDelegate:special_delegate queue:audio_queue];
    dispatch_release(audio_queue);
    [session startRunning];

这里是开始和结束记录:

// Toggles video+audio recording.
// Stop branch: finish the writer inputs, close the writer session, kick off
// the save-to-photo-album, release per-recording state, and unplug the
// microphone from the capture session.
// Start branch: plug the microphone in, then build a fresh AVAssetWriter
// with one H.264 video input (fed via a pixel-buffer adaptor) and one mono
// 44.1 kHz AAC audio input.
if (recording) { //Hence stop recording
    [video_button setTitle:@"Video" forState: UIControlStateNormal];
    recording = NO;
    // NOTE(review): the accepted answer suggests removing the two
    // markAsFinished calls and the endSessionAtSourceTime call below —
    // finishWriting alone should close out the session.
    [writer_input markAsFinished];
    [audio_writer_input markAsFinished];
    // NOTE(review): a timescale of 30 gives only ~33 ms resolution — very
    // coarse for an audio-bearing timeline; confirm against the source times
    // actually appended by the delegates.
    [video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)];
    [video_writer finishWriting];
    UISaveVideoAtPathToSavedPhotosAlbum(temp_url,self,@selector(video:didFinishSavingWithError:contextInfo:),nil);
    [start_time release];
    [temp_url release];
    [av_adaptor release];
    // Remove the audio plumbing so the session is video-only between takes.
    [microphone lockForConfiguration:nil];
    [session beginConfiguration];
    [session removeInput:microphone_input];
    [session removeOutput:audio_output];
    [session commitConfiguration];
    [microphone unlockForConfiguration];
    [menu restateConfigiration];
    [vid_off play];
}else{ //Start recording
    [vid_on play];
    // Re-attach the microphone input/output created during setup.
    [microphone lockForConfiguration:nil];
    [session beginConfiguration];
    [session addInput:microphone_input];
    [session addOutput:audio_output];
    [session commitConfiguration];
    [microphone unlockForConfiguration];
    [menu restateConfigiration];
    [video_button setTitle:@"Stop" forState: UIControlStateNormal];
    recording = YES;
    NSError *error = nil;
    // Write to a scratch file in tmp; delete any stale file from a previous
    // take first so the writer can create it fresh.
    NSFileManager * file_manager = [[NSFileManager alloc] init];
    temp_url = [[NSString alloc] initWithFormat:@"%@/%@", NSTemporaryDirectory(), @"temp.mp4"];
    [file_manager removeItemAtPath: temp_url error:NULL];
    [file_manager release];
    // NOTE(review): `error` is never checked after this init — a failed
    // (nil) video_writer here would make everything below a silent no-op.
    video_writer = [[AVAssetWriter alloc] initWithURL: [NSURL fileURLWithPath:temp_url] fileType: AVFileTypeMPEG4 error: &error];
    // 360x480 H.264 video.
    NSDictionary *video_settings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey,[NSNumber numberWithInt:360], AVVideoWidthKey,[NSNumber numberWithInt:480], AVVideoHeightKey,nil];
    writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:video_settings] retain];
    // Mono AAC, 44.1 kHz, 64 kbps.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    audio_writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings: [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,[NSNumber numberWithInt: 64000], AVEncoderBitRateKey,[NSData dataWithBytes: &acl length: sizeof(acl) ], AVChannelLayoutKey,nil]] retain];
    // Live capture: the audio input must not stall waiting for media data.
    // NOTE(review): the video writer_input does NOT set
    // expectsMediaDataInRealTime — confirm whether that is intentional.
    audio_writer_input.expectsMediaDataInRealTime = YES;
    av_adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput: writer_input sourcePixelBufferAttributes:NULL] retain];
    [video_writer addInput:writer_input];
    [video_writer addInput: audio_writer_input];
    // Writer timeline starts at 0; sample timestamps are rebased against
    // start_time (wall clock) inside the sample-buffer delegates.
    [video_writer startWriting];
    [video_writer startSessionAtSourceTime: CMTimeMake(0,1)];
    start_time = [[NSDate alloc] init];
}

这里是音频的委托(delegate)实现:

@implementation AudioOutputBufferDelegate

// Forwards captured audio sample buffers to the owning controller's
// audio writer input while a recording is in progress.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    if (!normal_delegate->recording) {
        return;
    }
    // FIX: appending while the input is saturated raises an exception;
    // drop the buffer instead (acceptable for live capture).
    if (!normal_delegate->audio_writer_input.readyForMoreMediaData) {
        return;
    }
    // Re-stamp the buffer so its PTS is relative to the writer session,
    // which was started at time 0.
    // FIX: the original used a timescale of 30 (~33 ms resolution), far too
    // coarse for 44.1 kHz audio; use a sample-rate-resolution timescale.
    // NOTE(review): preserving the buffer's native PTS and starting the
    // writer session at the first buffer's timestamp (as the answer below
    // suggests) may be the more robust design — confirm.
    CMSampleBufferSetOutputPresentationTimeStamp(sampleBuffer,
        CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: normal_delegate->start_time], 44100));
    // FIX: surface append failures instead of ignoring the return value.
    if (![normal_delegate->audio_writer_input appendSampleBuffer: sampleBuffer]) {
        NSLog(@"AudioOutputBufferDelegate: failed to append audio sample buffer");
    }
}
@end

视频方法在此并不重要,因为它工作正常。"restateConfigiration" 只是重新整理会话(session)配置,否则手电筒(torch)会熄灭等:

// Re-applies the session preset for the selected quality, then restores the
// camera's continuous auto modes and the torch state (committing a session
// configuration can reset the torch, so it must be re-asserted here).
[session beginConfiguration];
    switch (quality) {
        case Low:
            session.sessionPreset = AVCaptureSessionPresetLow;
            break;
        case Medium:
            session.sessionPreset = AVCaptureSessionPreset640x480;
            break;
    }
    [session commitConfiguration];
    // FIX: check the result of lockForConfiguration: — mutating device
    // properties without holding the lock raises an exception.
    if ([camera lockForConfiguration:nil]) {
        if ([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
            camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
        }
        if ([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
            camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
        }
        if ([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
            camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
        }
        if ([camera hasTorch]) {
            // Collapse the duplicated on/off branches into one support check.
            AVCaptureTorchMode desired_mode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
            if ([camera isTorchModeSupported:desired_mode]) {
                [camera setTorchMode:desired_mode];
            }
        }
        [camera unlockForConfiguration];
    }

谢谢你的任何帮助。

回答

8

AVAssetWriter and Audio

这可能与上面链接的文章中提到的问题相同。请尝试把这些行注释掉:

[writer_input markAsFinished]; 
[audio_writer_input markAsFinished]; 
[video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)]; 

编辑

我不确定你设置显示时间戳(PTS)的方式是否一定有错。我的处理方法是:在开始时把一个局部变量设为 0,然后在委托收到第一个数据包时这样做:

if (_startTime.value == 0) { 
    _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 
} 

然后

[bufferWriter->writer startWriting]; 
[bufferWriter->writer startSessionAtSourceTime:_startTime]; 

你的代码看起来有效,你是计算每个接收数据包的时间差。但是,AVFoundation会为您计算此值,并优化在交错容器中放置的时间戳。我不确定的另一件事是每个用于音频的CMSampleBufferRef包含多于1个数据缓冲区,其中每个数据缓冲区都有它自己的PTS。我不确定设置PTS是否会自动调整所有其他数据缓冲区。

我的代码与您的代码不同的地方在于,我为音频和视频使用了单个调度队列。在我使用的回调中(删除了一些代码)。

// Writer-state-driven handling for a combined audio+video capture callback
// (both outputs deliver to the same serial dispatch queue, so no locking is
// needed around the shared writer state).
switch (bufferWriter->writer.status) {
    case AVAssetWriterStatusUnknown:

     // First buffer ever seen: remember its native PTS and open the writer
     // session at that time, so all later buffers keep their own timestamps
     // (no manual re-stamping needed).
     if (_startTime.value == 0) {
      _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
     }

     [bufferWriter->writer startWriting];
     [bufferWriter->writer startSessionAtSourceTime:_startTime];

     //Break if not ready, otherwise fall through.
     // (Intentional case fall-through into AVAssetWriterStatusWriting.)
     if (bufferWriter->writer.status != AVAssetWriterStatusWriting) {
      break ;
     }

    case AVAssetWriterStatusWriting:
     // Route the buffer by which capture output delivered it.
     if(captureOutput == self.captureManager.audioOutput) {
       // Drop audio buffers while the input is saturated; appending anyway
       // would raise an exception.
       if(!bufferWriter->audioIn.readyForMoreMediaData) {
        break;
       }

       @try {
        if(![bufferWriter->audioIn appendSampleBuffer:sampleBuffer]) {
         [self delegateMessage:@"Audio Writing Error" withType:ERROR];
        }
       }
       @catch (NSException *e) {
        NSLog(@"Audio Exception: %@", [e reason]);
       }
     }
     else if(captureOutput == self.captureManager.videoOutput) {

      if(!bufferWriter->videoIn.readyForMoreMediaData) {
       break;;
      }

      @try {
       if (!frontCamera) {
        // Back camera: append the sample buffer as-is.
        if(![bufferWriter->videoIn appendSampleBuffer:sampleBuffer]) {
         [self delegateMessage:@"Video Writing Error" withType:ERROR];
        }
       }
       else {
        // Front camera: mirror the frame into pixelBuffer, preserving the
        // original presentation timestamp, and append via the adaptor.
        CMTime pt = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

        flipBuffer(sampleBuffer, pixelBuffer);

        if(![bufferWriter->adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pt]) {
         [self delegateMessage:@"Video Writing Error" withType:ERROR];
        }
       }

      }
      @catch (NSException *e) {
       NSLog(@"Video Exception Exception: %@", [e reason]);
      }
     }

     break;
    case AVAssetWriterStatusCompleted:
     return;
    case AVAssetWriterStatusFailed:
     // Unrecoverable: flag the failure and tear down the whole capture.
     [self delegateMessage:@"Critical Error Writing Queues" withType:ERROR];
     bufferWriter->writer_failed = YES ;
     _broadcastError = YES;
     [self stopCapture] ;
     return;
    case AVAssetWriterStatusCancelled:
     break;
    default:
     break;
}
+1

谢谢你的回答。我删除了这些行,它的工作方式就像以前一样。没有音频仍然。 – 2011-03-05 20:12:46

+0

看起来一目了然。唯一不确定的是你如何处理样本缓冲区的PTS。 – 2011-03-05 21:48:08

+0

对不起,什么是PTS? – 2011-03-05 22:01:08

相关问题