
I've been fighting with this for a week, trying to save my OpenGL renderings (which I use for green screening) to video via AVAssetWriter. Why can't I write my OpenGL FBO out to the AVAssetWriter?

I've put together the simple rig below to show what I'm doing.

I've asked on the Apple forums and received advice on the process, which is also described here: allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ and used in the GPUImage library.

As far as I can tell, I'm doing the same thing - I even use GPUImage's method to create the FBO.

I've verified that the drawing is fine (there are drawing methods in this code too; they're disabled),

the FBO is created fine and glCheckFramebufferStatus returns success,

there are no crashes, no exceptions, no warnings, the writer is in a good state, and all the texture caches, buffers, etc. are created without errors.

But I still get black video output.

If I set my glClear color to white, I get a white rectangle, but it isn't the video size I asked for.

I never get my triangle rendered into my video.

#import <AVFoundation/AVFoundation.h> 
#import <AssetsLibrary/AssetsLibrary.h> 
#import "TestViewController.h" 


///////////////////////////////////////////////////////////////// 
// This data type is used to store information for each vertex 
typedef struct 
{ 
    GLKVector3 positionCoords; 
} 
     SceneVertex; 

///////////////////////////////////////////////////////////////// 
// Define vertex data for a triangle to use in example 
static const SceneVertex vertices[] = 
     { 
       {{-1.0f, -1.0f, 1.0}}, // lower left corner 
       {{1.0f, -1.0f, 0.5}}, // lower right corner 
       {{1.0f, 1.0f, 0.0}} // upper right corner 
     }; 


@interface TestViewController() 

@property(nonatomic, readwrite, assign) CVOpenGLESTextureCacheRef videoTextureCache; 
@property(strong, nonatomic) GLKTextureInfo *background; 
@property(nonatomic, strong) AVAssetWriter *assetWriter; 

@property(nonatomic) BOOL isRecording; 

@property(nonatomic, strong) AVAssetWriterInput *assetWriterVideoInput; 

@property(nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput; 

@property(nonatomic, assign) CFAbsoluteTime startTime; 

@property(nonatomic, strong) GLKView *glkView; 

@property(nonatomic, strong) GLKBaseEffect *screenGLEffect; 
@property(nonatomic, strong) GLKBaseEffect *FBOGLEffect; 

@property(nonatomic, strong) NSTimer *recordingTimer; 

- (BOOL)isRetina; 
@end 


@implementation TestViewController 
{ 
    CVOpenGLESTextureCacheRef _writerTextureCache; 
    GLuint _writerRenderFrameBuffer; 
    GLuint vertexBufferID; 

    EAGLContext *_writerContext; 
    CVOpenGLESTextureRef _writerTexture; 
    CVPixelBufferRef _writerPixelBuffer; // referenced in -writeToFileWithTime: but missing from the original listing 
} 

- (GLKBaseEffect *)createBasicDrawingEffectInCurrentContext 
{ 
    GLKBaseEffect *basicGLEffect = [[GLKBaseEffect alloc] init]; 
    basicGLEffect.useConstantColor = GL_TRUE; 
    basicGLEffect.constantColor = GLKVector4Make(
      .5f, // Red 
      1.0f, // Green 
      .5f, // Blue 
      1.0f);// Alpha 

    // Set the background color stored in the current context 
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // background color 

    // Generate, bind, and initialize contents of a buffer to be 
    // stored in GPU memory 
    glGenBuffers(1,    // STEP 1 
      &vertexBufferID); 
    glBindBuffer(GL_ARRAY_BUFFER, // STEP 2 
      vertexBufferID); 
    glBufferData(     // STEP 3 
      GL_ARRAY_BUFFER, // Initialize buffer contents 
      sizeof(vertices), // Number of bytes to copy 
      vertices,   // Address of bytes to copy 
      GL_STATIC_DRAW); // Hint: cache in GPU memory 
    return basicGLEffect; 
} 


///////////////////////////////////////////////////////////////// 
// 
- (void)viewDidUnload 
{ 
    [super viewDidUnload]; 

    // Make the view's context current 
    GLKView *view = (GLKView *) self.view; 
    [EAGLContext setCurrentContext:view.context]; 

    // Stop using the context created in -viewDidLoad 
    ((GLKView *) self.view).context = nil; 
    [EAGLContext setCurrentContext:nil]; 

} 

////////////////////////////////////////////////////////////// 
#pragma mark AVWriter setup 
////////////////////////////////////////////////////////////// 

- (NSString *)tempFilePath 
{ 
    return [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/output2.m4v"]; 
} 


- (void)removeTempFile 
{ 
    NSString *path = [self tempFilePath]; 
    NSFileManager *fileManager = [NSFileManager defaultManager]; 

    BOOL exists = [fileManager fileExistsAtPath:path]; 
    NSLog(@">>>remove %@ Exists %d", path, exists); 

    // unlink() doesn't populate an NSError, so re-check existence instead 
    unlink([path UTF8String]); 

    NSLog(@">>>AFTER REMOVE %@ Exists %d", path, [fileManager fileExistsAtPath:path]); 

} 

- (void)createWriter 
{ 
    //My setup code is based heavily on the GPUImage project, https://github.com/BradLarson/GPUImage so some of these dictionary names and structure are similar to the code from that project - I recommend you check it out if you are interested in Video filtering/recording 
    [self removeTempFile]; 

    NSError *error; 
    self.assetWriter = [[AVAssetWriter alloc] 
             initWithURL:[NSURL fileURLWithPath:[self tempFilePath]] 
              fileType:AVFileTypeQuickTimeMovie 
              error:&error]; 

    if (error) 
    { 
     NSLog(@"Couldn't create writer, %@", error.localizedDescription); 
     return; 
    } 

    NSDictionary *outputSettings = @{ 
      AVVideoCodecKey : AVVideoCodecH264, 
      AVVideoWidthKey : @640, 
      AVVideoHeightKey : @480 
    }; 

    self.assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo 
                    outputSettings:outputSettings]; 

    self.assetWriterVideoInput.expectsMediaDataInRealTime = YES; 

    NSDictionary *sourcePixelBufferAttributesDictionary = @{(id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA), 
                  (id) kCVPixelBufferWidthKey : @640, 
                  (id) kCVPixelBufferHeightKey : @480}; 

    self.assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterVideoInput 
                             sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary]; 

    self.assetWriterVideoInput.transform = CGAffineTransformMakeScale(1, -1); 

    if ([_assetWriter canAddInput:self.assetWriterVideoInput]) 
    { 
     [_assetWriter addInput:self.assetWriterVideoInput]; 
    } else 
    { 
     NSLog(@"can't add video writer input %@", self.assetWriterVideoInput); 
    } 
    /* 
    _assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil]; 
    if ([_assetWriter canAddInput:_assetWriterAudioInput]) { 
     [_assetWriter addInput:_assetWriterAudioInput]; 
     _assetWriterAudioInput.expectsMediaDataInRealTime = YES; 
    } 
    */ 
} 


- (void)writeMovieToLibraryWithPath:(NSURL *)path 
{ 
    NSLog(@"writing %@ to library", path); 
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; 
    [library writeVideoAtPathToSavedPhotosAlbum:path 
           completionBlock:^(NSURL *assetURL, NSError *error) { 
            if (error) 
            { 
             NSLog(@"Error saving to library%@", [error localizedDescription]); 
            } else 
            { 
             NSLog(@"SAVED %@ to photo lib", path); 
            } 
           }]; 
} 


////////////////////////////////////////////////////////////// 
#pragma mark touch handling 
////////////////////////////////////////////////////////////// 

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event 
{ 
    [super touchesEnded:touches withEvent:event]; 
    if (self.isRecording) 
    { 
     [self finishRecording]; 
    } else 
    { 
     [self startRecording]; 
    } 
} 

////////////////////////////////////////////////////////////// 
#pragma mark recording 
////////////////////////////////////////////////////////////// 


- (void)startRecording; 
{ 
    NSLog(@"started recording"); 
#warning debugging startrecording 
// NSLog(@"bypassing usual write method"); 
//  if (![assetWriter startWriting]){ 
//  NSLog(@"writer not started %@, %d", assetWriter.error, assetWriter.status); 
// } 
    self.startTime = CFAbsoluteTimeGetCurrent(); 

    [self createWriter]; 
    [self.assetWriter startWriting]; 
    [self.assetWriter startSessionAtSourceTime:kCMTimeZero]; 

    NSAssert([self.assetWriterPixelBufferInput pixelBufferPool], @"writer pixel buffer input has no pool"); 

    if (!_writerContext) 
    { 
     _writerContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; 
     if (!_writerContext || ![EAGLContext setCurrentContext:_writerContext]) 
     { 
      NSLog(@"Problem with OpenGL context."); 

      return; 
     } 
    } 
    [EAGLContext setCurrentContext:_writerContext]; 

    NSLog(@"Creating FBO"); 
    [self createDataFBOUsingGPUImagesMethod]; 
// [self createDataFBO]; 
    self.isRecording = YES; 
    NSLog(@"Recording is started"); 

    self.recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0/30.0 // note: 1/30 is integer division and yields 0 
                  target:self 
                 selector:@selector(tick:) 
                 userInfo:nil repeats:YES]; 
} 

- (void)tick:(id)tick 
{ 
    [self drawBasicGLTOFBOForWriting]; 
} 

- (void)finishRecording; 
{ 
    [self.recordingTimer invalidate]; 
    self.recordingTimer = nil; 

    NSLog(@"finished recording"); 
    if (self.assetWriter.status == AVAssetWriterStatusCompleted || !self.isRecording) 
    { 
     NSLog(@"already completed ingnoring"); 
     return; 
    } 

    NSLog(@"Asset writer writing"); 
    self.isRecording = NO; 
// runOnMainQueueWithoutDeadlocking(^{ 
    NSLog(@"markng inputs as finished"); 
    //TODO - these cause an error 
    [self.assetWriterVideoInput markAsFinished]; 
    __weak TestViewController *blockSelf = self; 

    [self.assetWriter finishWritingWithCompletionHandler:^{ 
     if (blockSelf.assetWriter.error == nil) 
     { 
      NSLog(@"saved ok - writing to lib"); 
      [blockSelf writeMovieToLibraryWithPath:[NSURL fileURLWithPath:[blockSelf tempFilePath]]]; 
     } else 
     { 
      NSLog(@"did not save due to error %@", blockSelf.assetWriter.error); 
     } 
    }]; 
// }); 
} 


- (void)drawBasicGLTOFBOForWriting 
{ 
    if (!self.isRecording) 
    { 
     return; 
    } 
    [EAGLContext setCurrentContext:_writerContext]; 
    if (!self.FBOGLEffect) 
    { 
     self.FBOGLEffect = [self createBasicDrawingEffectInCurrentContext]; 
    } 

    glDisable(GL_DEPTH_TEST); 
    glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer); 

    glClearColor(1, 1, 1, 1); 
    glClear(GL_COLOR_BUFFER_BIT); 

    [self.FBOGLEffect prepareToDraw]; 

    // Clear Frame Buffer (erase previous drawing) 
    // Enable use of positions from bound vertex buffer 
    glEnableVertexAttribArray(  // STEP 4 
      GLKVertexAttribPosition); 

    glVertexAttribPointer(   // STEP 5 
      GLKVertexAttribPosition, 
      3,     // three components per vertex 
      GL_FLOAT,   // data is floating point 
      GL_FALSE,   // no fixed point scaling 
      sizeof(SceneVertex), // no gaps in data 
      NULL);    // NULL tells GPU to start at 
    // beginning of bound buffer 

    // Draw triangles using the first three vertices in the 
    // currently bound vertex buffer 
    glDrawArrays(GL_TRIANGLES,  // STEP 6 
      0, // Start with first vertex in currently bound buffer 
      3); // Use three vertices from currently bound buffer 
    glFlush(); 


    CFAbsoluteTime interval = (CFAbsoluteTimeGetCurrent() - self.startTime) * 1000; 
    CMTime currentTime = CMTimeMake((int) interval, 1000); 
    [self writeToFileWithTime:currentTime]; 
} 

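// The texture cache ties the FBO's color attachment to the CVPixelBuffer's 
// memory, so once the GPU finishes (glFinish guarantees this; the glFlush 
// above only submits), the rendering lands in the buffer appended below. 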
- (void)writeToFileWithTime:(CMTime)time 
{ 
    if (!self.assetWriterVideoInput.readyForMoreMediaData) 
    { 
     NSLog(@"Had to drop a video frame"); 
     return; 
    } 
    if (kCVReturnSuccess == CVPixelBufferLockBaseAddress(_writerPixelBuffer, 
      kCVPixelBufferLock_ReadOnly)) 
    { 
     uint8_t *pixels = (uint8_t *) CVPixelBufferGetBaseAddress(_writerPixelBuffer); 
     // process pixels how you like! 
     BOOL success = [self.assetWriterPixelBufferInput appendPixelBuffer:_writerPixelBuffer 
                 withPresentationTime:time]; 
     NSLog(@"wrote at %@ : %@", CMTimeCopyDescription(NULL, time), success ? @"YES" : @"NO"); 
     CVPixelBufferUnlockBaseAddress(_writerPixelBuffer, kCVPixelBufferLock_ReadOnly); 
    } 
} 



////////////////////////////////////////////////////////////// 
#pragma mark FBO setup 
////////////////////////////////////////////////////////////// 

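// A note on the technique (mirroring GPUImage): a CVPixelBuffer is drawn from 
// the writer's pool, wrapped as an OpenGL texture through the 
// CVOpenGLESTextureCache, and attached as the FBO's color attachment, so GL 
// rendering lands directly in memory the AVAssetWriter can consume. 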
- (void)createDataFBOUsingGPUImagesMethod; 
{ 
    glActiveTexture(GL_TEXTURE1); 
    glGenFramebuffers(1, &_writerRenderFrameBuffer); 
    glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer); 

    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _writerContext, NULL, &_writerTextureCache); 

    if (err) 
    { 
     NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err); 
    } 

    // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ 


    CVPixelBufferPoolCreatePixelBuffer(NULL, [self.assetWriterPixelBufferInput pixelBufferPool], &_writerPixelBuffer); 

    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _writerTextureCache, _writerPixelBuffer, 
      NULL, // texture attributes 
      GL_TEXTURE_2D, 
      GL_RGBA, // opengl format 
      480, 
      320, 
      GL_BGRA, // native iOS format 
      GL_UNSIGNED_BYTE, 
      0, 
      &_writerTexture); 

    if (err) 
    { 
     NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); 
    } 


    glBindTexture(CVOpenGLESTextureGetTarget(_writerTexture), CVOpenGLESTextureGetName(_writerTexture)); 
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 

    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(_writerTexture), 0); 


    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); 

    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); 
} 


@end 

Answers

Four possibilities spring to mind:

  1. The viewport isn't the right size/shape or in the right place. Try calling glViewport somewhere before drawing anything (see the sketch after this list).

  2. The shaders are broken. I don't see any shader setup, so you may need to add a basic vertex/fragment shader pair that just pushes positions through the projection and modelview matrices and draws with a vertex color or fixed color.

  3. Your projection matrix is bad. Try a basic orthographic matrix first.

  4. Your modelview matrix is bad. If you can animate, try starting from the identity matrix and slowly rotating around the X axis, then the Y axis.
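To illustrate points 1, 3, and 4, here is a minimal sketch in the style of the question's code. It assumes the question's FBOGLEffect and the 640x480 size the writer was configured for; the helper name is hypothetical. GLKBaseEffect generates its own shaders, which should cover point 2 as long as -prepareToDraw runs after the transforms are set.

- (void)configureGLStateForWriterFBO // hypothetical helper, call after binding the writer FBO 
{ 
    // 1. Match the viewport to the pixel buffer backing the FBO 
    glViewport(0, 0, 640, 480); 

    // 3. Start with a plain orthographic projection covering clip space 
    self.FBOGLEffect.transform.projectionMatrix = 
      GLKMatrix4MakeOrtho(-1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f); 

    // 4. Start from the identity modelview before animating anything 
    self.FBOGLEffect.transform.modelviewMatrix = GLKMatrix4Identity; 
} 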

Another answer:

Make sure _writerPixelBuffer is not NULL.
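For instance, a minimal guard sketch reusing the question's buffer-creation call and checking its CVReturn (the comment about the pool is an assumption based on AVAssetWriter's documented behavior):

    CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(NULL, 
      [self.assetWriterPixelBufferInput pixelBufferPool], 
      &_writerPixelBuffer); 
    if (ret != kCVReturnSuccess || _writerPixelBuffer == NULL) 
    { 
     // The pool is NULL until -startWriting has been called, so a NULL 
     // buffer here usually means the writer session wasn't started yet 
     NSLog(@"Couldn't create pixel buffer from pool: %d", ret); 
     return; 
    } 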
