0
我使用 VideoToolbox 对数据进行编码和解码，得到解码后的数据之后，直接将其传递给 AVSampleBufferDisplayLayer 显示。
问题：VideoToolbox 解码的图像在 OSX Yosemite (10.10) 上显示时出现闪烁。
以下是代码片段：
// Wraps one compressed packet (vp->data / vp->size) in a CMSampleBuffer and
// enqueues it on the AVSampleBufferDisplayLayer for immediate display.
//
// @param vp  The compressed packet; its data is wrapped, NOT copied, so it
//            must stay valid until the sample buffer is released.
// @return    Always NULL — outputPixelBuffer is never assigned anywhere in
//            this method (callers must not rely on the return value).
- (CVPixelBufferRef)decode:(AVPacket *)vp {
    CVPixelBufferRef outputPixelBuffer = NULL;
    CMBlockBufferRef blockBuffer = NULL;

    // Wrap the packet bytes without copying; kCFAllocatorNull means the block
    // buffer does NOT take ownership of vp->data.
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
                                                         (void *)vp->data, vp->size,
                                                         kCFAllocatorNull,
                                                         NULL, 0, vp->size,
                                                         0, &blockBuffer);
    if (status == kCMBlockBufferNoErr) {
        CMSampleBufferRef sampleBuffer = NULL;
        const size_t sampleSizeArray[] = {vp->size};
        status = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                           blockBuffer,
                                           formatDesc,
                                           1, 0, NULL, 1, sampleSizeArray,
                                           &sampleBuffer);
        // BUG FIX: compare against noErr — kCMBlockBufferNoErr belongs to the
        // CMBlockBuffer error domain, not to CMSampleBufferCreateReady. (Both
        // happen to be 0, but relying on that is fragile.)
        if (status == noErr && sampleBuffer) {
            // Mark the sample for immediate display (no timing information is
            // attached to these samples).
            CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
            CFMutableDictionaryRef dict =
                (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
            CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);

            // BUG FIX: the original called [displayLayer flush] before every
            // enqueue. flush discards the layer's pending/current imagery, so
            // doing it per frame briefly blanks the layer — a classic cause of
            // the flicker reported on 10.10. Just enqueue; the layer swaps
            // frames itself. (NOTE(review): the poster's own fix was to stop
            // setting controlTimebase — confirm that elsewhere.)
            if ([displayLayer isReadyForMoreMediaData]) {
                [displayLayer enqueueSampleBuffer:sampleBuffer];
            }
            [displayLayer setNeedsDisplay];
            [self performSelectorOnMainThread:@selector(setView:)
                                   withObject:[[RPCViewerAppInitializer sharedRPCViewerAppInitializer] rpcView]
                                waitUntilDone:NO];
            CFRelease(sampleBuffer);
        }
        // BUG FIX: release blockBuffer on every path that created it — the
        // original released it only on the success path and leaked it
        // whenever CMSampleBufferCreateReady failed.
        CFRelease(blockBuffer);
    }
    return outputPixelBuffer;
}
上面的代码在 10.10 之前的 OSX 上工作正常，但在 10.10 (Yosemite) 上屏幕出现闪烁。
任何帮助将不胜感激。
后来我自己找到了答案 :) –
谜底到底是什么? !将它添加到此处并将其标记为已回答! –
原因是我之前把 AVSampleBufferDisplayLayer 的 controlTimebase 属性设置成了零；现在我完全不设置这个属性，闪烁问题就消失了。 –