2013-08-06 60 views
1

制作视频,请找到下面的代码。应用程序崩溃时从图像

-(void) writeImagesAsMovie:(NSArray *)array toPath:(NSString*)path numPhoto:(NSInteger)totPics { 

    ALAsset *asset = [assets objectAtIndex:0]; 

    ALAssetRepresentation *assetRepresentation = [asset defaultRepresentation];  
    UIImage *getImage = [UIImage imageWithCGImage:[assetRepresentation fullScreenImage] scale:[assetRepresentation scale] orientation:(UIImageOrientation)[assetRepresentation orientation]]; 
    UIImage *first = [getImage imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)]; 

    CGSize frameSize = CGSizeMake(first.size.width,first.size.height); 

    NSLog(@"frameSize = %@",NSStringFromCGSize(frameSize)); 
    NSError *error = nil; 
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL: 
            [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie 
                   error:&error]; 

    if(error) { 
     NSLog(@"error creating AssetWriter: %@",[error description]); 
    } 
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
            AVVideoCodecH264, AVVideoCodecKey, 
            [NSNumber numberWithInt:frameSize.width], AVVideoWidthKey, 
            [NSNumber numberWithInt:frameSize.height], AVVideoHeightKey, 
            AVVideoScalingModeResizeAspect,AVVideoScalingModeKey, 
            nil]; 


    AVAssetWriterInput* writerInput = [AVAssetWriterInput 
             assetWriterInputWithMediaType:AVMediaTypeVideo 
             outputSettings:videoSettings]; 


    CGAffineTransform transform = CGAffineTransformIdentity; 
    UIImageOrientation orient = first.imageOrientation; 
    CGSize imageSize = first.size; 

    switch(orient) { 

     case UIImageOrientationUp: //EXIF = 1 
      transform = CGAffineTransformIdentity; 
      break; 

     case UIImageOrientationUpMirrored: //EXIF = 2 
      transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0); 
      transform = CGAffineTransformScale(transform, -1.0, 1.0); 
      break; 

     case UIImageOrientationDown: //EXIF = 3 
      transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height); 
      transform = CGAffineTransformRotate(transform, M_PI); 
      break; 

     case UIImageOrientationDownMirrored: //EXIF = 4 
      transform = CGAffineTransformMakeTranslation(0.0, imageSize.height); 
      transform = CGAffineTransformScale(transform, 1.0, -1.0); 
      break; 

     case UIImageOrientationLeftMirrored: //EXIF = 5 

      transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width); 
      transform = CGAffineTransformScale(transform, -1.0, 1.0); 
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI/2.0); 
      break; 

     case UIImageOrientationLeft: //EXIF = 6 

      transform = CGAffineTransformMakeTranslation(0.0, imageSize.width); 
      transform = CGAffineTransformRotate(transform, 3.0 * M_PI/2.0); 
      break; 

     case UIImageOrientationRightMirrored: //EXIF = 7 

      transform = CGAffineTransformMakeScale(-1.0, 1.0); 
      transform = CGAffineTransformRotate(transform, M_PI/2.0); 
      break; 

     case UIImageOrientationRight: //EXIF = 8 

      transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0); 
      transform = CGAffineTransformRotate(transform, M_PI/2.0); 
      break; 

     default: 
      [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"]; 
    } 


    writerInput.transform = transform; 

    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init]; 
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey]; 
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey]; 
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey]; 

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor 
                assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput 
                sourcePixelBufferAttributes:attributes]; 

    [videoWriter addInput:writerInput]; 

    // fixes all errors 
    writerInput.expectsMediaDataInRealTime = YES; 

    //Start a session: 
    [videoWriter startWriting]; 

    [videoWriter startSessionAtSourceTime:kCMTimeZero]; 

    CVPixelBufferRef buffer = NULL; 
    buffer = [self pixelBufferFromCGImage:[first CGImage]]; 
    BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero]; 

    if (result == NO) 
     NSLog(@"failed to append buffer"); 

    if(buffer) { 
     CVBufferRelease(buffer); 
    } 

    int fps = 2; 

    for(int i=0; i<totPics; i++) 
    { 

     if (adaptor.assetWriterInput.readyForMoreMediaData) { 


      CMTime frameTime = CMTimeMake(1, fps); 
      CMTime lastTime = CMTimeMake(i, fps); 


      CMTime presentTime = CMTimeAdd(lastTime, frameTime); 
      NSLog(@"presentTime = %f",CMTimeGetSeconds(presentTime)); 


      ALAsset *asset = [assets objectAtIndex:i]; 
      ALAssetRepresentation *assetRepresentation = [asset defaultRepresentation]; 
      UIImage *imgGetFrame = [UIImage imageWithCGImage:[assetRepresentation fullScreenImage] scale:[assetRepresentation scale] orientation:(UIImageOrientation)[assetRepresentation orientation]]; 
      UIImage *imgFrame = [imgGetFrame imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)]; 

      buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]]; 
      BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime]; 

      if (result == NO) //failes on 3GS, but works on iphone 4 
      { 
       NSLog(@"failed to append buffer"); 
       NSLog(@"The error is %@", [videoWriter error]); 
      } 

      if(buffer) { 
       CVBufferRelease(buffer); 
      } 


     } else { 
      NSLog(@"error"); 
     }   
    } 

    //Finish the session: 
    [writerInput markAsFinished]; 
    [videoWriter finishWritingWithCompletionHandler:^{ 

     NSLog(@"Complete"); 
    }]; 

    CVPixelBufferPoolRelease(adaptor.pixelBufferPool); 
}  

-(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image { 
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: 
         [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
         [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, 
         nil]; 
CVPixelBufferRef pxbuffer = NULL; 

CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image), 
        CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options, 
        &pxbuffer); 

CVPixelBufferLockBaseAddress(pxbuffer, 0); 
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 

CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image), 
              CGImageGetHeight(image), 8, 4*CGImageGetWidth(image), rgbColorSpace, 
              kCGImageAlphaNoneSkipFirst); 

CGContextConcatCTM(context, CGAffineTransformMakeRotation(0)); 


CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
             CGImageGetHeight(image)), image); 
CGColorSpaceRelease(rgbColorSpace); 
CGContextRelease(context); 

CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 

return pxbuffer; 
} 

裁剪图像方法

-(UIImage*)imageByScalingAndCroppingForSize:(CGSize)targetSize { 
UIImage *sourceImage = self; 
UIImage *newImage = nil; 
CGSize imageSize = sourceImage.size; 
CGFloat width = imageSize.width; 
CGFloat height = imageSize.height; 
CGFloat targetWidth = targetSize.width; 
CGFloat targetHeight = targetSize.height; 
CGFloat scaleFactor = 0.0; 
CGFloat scaledWidth = targetWidth; 
CGFloat scaledHeight = targetHeight; 
CGPoint thumbnailPoint = CGPointMake(0.0,0.0); 

if (CGSizeEqualToSize(imageSize, targetSize) == NO) 
{ 
    CGFloat widthFactor = targetWidth/width; 
    CGFloat heightFactor = targetHeight/height; 

    if (widthFactor > heightFactor) 
    { 
     scaleFactor = widthFactor; // scale to fit height 
    } 
    else 
    { 
     scaleFactor = heightFactor; // scale to fit width 
    } 

    scaledWidth = width * scaleFactor; 
    scaledHeight = height * scaleFactor; 

    // center the image 
    if (widthFactor > heightFactor) 
    { 
     thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5; 
    } 
    else 
    { 
     if (widthFactor < heightFactor) 
     { 
      thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5; 
     } 
    } 
} 

UIGraphicsBeginImageContext(targetSize); // this will crop 

CGRect thumbnailRect = CGRectZero; 
thumbnailRect.origin = thumbnailPoint; 
thumbnailRect.size.width = scaledWidth; 
thumbnailRect.size.height = scaledHeight; 

[sourceImage drawInRect:thumbnailRect]; 

newImage = UIGraphicsGetImageFromCurrentImageContext(); 

if(newImage == nil) { 
    NSLog(@"could not scale image"); 
} 

//pop the context to get back to the default 
UIGraphicsEndImageContext(); 

return newImage; 
} 

我有来自用户的照片库通过图像和作物之后添加图像它720×960

当我接受100个图像,然后我有记忆警告错误。另外,当我在乐器中查看应用程序时,大约需要400 MB。所以如果有人知道我做错了什么,请帮助我。

+0

我想你的图像尺寸大。 –

+0

更新与错误日志你的问题。 – Tirth

+0

@DarshanKunjadiya:我将图像从856 x 1196剪裁到720 x 960.我已经检查了以kb为单位的图像大小。 –

回答

1

的问题是,您使用的是在你的视频处理循环中的所有应用程序的内存。在同一时间,你不能只是分配数百个内存的图片,当你的iOS设备上运行的代码会崩溃。请阅读我的博客文章video_and_memory_usage_on_ios_devices,然后更改for循环,以便为循环的每次迭代创建一个自动释放池,以修复内存使用失控的问题。还要注意的是kCVPixelFormatType_32ARGB会很慢,你应该使用kCVPixelFormatType_32BGRA