
AVMutableComposition rotates the recorded video

I'm building an iOS app with the AVFoundation framework, Objective-C, and Xcode 7.1. What I'm doing is taking a recorded video, adding a text layer on top, and exporting it. My problem is that when I record in portrait, the exported video comes out in landscape! I've been struggling with this for hours and can't solve it. I found similar questions, but none of the answers helped me. Here is my code:

AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:self.videoURL options:nil]; 
AVMutableComposition* mixComposition = [AVMutableComposition composition]; 

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 

AVMutableCompositionTrack* compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 

CMTime insertTime = kCMTimeZero; 
insertTime = CMTimeAdd(insertTime, compositionVideoTrack.timeRange.duration); 

AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 


// [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) 
//        ofTrack:clipVideoTrack 
//        atTime:kCMTimeZero error:nil]; 

AVAssetTrack* audioAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 

[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, clipVideoTrack.timeRange.duration) ofTrack:clipVideoTrack atTime:insertTime error:nil]; 
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAssetTrack.timeRange.duration) ofTrack:audioAssetTrack atTime:insertTime error:nil]; 

[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]]; 


CGSize videoSize = [clipVideoTrack naturalSize]; 
CALayer *parentLayer = [CALayer layer]; 
CALayer *videoLayer = [CALayer layer]; 
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 

CATextLayer *subtitle1Text = [[CATextLayer alloc] init]; 
[subtitle1Text setFont:@"Helvetica"]; 
[subtitle1Text setFontSize:32]; 
[subtitle1Text setFrame:CGRectMake(0, 0, videoSize.width, 100)]; 
[subtitle1Text setString:self.titleField.text]; 
[subtitle1Text setAlignmentMode:kCAAlignmentCenter]; 
[subtitle1Text setForegroundColor:[[UIColor blackColor] CGColor]]; 
subtitle1Text.backgroundColor = [UIColor whiteColor].CGColor; 

CALayer *overlayLayer = [CALayer layer]; 
[overlayLayer addSublayer:subtitle1Text]; 
overlayLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
[overlayLayer setMasksToBounds:YES]; 

parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
[parentLayer addSublayer:videoLayer]; 
[parentLayer addSublayer:overlayLayer]; 
float startTime = 0.5f; 
float duration = 4.0f; 
overlayLayer.opacity = 0.0; 
CABasicAnimation *myAnimation = [CABasicAnimation animationWithKeyPath:@"opacity"]; 
[myAnimation setBeginTime: startTime]; 
[myAnimation setDuration: duration]; 
[myAnimation setFromValue:[NSNumber numberWithFloat:0.5]]; 
[myAnimation setToValue:[NSNumber numberWithFloat:1.0]]; 
[myAnimation setRemovedOnCompletion:NO]; 

[overlayLayer addAnimation:myAnimation forKey:@"myUniqueAnimationKey"]; 

AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition]; 
videoComp.renderSize = videoSize; 
videoComp.frameDuration = CMTimeMake(1, 30); 
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool  videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; 

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]); 
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack]; 
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction]; 
videoComp.instructions = [NSArray arrayWithObject: instruction]; 


NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
NSString *documentsDirectory = [paths objectAtIndex:0]; 
NSString* videoFileName = [NSString stringWithFormat:@"textOverlayVideo-%@.mov",[NSDate date]]; 
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:videoFileName]; 


// fileURL and imageData are assumed to be instance variables declared elsewhere in this class.
fileURL = [NSURL fileURLWithPath:myPathDocs]; 


AVAssetExportSession* exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 

exporter.outputURL = fileURL; 
exporter.videoComposition = videoComp; 
exporter.outputFileType = AVFileTypeQuickTimeMovie; 
exporter.shouldOptimizeForNetworkUse = YES; 


[exporter exportAsynchronouslyWithCompletionHandler:^{ 

    dispatch_async(dispatch_get_main_queue(), ^{ 

     switch (exporter.status) 
     { 
      case AVAssetExportSessionStatusFailed: 
       NSLog(@"AVAssetExportSessionStatusFailed"); 
       break; 
      case AVAssetExportSessionStatusCompleted: 

       NSLog(@"AVAssetExportSessionStatusCompleted"); 
       //[self syncFile]; 

       imageData = [NSData dataWithContentsOfURL:fileURL]; 
       [self send]; 

       break; 
      case AVAssetExportSessionStatusWaiting: 
       NSLog(@"AVAssetExportSessionStatusWaiting"); 
       break; 
      default: 
       break; 
     } 
    }); 

}]; 
}

Thank you, everyone.

Answers


Try setting

compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform; 

I tried that, but it's still the same. – Katerina
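
A note on why that alone may not be enough: the question's code already copies the source track's preferredTransform onto the composition track, and once an AVVideoComposition is attached to the export session the compositor renders the frames itself, so that track-level transform is not applied to the output. In that case the rotation has to go through the layer instruction, and the render size has to be swapped for portrait footage. A minimal sketch, reusing the layerInstruction, clipVideoTrack and videoComp objects from the question's code:

// Apply the recorded orientation through the layer instruction so the
// compositor honours it (same objects as in the question's code).
[layerInstruction setTransform:clipVideoTrack.preferredTransform atTime:kCMTimeZero];

// A ±90° preferredTransform means portrait footage; naturalSize is reported
// in landscape, so swap width and height for the render size.
if (clipVideoTrack.preferredTransform.b != 0) {
    CGSize natural = clipVideoTrack.naturalSize;
    videoComp.renderSize = CGSizeMake(natural.height, natural.width);
}

This is essentially what the answer below does, with the orientation check written out case by case.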


Here is the solution:

-(void)addTextOverlayToVideo 
{ 
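// NOTE: avAsset is assumed to be the source AVAsset for the recorded clip
// (e.g. an instance variable created from the recorded file's URL).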

// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances. 
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; 

// 3 - Video track 
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo 
                    preferredTrackID:kCMPersistentTrackID_Invalid]; 
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) 
        ofTrack:[[avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] 
        atTime:kCMTimeZero error:nil]; 

// 3.1 - Create AVMutableVideoCompositionInstruction 
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, avAsset.duration); 

// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation. 
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack]; 
AVAssetTrack *videoAssetTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp; 
BOOL isVideoAssetPortrait_ = NO; 
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform; 
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) { 
    videoAssetOrientation_ = UIImageOrientationRight; 
    isVideoAssetPortrait_ = YES; 
} 
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) { 
    videoAssetOrientation_ = UIImageOrientationLeft; 
    isVideoAssetPortrait_ = YES; 
} 
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) { 
    videoAssetOrientation_ = UIImageOrientationUp; 
} 
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) { 
    videoAssetOrientation_ = UIImageOrientationDown; 
} 
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero]; 
[videolayerInstruction setOpacity:0.0 atTime:avAsset.duration]; 

// 3.3 - Add instructions 
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil]; 

AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition]; 

CGSize naturalSize; 
if(isVideoAssetPortrait_){ 
    naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width); 
} else { 
    naturalSize = videoAssetTrack.naturalSize; 
} 

float renderWidth, renderHeight; 
renderWidth = naturalSize.width; 
renderHeight = naturalSize.height; 
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight); 
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction]; 
mainCompositionInst.frameDuration = CMTimeMake(1, 30); 

CGSize videoSize = naturalSize; 
CALayer *parentLayer = [CALayer layer]; 
CALayer *videoLayer = [CALayer layer]; 
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 

CATextLayer *subtitle1Text = [[CATextLayer alloc] init]; 
[subtitle1Text setFont:@"Helvetica"]; 
[subtitle1Text setFontSize:22]; 
[subtitle1Text setFrame:CGRectMake(0, 0, videoSize.width, 100)]; 
[subtitle1Text setString:self.titleField.text]; 
[subtitle1Text setAlignmentMode:kCAAlignmentCenter]; 
[subtitle1Text setForegroundColor:[[UIColor blackColor] CGColor]]; 
subtitle1Text.backgroundColor = [UIColor whiteColor].CGColor; 

CALayer *overlayLayer = [CALayer layer]; 
[overlayLayer addSublayer:subtitle1Text]; 
overlayLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
[overlayLayer setMasksToBounds:YES]; 

parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
[parentLayer addSublayer:videoLayer]; 
[parentLayer addSublayer:overlayLayer]; 
float startTime = 0.5f; 
float duration = 4.0f; 
overlayLayer.opacity = 0.0; 
CABasicAnimation *myAnimation = [CABasicAnimation animationWithKeyPath:@"opacity"]; 
[myAnimation setBeginTime: startTime]; 
[myAnimation setDuration: duration]; 
[myAnimation setFromValue:[NSNumber numberWithFloat:0.5]]; 
[myAnimation setToValue:[NSNumber numberWithFloat:1.0]]; 
[myAnimation setRemovedOnCompletion:NO]; 

[overlayLayer addAnimation:myAnimation forKey:@"myUniqueAnimationKey"]; 


mainCompositionInst.renderSize = videoSize; 
mainCompositionInst.frameDuration = CMTimeMake(1, 30); 
mainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool  videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; 
//[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize]; 

// 4 - Get path 
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
NSString *documentsDirectory = [paths objectAtIndex:0]; 
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent: 
         [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]]; 
NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 

// 5 - Create exporter 
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition 
                    presetName:AVAssetExportPresetHighestQuality]; 
exporter.outputURL=url; 
exporter.outputFileType = AVFileTypeQuickTimeMovie; 
exporter.shouldOptimizeForNetworkUse = YES; 
exporter.videoComposition = mainCompositionInst; 



[exporter exportAsynchronouslyWithCompletionHandler:^{ 

    dispatch_async(dispatch_get_main_queue(), ^{ 

     switch (exporter.status) 
     { 
      case AVAssetExportSessionStatusFailed: 
       NSLog(@"AVAssetExportSessionStatusFailed"); 
       break; 
      case AVAssetExportSessionStatusCompleted: 

       NSLog(@"AVAssetExportSessionStatusCompleted"); 
       //[self syncFile]; 

       imageData = [NSData dataWithContentsOfURL:url]; 
       [self send]; 

       break; 
      case AVAssetExportSessionStatusWaiting: 
       NSLog(@"AVAssetExportSessionStatusWaiting"); 
       break; 
      default: 
       break; 
     } 
    }); 

}]; 

}
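
The orientation handling above comes down to reading the track's preferredTransform: a ±90° rotation means the clip was recorded in portrait, and isVideoAssetPortrait_ then decides whether naturalSize is swapped when building the render size (videoAssetOrientation_ itself is never read again). A condensed sketch of the same test, written as a hypothetical helper rather than part of the answer's code:

#import <AVFoundation/AVFoundation.h>
#include <math.h>

// YES when the track's preferredTransform is a ±90° rotation, i.e. the clip
// was recorded in portrait and its width/height need to be swapped for rendering.
static BOOL trackIsPortrait(AVAssetTrack *track)
{
    CGAffineTransform t = track.preferredTransform;
    return (t.a == 0 && t.d == 0 && fabs(t.b) == 1.0 && fabs(t.c) == 1.0);
}

With that helper, naturalSize would simply be swapped whenever trackIsPortrait(videoAssetTrack) returns YES, which is what the isVideoAssetPortrait_ branch does above.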


I don't understand how you solved it, because I can't see how 'videoAssetOrientation_' and 'isVideoAssetPortrait_' are used. In any case, could you accept your own answer? –