2016-09-28 56 views
1

当我在ios 9中将两个视频与AVAssetExportSession混合时,它的工作完美无缺。但是当我在iOS 10中与AVAssetExportSession混合时,它不起作用。如果有任何知道的原因,请帮助我,谢谢。ios 10中的AVAssetExportSession不适用于iPhone 7

实际上这段代码在 iPhone 6S 及更早的机型上可以正常工作,但在 iPhone 7 上不行。

工作例如

/// Overlays the live-effect ("blend") video on top of the main video at 50% opacity,
/// scales the overlay to fit the main video's frame, mixes in the main video's audio
/// track (if any), and asynchronously exports the result as a randomly named .mov in
/// the app's Documents directory. Progress UI and completion are delegated to
/// -createMBCircularProgress: and -exportDidFinish: (defined elsewhere in this class).
/// @param mainVideoUrl  File URL of the base video; its audio track is reused.
/// @param liveEffectUrl File URL of the overlay (blend) video.
- (void)blendVideoOverVideo:(NSURL *)mainVideoUrl andBlendVideoUrl:(NSURL *)liveEffectUrl
{
    AVURLAsset *mainVideoAsset = [AVURLAsset URLAssetWithURL:mainVideoUrl options:nil];
    AVAssetTrack *mainVideoTrack = [[mainVideoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (mainVideoTrack == nil) {
        NSLog(@"blendVideoOverVideo: main video has no video track: %@", mainVideoUrl);
        return;
    }
    CGSize mainVideoSize = mainVideoTrack.naturalSize;

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    NSError *editError = nil;

    // Reuse the main video's audio track, if it has one.
    AVAssetTrack *audioTrack = [[mainVideoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack != nil) {
        AVMutableCompositionTrack *audioCompositionTrack =
            [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                        preferredTrackID:kCMPersistentTrackID_Invalid];
        // FIX: the original passed error:nil everywhere, silently swallowing exactly
        // the kind of editing failure this code was reported to have on iOS 10.
        if (![audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoAsset.duration)
                                            ofTrack:audioTrack
                                             atTime:kCMTimeZero
                                              error:&editError]) {
            NSLog(@"blendVideoOverVideo: could not insert audio track: %@", editError);
        }
    }

    // Main (base) video track.
    AVMutableCompositionTrack *mainCompositionTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    if (![mainCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoAsset.duration)
                                       ofTrack:mainVideoTrack
                                        atTime:kCMTimeZero
                                         error:&editError]) {
        NSLog(@"blendVideoOverVideo: could not insert main video track: %@", editError);
        return;
    }
    AVMutableVideoCompositionLayerInstruction *mainLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mainCompositionTrack];

    // Overlay (blend) video track.
    AVURLAsset *blendVideoAsset = [AVURLAsset URLAssetWithURL:liveEffectUrl options:nil];
    AVAssetTrack *blendVideoTrack = [[blendVideoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (blendVideoTrack == nil) {
        NSLog(@"blendVideoOverVideo: blend video has no video track: %@", liveEffectUrl);
        return;
    }
    CGSize blendVideoSize = blendVideoTrack.naturalSize;

    AVMutableCompositionTrack *blendCompositionTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    if (![blendCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, blendVideoAsset.duration)
                                        ofTrack:blendVideoTrack
                                         atTime:kCMTimeZero
                                          error:&editError]) {
        NSLog(@"blendVideoOverVideo: could not insert blend video track: %@", editError);
        return;
    }
    AVMutableVideoCompositionLayerInstruction *blendLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:blendCompositionTrack];

    AVMutableVideoCompositionInstruction *mainInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainVideoAsset.duration);

    // Base video: identity transform (the original concatenated a unit scale with a
    // zero translation, which is the same thing).
    [mainLayerInstruction setTransform:CGAffineTransformIdentity atTime:kCMTimeZero];

    // Overlay at half opacity, scaled down to fit the main video's frame.
    [blendLayerInstruction setOpacity:0.5 atTime:kCMTimeZero];

    // FIX: the original's height if/else had identical branches — the Y scale is
    // always the main/blend height ratio. Guard both ratios against a zero-sized
    // track to avoid producing NaN transforms. Width is only scaled down when the
    // overlay is wider than the base (original behavior preserved).
    CGFloat cropScaleY = (blendVideoSize.height > 0.0)
        ? mainVideoSize.height / blendVideoSize.height
        : 1.0;
    CGFloat cropScaleX = (blendVideoSize.width > mainVideoSize.width && blendVideoSize.width > 0.0)
        ? mainVideoSize.width / blendVideoSize.width
        : 1.0;
    CGAffineTransform blendTransform =
        CGAffineTransformConcat(CGAffineTransformMakeScale(cropScaleX, cropScaleY),
                                CGAffineTransformMakeTranslation(0.1, 0.1));
    [blendLayerInstruction setTransform:blendTransform atTime:kCMTimeZero];

    // Overlay listed first so it renders above the base track.
    mainInstruction.layerInstructions = @[blendLayerInstruction, mainLayerInstruction];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = @[mainInstruction];
    videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
    videoComposition.renderSize = mainVideoSize;

    // Output file in Documents with a random name; remove any stale file first
    // (AVAssetExportSession fails if the output URL already exists).
    NSString *fileName = [NSString stringWithFormat:@"video%d.mov", arc4random() % 1000];
    NSString *documentsDirectory =
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *outputPath = [documentsDirectory stringByAppendingPathComponent:fileName];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
    }

    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetMediumQuality];
    // FIX: on environments where the preset list is empty (e.g. the iPhone 7
    // simulator under Xcode 8.0, the bug this question is about) the session is nil;
    // fail loudly instead of silently messaging nil for the rest of the method.
    if (exporter == nil) {
        NSLog(@"blendVideoOverVideo: could not create AVAssetExportSession (preset unavailable)");
        return;
    }
    exporter.outputURL = [NSURL fileURLWithPath:outputPath];

    NSLog(@"Main Video dura %f blend dura - %f, ",
          CMTimeGetSeconds(mainVideoAsset.duration),
          CMTimeGetSeconds(blendVideoAsset.duration));

    // FIX: the original declared `start`/`duration` without initializing them and
    // only assigned them in strict greater-than branches — when both assets had
    // exactly the same duration the export time range was built from garbage stack
    // values. The range is always [0, main duration]; only the timescale choice
    // differed between branches, which is preserved here.
    CMTimeScale rangeTimescale =
        (CMTimeGetSeconds(blendVideoAsset.duration) > CMTimeGetSeconds(mainVideoAsset.duration))
            ? blendVideoAsset.duration.timescale
            : mainVideoAsset.duration.timescale;
    CMTime start = CMTimeMakeWithSeconds(0.0, rangeTimescale);
    CMTime duration = CMTimeMakeWithSeconds(CMTimeGetSeconds(mainVideoAsset.duration), rangeTimescale);
    exporter.timeRange = CMTimeRangeMake(start, duration);
    exporter.videoComposition = videoComposition;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [self createMBCircularProgress:exporter];

    // Weak self in the escaping completion handler to avoid retaining self for the
    // lifetime of the export; UI callback hops back to the main queue.
    __weak typeof(self) weakSelf = self;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [weakSelf exportDidFinish:exporter];
        });
    }];
}

这段代码在 iOS 9 乃至 iOS 10 的 iPhone 6S、6、5 等设备上都能运行,但无法在 iPhone 7 模拟器上运行。

解决方案是:需要使用最新的 Xcode 8.1 测试版来运行。

+1

分享代码! – voromax

+0

即使是很简单的代码,在 iOS 10 上也无法正常工作 –

+0

实际代码适用于iphone 6s和更早版本,但不适用于iPhone 7 –

回答

2
It's a bug. 

It's fixed in Xcode 8.1 beta. 

在 Xcode 8.1 测试版中,iPhone 7 模拟器上的 [AVAssetExportSession allExportPresets] 现在返回:

AVAssetExportPreset1920x1080, 
AVAssetExportPresetLowQuality, 
AVAssetExportPresetAppleM4A, 
AVAssetExportPreset640x480, 
AVAssetExportPreset3840x2160, 
AVAssetExportPresetHighestQuality, 
AVAssetExportPreset1280x720, 
AVAssetExportPresetMediumQuality, 
AVAssetExportPreset960x540 

而在 Xcode 8.0 中,iPhone 7 模拟器上的 [AVAssetExportSession allExportPresets] 返回一个空数组。

+1

很高兴看到你找到了答案,我们都很感激。但目前尚不清楚你问的是什么。请更新你的问题,以便这个问答可以帮助其他人。请在问题中贴出代码和错误消息。 – pedrouan

+0

问题很简单,AVAssetExportSession不能在iPhone中工作 –

+0

答案很简单,会在XCode beta 8.1中运行 –

相关问题