ios - iOS 10中的AVAssetExportSession不适用于iPhone 7

标签 ios objective-c xcode ios10 avassetexportsession

当我在ios 9中将两个视频与AVAssetExportSession混合时,它的运行效果非常好。但是当我在iOS 10中与AVAssetExportSession混合时,它无法正常工作。如果您知道原因,请帮助我,谢谢。

实际上代码适用于iPhone 6s和更早版本,但不适用于iPhone 7

例如

/// Overlays the "live effect" video on top of the main video at 50% opacity,
/// scaled to the main video's frame, and asynchronously exports the result as
/// a QuickTime movie into the app's Documents directory.
///
/// @param mainVideoUrl  File URL of the base video (its audio, if any, is kept).
/// @param liveEffectUrl File URL of the overlay video to blend on top.
///
/// NOTE(review): on iOS 10 with Xcode 8.0, +[AVAssetExportSession allExportPresets]
/// returns an empty array on the iPhone 7 simulator, so session creation fails
/// there; this is fixed in Xcode 8.1 beta (see the surrounding discussion).
- (void)blendVideoOverVideo:(NSURL *)mainVideoUrl andBlendVideoUrl:(NSURL *)liveEffectUrl
{
    if (mainVideoUrl == nil || liveEffectUrl == nil) {
        NSLog(@"blendVideoOverVideo: missing input URL");
        return;
    }

    NSError *error = nil;

    // --- Main (base) video -------------------------------------------------
    AVURLAsset *mainAsset = [AVURLAsset URLAssetWithURL:mainVideoUrl options:nil];
    AVAssetTrack *mainVideoTrack = [[mainAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (mainVideoTrack == nil) {
        NSLog(@"blendVideoOverVideo: main asset has no video track");
        return;
    }
    CGSize mainVideoSize = mainVideoTrack.naturalSize;

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // Carry the main video's audio (if present) into the composition.
    AVAssetTrack *audioTrack = [[mainAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack != nil) {
        AVMutableCompositionTrack *audioCompositionTrack =
            [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                        preferredTrackID:kCMPersistentTrackID_Invalid];
        if (![audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainAsset.duration)
                                            ofTrack:audioTrack
                                             atTime:kCMTimeZero
                                              error:&error]) {
            // Non-fatal: export a silent movie rather than aborting.
            NSLog(@"blendVideoOverVideo: could not insert audio track: %@", error);
        }
    }

    AVMutableCompositionTrack *mainCompositionTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    if (![mainCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainAsset.duration)
                                       ofTrack:mainVideoTrack
                                        atTime:kCMTimeZero
                                         error:&error]) {
        NSLog(@"blendVideoOverVideo: could not insert main video track: %@", error);
        return;
    }
    AVMutableVideoCompositionLayerInstruction *mainLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mainCompositionTrack];

    // --- Overlay (blend) video ---------------------------------------------
    AVURLAsset *blendAsset = [AVURLAsset URLAssetWithURL:liveEffectUrl options:nil];
    AVAssetTrack *blendVideoTrack = [[blendAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (blendVideoTrack == nil) {
        NSLog(@"blendVideoOverVideo: blend asset has no video track");
        return;
    }
    CGSize blendVideoSize = blendVideoTrack.naturalSize;

    AVMutableCompositionTrack *blendCompositionTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    if (![blendCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, blendAsset.duration)
                                        ofTrack:blendVideoTrack
                                         atTime:kCMTimeZero
                                          error:&error]) {
        NSLog(@"blendVideoOverVideo: could not insert blend video track: %@", error);
        return;
    }
    AVMutableVideoCompositionLayerInstruction *blendLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:blendCompositionTrack];

    // --- Layer instructions -------------------------------------------------
    AVMutableVideoCompositionInstruction *mainInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainAsset.duration);

    // Main video stays untransformed (scale 1, translation 0 == identity).
    [mainLayerInstruction setTransform:CGAffineTransformIdentity atTime:kCMTimeZero];

    // Overlay renders at 50% opacity on top of the main video.
    [blendLayerInstruction setOpacity:0.5 atTime:kCMTimeZero];

    // Scale the overlay toward the main video's frame. The original code had
    // identical then/else branches for Y, so the overlay is always scaled
    // vertically but only scaled horizontally when it is wider than the main
    // video — behavior preserved, with guards against zero-size tracks.
    CGFloat cropOffY = (blendVideoSize.height != 0.0)
        ? mainVideoSize.height / blendVideoSize.height
        : 1.0;
    CGFloat cropOffX = 1.0;
    if (blendVideoSize.width > mainVideoSize.width && blendVideoSize.width != 0.0) {
        cropOffX = mainVideoSize.width / blendVideoSize.width;
    }
    CGAffineTransform blendTransform =
        CGAffineTransformConcat(CGAffineTransformMakeScale(cropOffX, cropOffY),
                                CGAffineTransformMakeTranslation(0.1, 0.1));
    [blendLayerInstruction setTransform:blendTransform atTime:kCMTimeZero];

    // Order matters: first instruction is rendered on top.
    mainInstruction.layerInstructions = @[blendLayerInstruction, mainLayerInstruction];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = @[mainInstruction];
    videoComposition.frameDuration = CMTimeMake(1, 30);  // 30 fps
    videoComposition.renderSize = mainVideoSize;

    // --- Output file ---------------------------------------------------------
    // Pseudo-unique name in Documents; delete any stale file at that path.
    NSString *fileName = [NSString stringWithFormat:@"video%d.mov", arc4random() % 1000];
    NSString *documentsDirectory =
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *outputPath = [documentsDirectory stringByAppendingPathComponent:fileName];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
    }
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];

    // --- Export --------------------------------------------------------------
    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetMediumQuality];
    if (exporter == nil) {
        // On the iPhone 7 simulator with Xcode 8.0 no export presets exist,
        // so initialization fails here instead of crashing later.
        NSLog(@"blendVideoOverVideo: could not create AVAssetExportSession");
        return;
    }
    exporter.outputURL = outputURL;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.videoComposition = videoComposition;

    NSLog(@"Main video dura %f blend dura - %f, ",
          CMTimeGetSeconds(mainAsset.duration),
          CMTimeGetSeconds(blendAsset.duration));

    // The export always covers the main video's full length; pick the longer
    // asset's timescale, as the original did. BUGFIX: the original left
    // start/duration uninitialized (undefined behavior) when the two assets
    // had exactly equal durations — this path now always assigns both.
    CMTimeScale timescale =
        (CMTimeGetSeconds(blendAsset.duration) > CMTimeGetSeconds(mainAsset.duration))
            ? blendAsset.duration.timescale
            : mainAsset.duration.timescale;
    CMTime start = CMTimeMakeWithSeconds(0.0, timescale);
    CMTime duration = CMTimeMakeWithSeconds(CMTimeGetSeconds(mainAsset.duration), timescale);
    exporter.timeRange = CMTimeRangeMake(start, duration);

    [self createMBCircularProgress:exporter];

    __weak typeof(self) weakSelf = self;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // UI work (exportDidFinish:) must run on the main queue.
        dispatch_async(dispatch_get_main_queue(), ^{
            [weakSelf exportDidFinish:exporter];
        });
    }];
}

此代码在 iPhone 6s、6、5 等设备上的 iOS 9 甚至 iOS 10 中都能运行,但无法在 iPhone 7 模拟器中运行。

解决方案是我们需要使用最新的XCode 8.1 beta来运行此程序

最佳答案

It's a bug.

It's fixed in Xcode 8.1 beta.

Xcode 8.1 beta [AVAssetExportSession allExportPresets] iPhone 7模拟器现在返回
AVAssetExportPreset1920x1080,
AVAssetExportPresetLowQuality,
AVAssetExportPresetAppleM4A,
AVAssetExportPreset640x480,
AVAssetExportPreset3840x2160,
AVAssetExportPresetHighestQuality,
AVAssetExportPreset1280x720,
AVAssetExportPresetMediumQuality,
AVAssetExportPreset960x540

Xcode 8.0 [AVAssetExportSession allExportPresets] iPhone 7模拟器返回空数组

关于ios - iOS 10中的AVAssetExportSession不适用于iPhone 7,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/39745570/

相关文章:

iphone - 使用依赖子项目时 Xcode 中的链接问题

python - pycuda 失败; Theano 与 Anaconda

ios - SWIFT - xcode 6 beta 6 中的 KVO .. observeValueForKeyPath 不再被调用

ios - UITextFieldDelegate 与 UITextField 控件事件

ios - 如何以编程方式使用 Assets 目录图像的切片信息?

ios - 如何在 swift 中按日期对对象数组进行分组?

objective-c - 如何在 Xcode 项目中使用 Twine?

ios - 如何在ios中上传多张图片?

ios - (Swift)需要逐步了解如何在 View Controller 之间滑动

单击 Google 登录按钮时 iOS 应用程序崩溃