ios - Mixing video streams on iOS

Tags: ios, avfoundation

I need to mix two video streams from different inputs into a single video file. I want to embed one video's frames inside the other's.

My sources are: a screen recording of the iOS app and the front camera.

Output: a video file in any format.

How can this be done?

Best Answer
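The code below builds an AVMutableComposition from the recorded clips, fixes each clip's orientation and scale with AVMutableVideoCompositionLayerInstructions, and exports the result with AVAssetExportSession. Note that as written it plays the two clips back to back; a picture-in-picture variant, which is what the question actually asks for, is sketched after the code.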

- (void)mergeAndExportVideos  // illustrative name; the original fragment's method signature is not shown
{
    // Assumes firstAsset, secondAsset, thirdAsset, url_album, ary_urls,
    // ary_temp_url and ActivityView are declared elsewhere (e.g. as ivars).
    NSString *str_first = [NSString stringWithFormat:@"%@", [ary_urls objectAtIndex:0]];
    NSString *str_secnd = [NSString stringWithFormat:@"%@", [ary_urls objectAtIndex:1]];
    NSString *str_third = [NSString stringWithFormat:@"%@", [ary_urls objectAtIndex:2]];
    NSURL *url_frst   = [[NSURL alloc] initWithString:str_first];
    NSURL *url_second = [[NSURL alloc] initWithString:str_secnd];
    NSURL *url_third  = [[NSURL alloc] initWithString:str_third];
    firstAsset  = [AVAsset assetWithURL:url_frst];
    secondAsset = [AVAsset assetWithURL:url_second];
    thirdAsset  = [AVAsset assetWithURL:url_third];  // loaded but never used below
    if(firstAsset !=nil && secondAsset!=nil){
        [ActivityView startAnimating];
        //Create the AVMutableComposition object. This will hold our multiple AVMutableCompositionTracks.
        AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];

        //VIDEO TRACK
        AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

        AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:firstAsset.duration error:nil];
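        // Note: the second clip is inserted at firstAsset.duration, so the two clips
        // play back to back rather than simultaneously.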

//        AUDIO TRACK
//                if(audioAsset!=nil){
//                    AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//                    [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//                }

        AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));
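        // This instruction covers the combined duration of both clips.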


        //FIXING ORIENTATION//


        AVMutableVideoCompositionLayerInstruction *firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
        AVAssetTrack * firstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        UIImageOrientation firstAssetOrientation_  = UIImageOrientationUp;
        BOOL isFirstAssetPortrait_  = NO;
        CGAffineTransform firstTransform = firstAssetTrack.preferredTransform;
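        // A preferredTransform with swapped b/c components means the clip was recorded in portrait.
        // Only the portrait flag is actually used below; the UIImageOrientation value is informational.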
        if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
            firstAssetOrientation_ = UIImageOrientationRight;
            isFirstAssetPortrait_ = YES;
        }
        if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
            firstAssetOrientation_ =  UIImageOrientationLeft;
            isFirstAssetPortrait_ = YES;
        }
        if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
            firstAssetOrientation_ =  UIImageOrientationUp;
        }
        if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
            firstAssetOrientation_ = UIImageOrientationDown;
        }
        // (Transform and opacity for this layer are set below, after the orientation check.)


        CGFloat FirstAssetScaleToFitRatio = 320.0/firstAssetTrack.naturalSize.width;
        if(isFirstAssetPortrait_)
        {
            FirstAssetScaleToFitRatio = 320.0/firstAssetTrack.naturalSize.height;
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [firstlayerInstruction setTransform:CGAffineTransformConcat(firstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
        }else
        {
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [firstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(firstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:kCMTimeZero];
        }
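        // Hide the first clip once it ends so it doesn't sit on top of the second.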
        [firstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];


        AVMutableVideoCompositionLayerInstruction *secondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
        AVAssetTrack *secondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        UIImageOrientation secondAssetOrientation_ = UIImageOrientationUp;
        BOOL isSecondAssetPortrait_ = NO;
        CGAffineTransform secondTransform = secondAssetTrack.preferredTransform;
        if (secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {
            secondAssetOrientation_ = UIImageOrientationRight;
            isSecondAssetPortrait_ = YES;
        }
        if (secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {
            secondAssetOrientation_ = UIImageOrientationLeft;
            isSecondAssetPortrait_ = YES;
        }
        if (secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {
            secondAssetOrientation_ = UIImageOrientationUp;
        }
        if (secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {
            secondAssetOrientation_ = UIImageOrientationDown;
        }

        // Scale the second clip the same way; its transform takes effect when the first clip ends.
        CGFloat SecondAssetScaleToFitRatio = 320.0/secondAssetTrack.naturalSize.width;
        if(isSecondAssetPortrait_){
            SecondAssetScaleToFitRatio = 320.0/secondAssetTrack.naturalSize.height;
            CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
            [secondlayerInstruction setTransform:CGAffineTransformConcat(secondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
        }else{
            CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
            [secondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(secondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:firstAsset.duration];
        }



        MainInstruction.layerInstructions = [NSArray arrayWithObjects:firstlayerInstruction, secondlayerInstruction,nil];
        AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
        mainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
        mainCompositionInst.frameDuration = CMTimeMake(1, 30);

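        // Portrait tracks report naturalSize as encoded (landscape), so swap width and height for them.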
        CGSize naturalSizeFirst, naturalSizeSecond;
        if(isFirstAssetPortrait_){
            naturalSizeFirst = CGSizeMake(firstAssetTrack.naturalSize.height, firstAssetTrack.naturalSize.width);
        } else {
            naturalSizeFirst = firstAssetTrack.naturalSize;
        }
        if(isSecondAssetPortrait_){
            naturalSizeSecond = CGSizeMake(secondAssetTrack.naturalSize.height, secondAssetTrack.naturalSize.width);
        } else {
            naturalSizeSecond = secondAssetTrack.naturalSize;
        }

        float renderWidth, renderHeight;
        if(naturalSizeFirst.width > naturalSizeSecond.width) {
            renderWidth = naturalSizeFirst.width;
        } else {
            renderWidth = naturalSizeSecond.width;
        }
        if(naturalSizeFirst.height > naturalSizeSecond.height) {
            renderHeight = naturalSizeFirst.height;
        } else {
            renderHeight = naturalSizeSecond.height;
        }
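        // Render at the larger of the two clip sizes so neither clip gets cropped.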


        mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mp4",arc4random() % 1000]];
        url_album = [NSURL fileURLWithPath:myPathDocs];


        [ary_temp_url replaceObjectAtIndex:0 withObject:url_album];


        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
        exporter.outputURL=url_album;
        exporter.outputFileType = AVFileTypeMPEG4;  // must match the .mp4 extension of the output path
        exporter.videoComposition = mainCompositionInst;
        exporter.shouldOptimizeForNetworkUse = YES;
        [exporter exportAsynchronouslyWithCompletionHandler:^
         {
             dispatch_async(dispatch_get_main_queue(), ^{
                 [self exportDidFinish:exporter];
             });
         }];
    }
}
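The question asks for one stream embedded in the other, while the answer above concatenates them. Below is a minimal picture-in-picture sketch using the same AVFoundation approach. It assumes `screenAsset` (the screen recording) and `cameraAsset` (the front-camera clip) are already loaded; the variable names and the quarter-width inset are illustrative, not part of the original answer. Both tracks are inserted at kCMTimeZero so they play at the same time, and the camera layer is scaled down and translated into a corner:

    AVMutableComposition *pipComposition = [AVMutableComposition composition];
    AVAssetTrack *screenTrack = [[screenAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *cameraTrack = [[cameraAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];

    // Both clips start at kCMTimeZero so they play simultaneously.
    AVMutableCompositionTrack *baseTrack = [pipComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [baseTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, screenAsset.duration) ofTrack:screenTrack atTime:kCMTimeZero error:nil];
    AVMutableCompositionTrack *insetTrack = [pipComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [insetTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, cameraAsset.duration) ofTrack:cameraTrack atTime:kCMTimeZero error:nil];

    CGSize renderSize = screenTrack.naturalSize;

    // Shrink the camera frame to a quarter of the render width and move it to the top-right corner.
    CGFloat insetScale = (renderSize.width / 4.0) / cameraTrack.naturalSize.width;
    CGFloat insetX = renderSize.width - cameraTrack.naturalSize.width * insetScale;
    CGAffineTransform insetTransform = CGAffineTransformConcat(CGAffineTransformMakeScale(insetScale, insetScale), CGAffineTransformMakeTranslation(insetX, 0));

    AVMutableVideoCompositionLayerInstruction *insetInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:insetTrack];
    [insetInstruction setTransform:insetTransform atTime:kCMTimeZero];
    AVMutableVideoCompositionLayerInstruction *baseInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:baseTrack];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, screenAsset.duration);
    // The first layer instruction in the array is composited on top.
    instruction.layerInstructions = @[insetInstruction, baseInstruction];

    AVMutableVideoComposition *pipVideoComposition = [AVMutableVideoComposition videoComposition];
    pipVideoComposition.instructions = @[instruction];
    pipVideoComposition.frameDuration = CMTimeMake(1, 30);
    pipVideoComposition.renderSize = renderSize;

Export works the same way as above: hand `pipComposition` to an AVAssetExportSession and set its `videoComposition` to `pipVideoComposition`. This sketch ignores the camera clip's preferredTransform; a portrait front-camera recording would additionally need the orientation handling shown in the answer.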

A similar question about mixing video streams on iOS can be found on Stack Overflow: https://stackoverflow.com/questions/24972776/
