swift - 使用 AVMutableComposition 难以合并两个视频

标签 swift avfoundation avasset avmutablecomposition

我正在开发一个项目,我试图使用 AVMutableComposition 将两个 AVAsset(视频文件)合并在一起。我的视频文件保存到我的相机胶卷时,都完全符合预期。他们的 URL 是有效的,但我最终导出的产品只显示第一个视频,而不是第二个合并的视频。这是我正在使用的代码:

    // Setup video asset
    let videoAsset: AVAsset = AVAsset( url: clip1 )

    // Setup composition
    let composition = AVMutableComposition()

    // Get video track
    let vtrack = videoAsset.tracks(withMediaType: AVMediaType.video)

    // Setup the first video track as asset track
    let videoTrack: AVAssetTrack = vtrack[0]

    // Setup the video timerange
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)

    // Setup the composition video track
    let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)!

    // Insert expected time range
    do {
        try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
    } catch {
        print("An error occurred")
    }

    // Setup second video asset
    let reversedAsset: AVAsset = AVAsset( url: clip2 )

    // Setup the video track
    let vtrack1 = reversedAsset.tracks(withMediaType: AVMediaType.video)

    // Setup the video track
    let videoTrack1: AVAssetTrack = vtrack1[0]

    // Setup the video time range
    let vid1_timerange = CMTimeRangeMake(kCMTimeZero, reversedAsset.duration)

    // Setup the second composition video track
    let secondCompositionVideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)!

    // Insert time range
    do {
        try secondCompositionVideoTrack.insertTimeRange(vid1_timerange, of: videoTrack1, at: videoAsset.duration)
    } catch {
        print("An error occurred")
    }

    // Setup the folder path
    let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)

    // Setup documents directory
    let documentsDirectory = paths[0] as String

    // Setup the last path component
    let lastPath = clip1.lastPathComponent

    // Setup the reverse string
    let reverseString = "loop-" + lastPath

    // Setup desired full path
    let fullPath: String = "\(documentsDirectory)/\(reverseString)"

    // Setup reverse destination URL
    let reverseURL = URL(fileURLWithPath: fullPath)

    // Export
    let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!

    // Setup the destination for output
    exportSession.outputURL = reverseURL

    // Setup the file type
    exportSession.outputFileType = AVFileType.mp4

    exportSession.exportAsynchronously(completionHandler: {
        if exportSession.status == .completed {

            // Send completion on main queue
            DispatchQueue.main.async(execute: {

                PHPhotoLibrary.shared().performChanges({PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: reverseURL)
                }) { saved, error in
                    if saved {
                        print("save of potentially looped video succesful")
                    }
                }

                // Send completion handler
                completionHandler(reverseURL)

            })
            return
        } else if exportSession.status == .failed {
            print("Loop Export failed - \(String(describing: exportSession.error))")
            completionHandler(nil)
        }

        completionHandler(nil)
        return
    })

保存到我的相机胶卷的只是第一个片段,看不到第二个片段的迹象。任何帮助将不胜感激。谢谢!

最佳答案

我写了下面的代码来合并视频。这工作正常。我还在需要的代码行上方写了注释。

let videoAssets1 = AVAsset(url: videoUrl1)
let videoAssets2 = AVAsset(url: videoUrl2)

let mixComposition = AVMutableComposition()

// Create composition track for first video
let firstCompositionTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
        try firstCompositionTrack?.insertTimeRange(CMTimeRange(start: kCMTimeZero, end: videoAssets1.duration), of: videoAssets1.tracks(withMediaType: .video)[0], at: kCMTimeZero)
    } catch {
        print("Error = \(error.localizedDescription)")
}

// Create composition track for second video
let secondCompositionTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
        try secondCompositionTrack?.insertTimeRange(CMTimeRange(start: kCMTimeZero, end: videoAssets2.duration), of: videoAssets2.tracks(withMediaType: .video)[0], at: kCMTimeZero)
} catch {
        print("Error = \(error.localizedDescription)")
}

//See how we are creating AVMutableVideoCompositionInstruction object.This object will contain the array of our AVMutableVideoCompositionLayerInstruction objects.You set the duration of the layer.You should add the lenght equal to the lingth of the longer asset in terms of duration.
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: videoAssets1.duration)

// We will be creating 2 AVMutableVideoCompositionLayerInstruction objects.
// Each for our 2 AVMutableCompositionTrack.
// Here we are creating AVMutableVideoCompositionLayerInstruction for out first track.
// See how we make use of CGAffineTransform to move and scale our First Track.
// So it is displayed at the bottom of the screen in smaller size.
// (First track in the one that remains on top).    
let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstCompositionTrack!)
let firstScale : CGAffineTransform = CGAffineTransform(scaleX: 1, y: 1)
let firstMove: CGAffineTransform = CGAffineTransform(translationX: 0, y: 0)
firstLayerInstruction.setTransform(firstScale.concatenating(firstMove), at: kCMTimeZero)


// Here we are creating AVMutableVideoCompositionLayerInstruction for second track.
// See how we make use of CGAffineTransform to move and scale our second Track.
let secondLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondCompositionTrack!)
let secondScale : CGAffineTransform = CGAffineTransform(scaleX: 1, y: 1)
let secondMove : CGAffineTransform = CGAffineTransform(translationX: (firstCompositionTrack?.naturalSize.width)! + CGFloat(20), y: 0)
secondLayerInstruction.setTransform(secondScale.concatenating(secondMove), at: kCMTimeZero)

//Now we add our 2 created AVMutableVideoCompositionLayerInstruction objects to our AVMutableVideoCompositionInstruction in form of an array.
mainInstruction.layerInstructions = [firstLayerInstruction, secondLayerInstruction]

// Get the height and width of video.
let height = (Float((firstCompositionTrack?.naturalSize.height)!) > Float((secondCompositionTrack?.naturalSize.height)!)) ? firstCompositionTrack?.naturalSize.height : secondCompositionTrack?.naturalSize.height

//  height will be larger in both and width is total of both video.
let width = CGFloat((Float((firstCompositionTrack?.naturalSize.width)!) + Float((secondCompositionTrack?.naturalSize.width)!))) + CGFloat(20)

//Now we create AVMutableVideoComposition object.
//We can add mutiple AVMutableVideoCompositionInstruction to this object.
//We have only one AVMutableVideoCompositionInstruction object in our example.
//You can use multiple AVMutableVideoCompositionInstruction objects to add multiple layers of effects such as fade and transition but make sure that time ranges of the AVMutableVideoCompositionInstruction objects don't overlap.
let mainCompositionInst = AVMutableVideoComposition()
mainCompositionInst.instructions = [mainInstruction]
mainCompositionInst.frameDuration = CMTime(value: CMTimeValue(1), timescale: CMTimeScale(30))
mainCompositionInst.renderSize = CGSize(width: width, height: height!)


// Create the export session with the composition and set the preset to the highest quality.
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)

// Set the desired output URL for the file created by the export process.
exporter?.outputURL = URL(fileURLWithPath: self.getVideoPath(name: "videoCompose"))
exporter?.videoComposition = mainCompositionInst

// Set the output file type to be a mp4 movie.
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true

exporter?.exportAsynchronously(completionHandler: {() -> Void in
    DispatchQueue.main.async(execute: {() -> Void in
        if exporter?.status == .completed {

            do {
                let videoData = try Data(contentsOf: exporter!.outputURL!)

                // Here video will save in document directory path, you can use your requirement.
                try videoData.write(to: URL(fileURLWithPath: self.getVideoPath(name: "videoCompose")), options: Data.WritingOptions.atomic)

            } catch {
                print("Failed to Save video ===>>> \(error.localizedDescription)")
            }
        }
    })
})

希望对您有所帮助。

关于swift - 使用 AVMutableComposition 难以合并两个视频,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/49504075/

相关文章:

ios - 带正文的 Alamofire 发布请求

ios - 自定义 ios 应用程序找不到我的 nrf 信标

swift - 不可变字典值变化

iphone - 使用 AVFoundation Framework(AVCaptureSession) 在 iPhone 中录制视频?

ios - iOS 8 上的 AVCaptureVideoDataOutput 不会在指定的调度队列上发布样本缓冲区

ios - AVFoundation视频播放问题

ios - 需要在 mpmovieplayercontroller swift 中显示视频图像

ios - Swift 3 - 如何在应用程序中使用照片库图片?

ios - AVAssetExportSession 无法创建文件错误-12115

swift - AVAsset 方向/旋转问题