swift - Can't display an animated CALayer on a video using AVVideoCompositionCoreAnimationTool

Tags: swift calayer cabasicanimation avassetexportsession avvideocomposition

Update 6: I have managed to solve my problem completely, but I would still like a better explanation than my own guesses as to why it was not working before, in case my reasoning is incorrect.

I have been trying to animate a sprite sheet on top of a video, but every time I export the video the end result is the sample video I started with.

Here is my code:

First, my custom CALayer that handles my own sprite sheets:

class SpriteLayer: CALayer {
    var frameIndex: Int

    override init() {
        // Using 0 as a default state
        self.frameIndex = 0
        super.init()
    }

    required init?(coder aDecoder: NSCoder) {
        self.frameIndex = 0
        super.init(coder: aDecoder)
    }

    override func display() {
        let currentFrameIndex = self.frameIndex
        if currentFrameIndex == 0 {
            return
        }
        let frameSize = self.contentsRect.size
        self.contentsRect = CGRect(x: 0, y: CGFloat(currentFrameIndex - 1) * frameSize.height, width: frameSize.width, height: frameSize.height)
    }

    override func action(forKey event: String) -> CAAction? {
        if event == "contentsRect" {
            return nil
        }
        return super.action(forKey: event)
    }

    override class func needsDisplay(forKey key: String) -> Bool {
        return key == "frameIndex"
    }
}

Gif is a basic class, nothing fancy, and it works just fine. gif.strip is a UIImage containing the gif's vertical sprite sheet.
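
For reference, a hypothetical minimal sketch of what such a Gif class could look like (this is an assumption, not the actual class; only the strip property is used by the export code below):

// Hypothetical sketch of the Gif class mentioned above; the real class does more,
// but the only thing the export code relies on is the `strip` property.
class Gif {
    // Vertical sprite sheet: every animation frame stacked top to bottom in one image.
    var strip: UIImage?

    init(strip: UIImage?) {
        self.strip = strip
    }
}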

Now the method that is supposed to export the new video (it is part of a bigger class used for exporting):

func convertAndExport(to url :URL , completion: @escaping () -> Void ) {
        // Get Initial info and make sure our destination is available
        self.outputURL = url
        let stripCgImage = self.gif.strip!.cgImage!
        // This is used to time how long the export took
        let start = DispatchTime.now()
        do {
            try FileManager.default.removeItem(at: outputURL)
        } catch {
            print("Remove Error: \(error.localizedDescription)")
            print(error)
        }
        // Find and load "sample.mp4" as a AVAsset
        let videoPath = Bundle.main.path(forResource: "sample", ofType: "mp4")!
        let videoUrl = URL(fileURLWithPath: videoPath)
        let videoAsset = AVAsset(url: videoUrl)
        // Start a new mutable Composition with the same base video track
        let mixComposition = AVMutableComposition()
        let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!
        let clipVideoTrack = videoAsset.tracks(withMediaType: .video).first!
        do {
            try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
        } catch {
            print("Insert Error: \(error.localizedDescription)")
            print(error)
            return
        }
        compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform
        // Quick access to the video size
        let videoSize = clipVideoTrack.naturalSize
        // Setup CALayer and it's animation
        let aLayer = SpriteLayer()
        aLayer.contents = stripCgImage
        aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        aLayer.opacity = 1.0
        aLayer.masksToBounds = true
        aLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        aLayer.contentsRect = CGRect(x: 0, y: 0, width: 1, height: 1.0 / 3.0)
        let spriteAnimation = CABasicAnimation(keyPath: "frameIndex")
        spriteAnimation.fromValue = 1
        spriteAnimation.toValue = 4
        spriteAnimation.duration = 2.25
        spriteAnimation.repeatCount = .infinity
        spriteAnimation.autoreverses = false
        spriteAnimation.beginTime = AVCoreAnimationBeginTimeAtZero
        aLayer.add(spriteAnimation, forKey: nil)
        // Setup Layers for AVVideoCompositionCoreAnimationTool
        let parentLayer = CALayer()
        let videoLayer = CALayer()
        parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        parentLayer.addSublayer(videoLayer)
        parentLayer.addSublayer(aLayer)
        // Create the mutable video composition
        let videoComp = AVMutableVideoComposition()
        videoComp.renderSize = videoSize
        videoComp.frameDuration = CMTimeMake(1, 30)
        videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
        // Set the video composition to apply to the composition's video track
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
        let videoTrack = mixComposition.tracks(withMediaType: .video).first!
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        instruction.layerInstructions = [layerInstruction]
        videoComp.instructions = [instruction]
        // Initialize export session
        let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!
        assetExport.videoComposition = videoComp
        assetExport.outputFileType = AVFileType.mp4
        assetExport.outputURL = self.outputURL
        assetExport.shouldOptimizeForNetworkUse = true
        // Export
        assetExport.exportAsynchronously {
            let status = assetExport.status
            switch status {
            case .failed:
                print("Export Failed")
                print("Export Error: \(assetExport.error!.localizedDescription)")
                print(assetExport.error!)
            case .unknown:
                print("Export Unknown")
            case .exporting:
                print("Export Exporting")
            case .waiting:
                print("Export Waiting")
            case .cancelled:
                print("Export Cancelled")
            case .completed:
                let end = DispatchTime.now()
                let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
                let timeInterval = Double(nanoTime) / 1_000_000_000
                // Function is now over, we can print how long it took
                print("Time to generate video: \(timeInterval) seconds")
                completion()
            }
        }
}

Edit: I based my code on the following links

Update 1: I tried removing the CABasicAnimation part of the code and playing around with my CALayer, but to no avail. I can't even get an image to show up. To test things out, I tried animating this sprite sheet with a CAKeyframeAnimation on contentsRect in an Xcode Playground and it worked fine, so I don't think the problem is with the CABasicAnimation, and maybe not even with the CALayer itself. I could really use some help here, because I don't understand why I can't even display an image on top of the exported sample video.
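
For reference, a minimal sketch of the kind of Playground check described above, assuming a 3-frame vertical strip bundled as "spriteStrip" (the image name, frame count and sizes are assumptions); note that this animates contentsRect on a live, on-screen layer, not through AVFoundation:

import PlaygroundSupport
import UIKit

// Assumed 3-frame vertical sprite sheet added to the Playground's resources.
let spriteStrip = UIImage(named: "spriteStrip")!

let container = UIView(frame: CGRect(x: 0, y: 0, width: 300, height: 300))
container.backgroundColor = .black

let spriteLayer = CALayer()
spriteLayer.frame = container.bounds
spriteLayer.contents = spriteStrip.cgImage
// Start by showing only the top third of the strip.
spriteLayer.contentsRect = CGRect(x: 0, y: 0, width: 1, height: 1.0 / 3.0)
container.layer.addSublayer(spriteLayer)

// Step through the three frames with a discrete keyframe animation on contentsRect.
let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contentsRect))
animation.duration = 2.25
animation.calculationMode = kCAAnimationDiscrete
animation.repeatCount = .infinity
animation.values = [
    CGRect(x: 0, y: 0, width: 1, height: 1.0 / 3.0),
    CGRect(x: 0, y: 1.0 / 3.0, width: 1, height: 1.0 / 3.0),
    CGRect(x: 0, y: 2.0 / 3.0, width: 1, height: 1.0 / 3.0)
]
spriteLayer.add(animation, forKey: "sprite")

PlaygroundPage.current.liveView = container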

Update 2: In response to Matt's comment, I tried forgetting about the sprite sheet for a moment and changed it to a CATextLayer, but I still don't see anything on my video (it contains dark images, so white text should be perfectly visible):

let aLayer = CATextLayer()
aLayer.string = "This is a test"
aLayer.fontSize = videoSize.height / 6
aLayer.alignmentMode = kCAAlignmentCenter
aLayer.foregroundColor = UIColor.white.cgColor
aLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height / 6)

Update 3: As per Matt's request, I tried changing parentLayer.addSublayer(aLayer) to videoLayer.addSublayer(aLayer), but still nothing changed. Then again, I expected as much, because the documentation for the AVVideoCompositionCoreAnimationTool initializer reads as follows:

convenience init(postProcessingAsVideoLayer videoLayer: CALayer,
                 in animationLayer: CALayer)

meaning that my parentLayer is the animationLayer, which probably means any animation should be done in that layer.

Update 4: I am starting to go crazy here. For now I have given up on the idea of displaying text or an animated image; I just want to affect my video in any way possible, so I changed aLayer to this:

let aLayer = CALayer()
aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
aLayer.backgroundColor = UIColor.white.cgColor

Well, this does absolutely nothing; I still get my original sample video at my outputUrl. (I started testing this in a Playground with the following code, in case you want to "play along".)

import PlaygroundSupport
import UIKit
import Foundation
import AVFoundation

func convertAndExport(to url :URL , completion: @escaping () -> Void ) {
    let start = DispatchTime.now()
    do {
        try FileManager.default.removeItem(at: url)
    } catch {
        print("Remove Error: \(error.localizedDescription)")
        print(error)
    }

    let videoPath = Bundle.main.path(forResource: "sample", ofType: "mp4")!
    let videoUrl = URL(fileURLWithPath: videoPath)
    let videoAsset = AVURLAsset(url: videoUrl)
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!
    let clipVideoTrack = videoAsset.tracks(withMediaType: .video).first!

    do {
        try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
    } catch {
        print("Insert Error: \(error.localizedDescription)")
        print(error)
        return
    }
    compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform
    let videoSize = clipVideoTrack.naturalSize
    print("Video Size Detected: \(videoSize.width) x \(videoSize.height)")

    let aLayer = CALayer()
    aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    aLayer.backgroundColor = UIColor.white.cgColor

    let parentLayer = CALayer()
    let videoLayer = CALayer()
    parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(aLayer)
    aLayer.setNeedsDisplay()
    let videoComp = AVMutableVideoComposition()
    videoComp.renderSize = videoSize
    videoComp.frameDuration = CMTimeMake(1, 30)
    videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    let videoTrack = mixComposition.tracks(withMediaType: .video).first!
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    instruction.layerInstructions = [layerInstruction]
    videoComp.instructions = [instruction]

    let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!
    assetExport.videoComposition = videoComp
    assetExport.outputFileType = AVFileType.mp4
    assetExport.outputURL = url
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously {
        let status = assetExport.status
        switch status {
        case .failed:
            print("Export Failed")
            print("Export Error: \(assetExport.error!.localizedDescription)")
            print(assetExport.error!)
        case .unknown:
            print("Export Unknown")
        case .exporting:
            print("Export Exporting")
        case .waiting:
            print("Export Waiting")
        case .cancelled:
            print("Export Cancelled")
        case .completed:
            let end = DispatchTime.now()
            let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
            let timeInterval = Double(nanoTime) / 1_000_000_000
            print("Time to generate video: \(timeInterval) seconds")
            completion()
        }
    }
}

let outputUrl = FileManager.default.temporaryDirectory.appendingPathComponent("test.mp4")
convertAndExport(to: outputUrl) {
    print(outputUrl)
}

Please, can someone help me understand what I am doing wrong...

Update 5: I am running everything on an iPad Air 2 (so no simulator), except for the Playground tests, because I take pictures with the camera and stitch them into a sprite sheet, which I then plan to animate on top of a video that I will send by email. I started doing the Playground testing because every test on the iPad requires me to go through the whole app cycle (countdown, photos, form, email sending/receiving).

Best Answer

OK, I finally got it working the way I had always hoped it would.

First of all, even though he deleted his comments, thanks to Matt for the link to a working example that helped me piece together what was wrong with my code.

  • First:
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!

I needed to use AVAssetExportPresetHighestQuality instead of AVAssetExportPresetPassthrough. My guess is that the passthrough preset means you don't do any re-encoding, so setting it to highest quality (not medium, because the video I export is bigger than 400x400) is what allows my video to actually be re-encoded. I'm guessing this is what was preventing the exported video from containing any of the CALayer content I was trying to add (even covering the video in white).
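
In other words, assuming the rest of that block stays unchanged, the export session setup ends up looking like this:

// Re-encode the composition instead of passing the source video through untouched.
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.videoComposition = videoComp
assetExport.outputFileType = AVFileType.mp4
assetExport.outputURL = self.outputURL
assetExport.shouldOptimizeForNetworkUse = true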

  • Second (not sure if this one really has an effect, but I will try again without it later):
parentLayer.addSublayer(aLayer)

I replaced it with:

videoLayer.addSublayer(aLayer)

Not sure if this really matters, but my understanding is that this is actually the animation layer for AVVideoCompositionCoreAnimationTool, while parentLayer is just a container not meant to hold anything more. I could be wrong about that, though.
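
Put differently, the layer setup that ended up working looks roughly like this sketch, with aLayer now attached to videoLayer instead of parentLayer:

// parentLayer is just the container handed to the animation tool;
// videoLayer receives the rendered video frames and now also hosts the sprite layer.
parentLayer.addSublayer(videoLayer)
videoLayer.addSublayer(aLayer)
videoComp.animationTool = AVVideoCompositionCoreAnimationTool(
    postProcessingAsVideoLayer: videoLayer,
    in: parentLayer)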

  • The third change I made:
let spriteAnimation = CABasicAnimation(keyPath: "frameIndex")
spriteAnimation.fromValue = 1
spriteAnimation.toValue = 4
spriteAnimation.duration = 2.25
spriteAnimation.repeatCount = .infinity
spriteAnimation.autoreverses = false
spriteAnimation.beginTime = AVCoreAnimationBeginTimeAtZero
aLayer.add(spriteAnimation, forKey: nil)

I changed it to this:

let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contentsRect))
animation.duration = 2.25
animation.calculationMode = kCAAnimationDiscrete
animation.repeatCount = .infinity
animation.values = [
    CGRect(x: 0, y: 0, width: 1, height: 1/3.0),
    CGRect(x: 0, y: 1/3.0, width: 1, height: 1/3.0),
    CGRect(x: 0, y: 2/3.0, width: 1, height: 1/3.0)
    ] as [CGRect]
animation.beginTime = AVCoreAnimationBeginTimeAtZero
animation.fillMode = kCAFillModeBackwards
animation.isRemovedOnCompletion = false
aLayer.add(animation, forKey: nil)

This change mostly removes the custom animation for my sprite sheet (since it is always the same, I wanted a working example first; later I will generalize it and probably add it to my private UI pod). The most important part, though, is animation.isRemovedOnCompletion = false: I noticed that removing this line means the animation simply does not play at all on the exported video. So for anyone whose CABasicAnimation is not animating on the video after export, try checking whether isRemovedOnCompletion is set correctly on your animation.
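
As a side note, one way the keyframe values could be generalized for a strip with an arbitrary number of frames might look like the following sketch (contentsRectKeyframes and frameCount are illustrative names, not part of the code above):

// Hypothetical helper: build contentsRect keyframes for a vertical strip of `frameCount` frames.
func contentsRectKeyframes(frameCount: Int) -> [CGRect] {
    let frameHeight = 1.0 / CGFloat(frameCount)
    return (0..<frameCount).map { index in
        CGRect(x: 0, y: CGFloat(index) * frameHeight, width: 1, height: frameHeight)
    }
}

let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contentsRect))
animation.duration = 2.25
animation.calculationMode = kCAAnimationDiscrete
animation.repeatCount = .infinity
animation.values = contentsRectKeyframes(frameCount: 3)
animation.beginTime = AVCoreAnimationBeginTimeAtZero
animation.fillMode = kCAFillModeBackwards
animation.isRemovedOnCompletion = false
aLayer.add(animation, forKey: nil)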

I think that is pretty much all the changes I made.

Although I have technically answered my own question, my bounty still stands for a better understanding of how AVVideoCompositionCoreAnimationTool and AVAssetExportSession work, and of why I had to make the changes I did to finally get this working, if anyone is interested in explaining.

Thanks again to Matt, who helped me by showing me how he did it.

This question and answer were originally posted on Stack Overflow: https://stackoverflow.com/questions/52427597/
