ios - Retrieving CVPixelBuffer from an AVCapturePhoto obtained via AVCapturePhotoCaptureDelegate

Tags: ios, avfoundation

As the title says, I am trying to retrieve the CVPixelBuffer of a captured photo from the output of the method:

AVCapturePhotoCaptureDelegate.photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?)

The pixelBuffer of the photo parameter is nil in the delegate method call, and I would like to use it for some low-level image processing.

I mostly followed the sample code that can be found at:

https://developer.apple.com/library/content/samplecode/AVCam/Introduction/Intro.html

and the AVFoundation documentation.

Since the AVFoundation session configuration is somewhat verbose and may hold the answer, I will simply paste the entire object that handles it; it should contain all the relevant code:

protocol CameraServiceDelegate: class {
    func cameraServiceDidCapturePhoto(withBuffer buffer: CVPixelBuffer)
    func cameraServiceEncounteredError(_ error: Error?)
}

final class CameraService: NSObject {

    struct BufferRetrievalFailure: Error {}

    weak var delegate: CameraServiceDelegate?

    private let session = AVCaptureSession()
    private var discoverySession = AVCaptureDevice.DiscoverySession(
        deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera],
        mediaType: .video,
        position: .back
    )
    private var deviceInput: AVCaptureDeviceInput!
    private let photoOutput = AVCapturePhotoOutput()

    private let sessionQueue = DispatchQueue(label: "av-capture-session.serial.queue")

    private var captureDevice: AVCaptureDevice? {
        return .default(.builtInDualCamera, for: .video, position: .back)
            ?? .default(.builtInWideAngleCamera, for: .video, position: .back)
            ?? .default(.builtInWideAngleCamera, for: .video, position: .front)
    }

    func setup(with layer: AVCaptureVideoPreviewLayer) {
        layer.session = session

        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            break
        case .notDetermined:
            requestVideoAuthorization()
        default:
            assertionFailure("Just enable video, this is not a real app.")
        }

        sessionQueue.async { [weak self] in
            self?.setupAVSession(with: layer)
        }
    }

    func resume() {
        sessionQueue.async { [weak session] in
            session?.startRunning()
        }
    }

    func suspend() {
        sessionQueue.async { [weak session] in
            session?.stopRunning()
        }
    }

    func capturePhoto() {
        sessionQueue.async { [weak self] in
            guard let strongSelf = self else {
                return
            }

            strongSelf.photoOutput.capturePhoto(with: strongSelf.capturePhotoSettings(), delegate: strongSelf)
        }
    }

    private func requestVideoAuthorization() {
        sessionQueue.suspend()

        AVCaptureDevice.requestAccess(for: .video) { [weak sessionQueue] isAuthorized in
            guard isAuthorized else {
                assertionFailure("Just enable video, this is not a real app.")
                return
            }

            sessionQueue?.resume()
        }
    }

    private func setupAVSession(with layer: AVCaptureVideoPreviewLayer) {
        session.beginConfiguration()

        session.sessionPreset = .photo

        setupVideoInput()
        setupVideoPreviewViewLayer(with: layer)
        setupPhotoOutput()

        session.commitConfiguration()
    }

    private func setupVideoInput() {
        guard let videoDevice = captureDevice,
              let deviceInput = try? AVCaptureDeviceInput(device: videoDevice),
              session.canAddInput(deviceInput) else {
            fatalError("Could not retrieve suitable capture device or configure video device input.")
        }

        self.deviceInput = deviceInput
        session.addInput(deviceInput)
    }

    private func setupVideoPreviewViewLayer(with layer: AVCaptureVideoPreviewLayer) {
        DispatchQueue.main.async {
            let statusBarOrientation = UIApplication.shared.statusBarOrientation

            layer.connection?.videoOrientation =
                statusBarOrientation != .unknown
                    ? AVCaptureVideoOrientation(rawValue: statusBarOrientation.rawValue)!
                    : .portrait
        }
    }

    private func setupPhotoOutput() {
        guard session.canAddOutput(photoOutput) else {
            fatalError("Could not configure photo output.")
        }

        session.addOutput(photoOutput)

        photoOutput.isHighResolutionCaptureEnabled = true
        photoOutput.isLivePhotoCaptureEnabled = false
        photoOutput.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliverySupported
    }

    private func capturePhotoSettings() -> AVCapturePhotoSettings {
        let settings: AVCapturePhotoSettings

        if photoOutput.availablePhotoCodecTypes.contains(.hevc) {
            settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
        } else {
            settings = AVCapturePhotoSettings()
        }

        settings.isHighResolutionPhotoEnabled = true
        settings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled

        return settings
    }
}

extension CameraService: AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil else {
            delegate?.cameraServiceEncounteredError(error)
            return
        }

        guard let buffer = photo.pixelBuffer else {
            delegate?.cameraServiceEncounteredError(BufferRetrievalFailure())
            return
        }

        delegate?.cameraServiceDidCapturePhoto(withBuffer: buffer)
    }
}

Best answer

I don't have a code sample for you because I work in Xamarin, but you need to set previewPhotoFormat on the AVCapturePhotoSettings object used when initiating the capture. An example I found online:

var settings = AVCapturePhotoSettings()
let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
let previewFormat = [
    kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
    kCVPixelBufferWidthKey as String: self.capturedButton.frame.width,
    kCVPixelBufferHeightKey as String: self.capturedButton.frame.height
] as [String : Any]
settings.previewPhotoFormat = previewFormat
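
Once a preview format is requested this way, the uncompressed preview buffer travels on the AVCapturePhoto itself. As a rough sketch (my reading of the API, not code from the quoted answer), the delegate method from the question could read photo.previewPixelBuffer, since photo.pixelBuffer is only populated for uncompressed (RAW/BGRA) main-image captures and stays nil for HEVC/JPEG:

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    guard error == nil else {
        delegate?.cameraServiceEncounteredError(error)
        return
    }

    // The preview buffer requested via previewPhotoFormat, delivered alongside
    // the compressed main image.
    guard let buffer = photo.previewPixelBuffer else {
        delegate?.cameraServiceEncounteredError(BufferRetrievalFailure())
        return
    }

    delegate?.cameraServiceDidCapturePhoto(withBuffer: buffer)
}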

I personally check availablePreviewPhotoPixelFormatTypes to see whether the format my analysis needs (kCVPixelFormatType_32BGRA) is among them. So far I have not come across a device that doesn't offer it.
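
Applied to the capturePhotoSettings() method from the question, that check could look roughly like the sketch below (assuming a 32BGRA preview is wanted for the processing, with a fallback to the first format the device offers):

private func capturePhotoSettings() -> AVCapturePhotoSettings {
    let settings: AVCapturePhotoSettings

    if photoOutput.availablePhotoCodecTypes.contains(.hevc) {
        settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
    } else {
        settings = AVCapturePhotoSettings()
    }

    settings.isHighResolutionPhotoEnabled = true
    settings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled

    // Prefer a 32BGRA preview buffer; fall back to whatever the device offers.
    let availableTypes = settings.availablePreviewPhotoPixelFormatTypes
    let preferredType: OSType? = availableTypes.contains(kCVPixelFormatType_32BGRA)
        ? kCVPixelFormatType_32BGRA
        : availableTypes.first

    if let previewPixelType = preferredType {
        settings.previewPhotoFormat = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType
        ]
    }

    return settings
}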

Based on a similar question found on Stack Overflow: https://stackoverflow.com/questions/47678894/
