ios - How to capture photos and video from the same AVCaptureSession?

Tags: ios swift camera avfoundation snapchat

I'm trying to build a SnapChat-like app. With a single button, I want to let the user take a photo (on tap) and record a video (on long press).

I'm using AVFoundation for this. The tricky part is that I can't get it to work properly within the same AVCaptureSession. What I mean is: for the two kinds of capture I only have one preview layer, so how do I start the right one depending on how the user interacts with the record button? Has anyone done something like this before?

Here is a portion of my code:

import UIKit
import AVFoundation

protocol RecordCameraDelegate {
    func didSavedOutputFile(url: URL!, error: Error?)
    func didSavedImage(image: UIImage?)
}

// MARK: - Camera
class RecordCamera : NSObject {

    var videoLayer : AVCaptureVideoPreviewLayer!
    var delegate : RecordCameraDelegate!
    var capturedPhoto : UIImage?

    fileprivate var captureSession = AVCaptureSession()
    fileprivate var photoSession = AVCaptureSession()

    fileprivate var movieOutput = AVCaptureMovieFileOutput()
    fileprivate var cameraDevice : AVCaptureDevicePosition!
    fileprivate let stillImageOutput = AVCaptureStillImageOutput()

    // Devices
    fileprivate lazy var frontCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .front }.first
    }()

    fileprivate lazy var backCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .back }.first
    }()

    fileprivate lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
    }()

    fileprivate var tempFilePath: URL = {
        // Build a proper file URL; checking fileExists against a "file://..."
        // string (absoluteString) never matches, so use the plain path instead.
        let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("bighug").appendingPathExtension("mp4")
        if FileManager.default.fileExists(atPath: tempURL.path) {
            do {
                try FileManager.default.removeItem(at: tempURL)
            } catch let error { print("Can't remove existing temp file: \(String(describing: error))") }
        }
        return tempURL
    }()

    // MARK: - Initialization
    init(view: UIView, cameraPosition: AVCaptureDevicePosition = .front) {
        super.init()

        cameraDevice = cameraPosition

        // Video
        self.configureToRecord(view: view)
        // Photo
        self.configureToCapturePhoto()
    }

    func configureToRecord(view: UIView? = nil) {

        captureSession.beginConfiguration()
        defer {
            // commit & stop session
            captureSession.commitConfiguration()
            if  !captureSession.isRunning { captureSession.startRunning() }
        }

        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // Start configuration
        if !captureSession.isRunning {

            // layer
            if  let validView = view {
                videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                videoLayer.frame = validView.bounds
                validView.layer.addSublayer(videoLayer)
            }

            // add device inputs (front camera and mic)
            if  cameraDevice == .front {
                if  let frontInput = deviceInputFrom(device: frontCameraDevice) {
                    captureSession.addInput(frontInput)
                }
            } else {
                if  let backInput = deviceInputFrom(device: backCameraDevice) {
                    captureSession.addInput(backInput)
                }
            }
        }

        // Unwrap before adding; passing nil to addInput crashes at runtime
        if  let micInput = deviceInputFrom(device: micDevice) {
            captureSession.addInput(micInput)
        }

        // Output
        movieOutput.movieFragmentInterval = kCMTimeInvalid

        // Remove previous output
        if  let existingOutput = captureSession.outputs.first as? AVCaptureOutput {
            captureSession.removeOutput(existingOutput)
        }
        // Add Movie Output
        if  captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }
    }

    func configureToCapturePhoto() {

        photoSession.beginConfiguration()
        defer { photoSession.commitConfiguration() }

        photoSession.sessionPreset = AVCaptureSessionPresetPhoto
        stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]

        if #available(iOS 10.0, *) {
            let cameraOutput = AVCapturePhotoOutput()
            // Add Photo Output
            if  photoSession.canAddOutput(cameraOutput) {
                photoSession.addOutput(cameraOutput)
            }
        }
        else {
            // Add Photo Output
            if  photoSession.canAddOutput(stillImageOutput) {
                photoSession.addOutput(stillImageOutput)
            }
        }
    }

    func takePicture() {
        if #available(iOS 10.0, *) {
            let cameraOutput = photoSession.outputs.first as! AVCapturePhotoOutput
            // Capture Picture
            let settings = AVCapturePhotoSettings()
            let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
            let previewFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                kCVPixelBufferWidthKey as String: 828,
                kCVPixelBufferHeightKey as String: 828
            ]
            settings.previewPhotoFormat = previewFormat
            cameraOutput.capturePhoto(with: settings, delegate: self)
        }
        else {
            if  let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
                stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { (imageDataSampleBuffer, error) -> Void in
                    // Guard the sample buffer before converting it; passing nil crashes
                    guard let sampleBuffer = imageDataSampleBuffer,
                        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) else {
                        self.delegate?.didSavedImage(image: nil)
                        return
                    }
                    //UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
                    self.capturedPhoto = UIImage(data: imageData)
                }
            }
        }
    }

    // MARK: - Record Methods
    func startRecording() {
        // Take picture
        print("Camera started recording")
        self.takePicture()
        // Start recording
        movieOutput.startRecording(
            toOutputFileURL: tempFilePath,
            recordingDelegate: self
        )
    }

    func stopRecording() {
        print("Camera stopped recording")
        movieOutput.stopRecording()
    }

    // MARK: - Modes
    func cameraMode() {

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // From
        if  cameraDevice == .front {
            if  let validFrontDevice = deviceInputFrom(device: frontCameraDevice) {
                if !inputs.contains(validFrontDevice) {
                    captureSession.addInput(validFrontDevice)
                }
            }
        }
        // Back
        if  cameraDevice == .back {
            if  let validBackDevice = deviceInputFrom(device: backCameraDevice) {
                if !inputs.contains(validBackDevice) {
                    captureSession.addInput(validBackDevice)
                }
            }
        }

        print("Record Camera --> Set VIDEO Mode")
    }

    func audioMode() {

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // Remove..
        for input in inputs {
            if  let deviceInput = input as? AVCaptureDeviceInput {
                if  deviceInput.device == backCameraDevice
                ||  deviceInput.device == frontCameraDevice {
                    captureSession.removeInput(deviceInput)
                }
            }
        }

        print("Record Camera --> Set AUDIO Mode")
    }

    // MARK: - Util methods
    fileprivate func deviceInputFrom(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let outError {
            print("Device setup error occured: \(String(describing: outError))")
            return nil
        }
    }

    func swipeCamera() {

        cameraDevice = cameraDevice == .front ? .back : .front

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // Remove...
        for input in inputs {
            if  let deviceInput = input as? AVCaptureDeviceInput {
                if  deviceInput.device == backCameraDevice && cameraDevice == .front {
                    captureSession.removeInput(deviceInput)
                    photoSession.removeInput(deviceInput)
                    break;
                } else if deviceInput.device == frontCameraDevice && cameraDevice == .back {
                    captureSession.removeInput(deviceInput)
                    photoSession.removeInput(deviceInput)
                    break;
                }
            }
        }

        // From
        if  cameraDevice == .front {
            if  let validFrontDevice = deviceInputFrom(device: frontCameraDevice) {
                if !inputs.contains(validFrontDevice) {
                    captureSession.addInput(validFrontDevice)
                    photoSession.addInput(validFrontDevice)
                    print("Record Camera --> Swipe to Front Camera")
                }
            }
        }
        // Back
        if  cameraDevice == .back {
            if  let validBackDevice = deviceInputFrom(device: backCameraDevice) {
                if !inputs.contains(validBackDevice) {
                    captureSession.addInput(validBackDevice)
                    photoSession.addInput(validBackDevice)
                    print("Record Camera --> Swipe to Back Camera")
                }
            }
        }
    }
}

// MARK: - Capture Output
extension RecordCamera : AVCaptureFileOutputRecordingDelegate {

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        // Not implemented
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        guard error == nil else {
            if  let photo = capturedPhoto {
                delegate?.didSavedImage(image: photo)
            }
            return
        }
        delegate?.didSavedOutputFile(url: outputFileURL, error: error)
    }
}

@available(iOS 10.0, *)
extension RecordCamera : AVCapturePhotoCaptureDelegate {

    func capture(_ captureOutput: AVCapturePhotoOutput, didCapturePhotoForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) {
        print("Picture taken")
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {

        guard error == nil else {
            print("Failed Capturing Picture: \(String(describing: error!.localizedDescription))")
            capturedPhoto = nil
            //self.delegate.didSavedImage(image: nil)
            return
        }

        if  let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer,
            let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print("Photo Saved!")
            capturedPhoto = UIImage(data: imageData)
            //self.delegate.didSavedImage(image: image)
        }

    }
}

Best Answer

I built almost exactly the functionality you need. I created and configured a single capture session. For the video output I used the AVCaptureVideoDataOutput class, for audio the AVCaptureAudioDataOutput class, and for photos AVCaptureStillImageOutput.
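
For reference, here is a minimal sketch of that single-session setup. It is not the answerer's actual code: the default-device lookup, the High preset, and the delegate wiring shown in comments are my assumptions.

import AVFoundation

// One AVCaptureSession feeding three outputs: video frames for the
// asset writer, audio buffers, and a still image output for photos.
let session = AVCaptureSession()
session.sessionPreset = AVCaptureSessionPresetHigh

// Inputs: default camera and microphone.
if  let camera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
    let cameraInput = try? AVCaptureDeviceInput(device: camera),
    session.canAddInput(cameraInput) {
    session.addInput(cameraInput)
}
if  let mic = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio),
    let micInput = try? AVCaptureDeviceInput(device: mic),
    session.canAddInput(micInput) {
    session.addInput(micInput)
}

// Outputs: all three attach to the same session.
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
let stillOutput = AVCaptureStillImageOutput()
stillOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

// The owning object would register for sample buffers here, e.g.:
// videoOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue)
// audioOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue)

for output in [videoOutput, audioOutput, stillOutput] as [AVCaptureOutput] {
    if session.canAddOutput(output) { session.addOutput(output) }
}

session.startRunning()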

I used AVAssetWriter to record the video and audio, because I needed to perform custom video processing. Recording happens in the AVCaptureVideoDataOutputSampleBufferDelegate method, which looks like this:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    if !isRecordingVideo {
        return
    }

    if captureOutput == self.videoOutput {
        assetVideoWriterQueue.async {
            if self.shouldStartWritingSession {
                self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
                self.shouldStartWritingSession = false
            }

            if self.assetWriterInputCamera.isReadyForMoreMediaData {
                self.assetWriterInputCamera.append(sampleBuffer)
            }
        }
    }

    if captureOutput == self.audioOutput {
        assetAudioWriterQueue.async {
            let shouldStartWritingSession = self.shouldStartWritingSession
            if self.assetWriterInputMicrophone.isReadyForMoreMediaData && shouldStartWritingSession == false {
                self.assetWriterInputMicrophone.append(sampleBuffer)
            }

            if shouldStartWritingSession {
                print("In audioOutput and CANNOT Record")
            }
        }
    }
}
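
The delegate above appends to assetWriterInputCamera and assetWriterInputMicrophone, whose setup the answer does not show. Below is a minimal sketch of how such a writer might be configured; the file type, H.264/AAC settings, and dimensions are my assumptions, not the answerer's values.

import AVFoundation

// Hypothetical writer setup for the delegate above.
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
    .appendingPathComponent("capture")
    .appendingPathExtension("mp4")
try? FileManager.default.removeItem(at: outputURL)

let assetWriter = try! AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeMPEG4)

// Video track: H.264 at an assumed portrait 720p resolution.
let videoSettings: [String : Any] = [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: 720,
    AVVideoHeightKey: 1280
]
let assetWriterInputCamera = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
assetWriterInputCamera.expectsMediaDataInRealTime = true // required for live capture

// Audio track: AAC, mono, 44.1 kHz (assumed).
let audioSettings: [String : Any] = [
    AVFormatIDKey: kAudioFormatMPEG4AAC,
    AVNumberOfChannelsKey: 1,
    AVSampleRateKey: 44100
]
let assetWriterInputMicrophone = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
assetWriterInputMicrophone.expectsMediaDataInRealTime = true

if assetWriter.canAdd(assetWriterInputCamera) { assetWriter.add(assetWriterInputCamera) }
if assetWriter.canAdd(assetWriterInputMicrophone) { assetWriter.add(assetWriterInputMicrophone) }

// startWriting() must be called before the delegate calls
// startSession(atSourceTime:) with the first video buffer.
assetWriter.startWriting()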

My still image capture looks like this:

func captureStillImage(_ completion: @escaping ((Bool, UIImage?) -> Void)) {
    guard self.state == .running else {
        completion(false, nil)
        return
    }

    backgroundQueue.async {
        let connection = self.stillImageOutput.connection(withMediaType: AVMediaTypeVideo)

        self.stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { (buffer, error) in
            defer {
                self.state = .running
            }

            guard let buffer = buffer, let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else {
                DispatchQueue.main.async {
                    completion(false, nil)
                }

                return
            }

            let image = UIImage(data: imageData)

            DispatchQueue.main.async {
                completion(true, image)
            }
        })
    }
}
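
A hypothetical call site could look like the following; camera and imageView are placeholder names, not part of the answer.

// Hypothetical usage: `camera` is an instance of the answerer's capture
// class exposing captureStillImage(_:).
camera.captureStillImage { success, image in
    guard success, let photo = image else {
        print("Still image capture failed")
        return
    }
    // The completion is dispatched to the main queue, so UI work is safe here.
    imageView.image = photo
}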

You can find out how to use an asset writer elsewhere on Stack Overflow. For instance, you may be familiar with this

This post about ios - How to capture photos and video from the same AVCaptureSession? is based on a similar question found on Stack Overflow: https://stackoverflow.com/questions/44488635/
