ios - 当我尝试运行相机编码时,当前不支持多个音频/视频 AVCaptureInputs

标签 ios swift camera

当我尝试运行相机编码时,我收到以下错误消息

"2019-05-09 23:15:48.446844+0200 testing2[514:31963] Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '* -[AVCaptureSession addInput:] Multiple audio/video AVCaptureInputs are not currently supported' * First throw call stack: libc++abi.dylib: terminating with uncaught exception of type NSException (lldb) "

我无法找出问题发生在哪里,并尝试使用断点来了解但没有成功。

还尝试过使用 info.plist 的解决方案,因此它没有任何作用。

let captureSession = AVCaptureSession()

var previewLayer:CALayer!

var captureDevice:AVCaptureDevice!

var takePhoto = false

override func viewDidLoad() {
    super.viewDidLoad()
}

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    prepareCamera()
}


func prepareCamera() {
    captureSession.sessionPreset = AVCaptureSession.Preset.photo

    let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices
    captureDevice = availableDevices.first
    beginSession()


}

func beginSession () {
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)

        captureSession.addInput(captureDeviceInput)

    }catch {
        print(error.localizedDescription)
    }


    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.previewLayer = previewLayer
    self.view.layer.addSublayer(self.previewLayer)
    self.previewLayer.frame = self.view.layer.frame
    captureSession.startRunning()

    let dataOutput = AVCaptureVideoDataOutput()
    dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString):NSNumber(value:kCVPixelFormatType_32BGRA)] as [String : Any]

    dataOutput.alwaysDiscardsLateVideoFrames = true

    if captureSession.canAddOutput(dataOutput) {
        captureSession.addOutput(dataOutput)
    }

    captureSession.commitConfiguration()


    let queue = DispatchQueue(label: "com.brianadvent.captureQueue")
    dataOutput.setSampleBufferDelegate(self, queue: queue)





}

@IBAction func takePhoto(_ sender: Any) {
    takePhoto = true

}

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

    if takePhoto {
        takePhoto = false

        if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {

            let photoVC =  UIStoryboard(name: "Main", bundle:  nil).instantiateViewController(withIdentifier: "PhotoVC") as! Viewcontroller2

            photoVC.takenPhoto = image

            DispatchQueue.main.async {
                self.present(photoVC, animated: true, completion: {
                    self.stopCaptureSession()
                })

            }
        }


    }
}


func getImageFromSampleBuffer (buffer:CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let context = CIContext()

        let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

        if let image = context.createCGImage(ciImage, from: imageRect) {
            return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
        }

    }

    return nil
}

func stopCaptureSession () {
    self.captureSession.stopRunning()

    if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
        for input in inputs {
            self.captureSession.removeInput(input)
        }
    }

}


override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()

}



















struct Constants {
    static let apiKey = "AIzaSyDtaJ5eU24rbnHsG9pb1STOizDJvqcaj5E"
    static let bundleId = "com.felibundle"
    static let searchEngineId = "016628067786358079133:2gm9usqzouc"
}
@IBAction func pish(_ sender: Any) {
    googleSearch(term: "George Bush") { results in
        print(results)}
}
func googleSearch(term: String, callback:@escaping ([(title: String, url: String)]?) -> Void) {
    let urlString = String(format: "https://cse.google.com/cse?cx=016628067786358079133:2gm9usqzouc", term, Constants.searchEngineId, Constants.apiKey)
    let encodedUrl = urlString.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed)

    guard let url = URL(string: encodedUrl ?? urlString) else {
        print("invalid url \(urlString)")
        return
    }

    let request = NSMutableURLRequest(url: url, cachePolicy: .useProtocolCachePolicy, timeoutInterval: 10)
    request.httpMethod = "GET"
    request.setValue(Constants.bundleId, forHTTPHeaderField: "X-Ios-Bundle-Identifier")

    let session = URLSession.shared
    let datatask = session.dataTask(with: request as URLRequest) { (data, response, error) in
        guard
            error == nil,
            let data = data,
            let json = try? JSONSerialization.jsonObject(with: data, options: .allowFragments) as? [String : Any]
            else {

                callback(nil)
                return
        }

        guard let items = json?["items"] as? [[String : Any]], items.count > 0 else {
            print("no results")
            return
        }

        callback(items.map { ($0["title"] as! String, $0["formattedUrl"] as! String) })
    }


    datatask.resume()
}

最佳答案

可能是这个

override func viewWillAppear(_ animated: Bool) {
   super.viewWillAppear(animated)
   prepareCamera()
}

当您在当前 vc 上方呈现 vc 并关闭它时,会多次调用它,因此您可以将其放入 viewDidLoad

关于ios - 当我尝试运行相机编码时,当前不支持多个音频/视频 AVCaptureInputs,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/56085397/

相关文章:

c++ - 没有校正图像的不失真

android - 如何使用 phonegap 在 Android 手机上显示摄像头

iphone - 更改 ISO 或快门速度

从 UIImages 创建 mov 时的 iOS 绿色边框

iOS 应用程序 - 当用户从设备中删除应用程序时进行拦截

ios - 快速查找附近的蓝牙设备

swift - 如何对工作日字符串数组进行排序?

ios - 如何使用调度组在类初始化时异步等待 Firebase 回调?

ios - 显示来自信标的数据

ios - 无法在 Swift 中的 View Controller 上获得后退按钮