ios - How to fix mirrored images captured with the front camera, AVFoundation, Swift

Tags: ios swift view camera avfoundation

How do I fix images captured with the front camera coming out mirrored? Snapchat, WhatsApp and Instagram all seem to have fixed this, so how can I do the same? I'd really like to find a solution for this... it's annoying... Thanks in advance.

I've looked at Always seeing Mirror image while capturing from Front Camera iOS 5.0, but it flips the image for both the back and the front camera, which doesn't really solve the problem. If anyone can help me figure out how to flip only the front camera image, or has any other great solution, that would be awesome!

import UIKit
import AVFoundation

@available(iOS 10.0, *)
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    // Outlets assumed to be connected in the storyboard (their declarations are not shown in the original post).
    @IBOutlet weak var containerView: UIView!
    @IBOutlet weak var DismissButton: UIButton!

    let photoSettings = AVCapturePhotoSettings()
    var audioPlayer = AVAudioPlayer()
    var captureSession = AVCaptureSession()
    var videoDeviceInput: AVCaptureDeviceInput!
    var previewLayer = AVCaptureVideoPreviewLayer()
    var frontCamera: Bool = false
    var captureDevice: AVCaptureDevice!
    var takePhoto = false

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        } 
    }

    func frontCamera(_ front: Bool) {
        let devices = AVCaptureDevice.devices()

        do {
            // Remove the current camera input before switching to the other camera.
            try captureSession.removeInput(AVCaptureDeviceInput(device: captureDevice!))
        } catch {
            print("Error removing current camera input: \(error)")
        }

        for device in devices!{
            if((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
                if front{
                    if (device as AnyObject).position == AVCaptureDevicePosition.front {
                        captureDevice = device as? AVCaptureDevice

                        do{
                            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                        }catch{}
                        break
                    }
                }else{
                    if (device as AnyObject).position == AVCaptureDevicePosition.back {
                        captureDevice = device as? AVCaptureDevice

                        do{
                            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                        }catch{}
                        break
                    }
                }
            }
        }
    }

    func beginSession () {
        captureSession.beginConfiguration()

        do {
            // Add the selected camera as an input; without this the session has nothing to capture.
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }
        } catch {
            print("Error creating capture device input: \(error)")
            return
        }

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            containerView.layer.addSublayer(previewLayer)
            self.previewLayer.frame = self.view.layer.frame
            self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
        }

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
        dataOutput.alwaysDiscardsLateVideoFrames = true

        if captureSession.canAddOutput(dataOutput) {
            captureSession.addOutput(dataOutput)

            photoSettings.isHighResolutionPhotoEnabled = true
            photoSettings.isAutoStillImageStabilizationEnabled = true
        }

        captureSession.commitConfiguration()

        let queue = DispatchQueue(label: "com.NightOut.captureQueue")
        dataOutput.setSampleBufferDelegate(self, queue: queue)

        // Start the session only after the configuration has been committed.
        captureSession.startRunning()
    }

    @IBAction func takePhoto(_ sender: Any) {
        takePhoto = true

        photoSettings.isHighResolutionPhotoEnabled = true
        photoSettings.isAutoStillImageStabilizationEnabled = true
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if takePhoto {
            takePhoto = false
            if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
                // UIKit work (instantiating and presenting the preview controller) belongs on the main queue.
                DispatchQueue.main.async {
                    let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController
                    photoVC.takenPhoto = image

                    self.present(photoVC, animated: true, completion: {
                        self.stopCaptureSession()
                    })
                }
            }
        }
    }

    func getImageFromSampleBuffer (buffer: CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()

            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
            }
        }
        return nil
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        self.captureSession.stopRunning()
    }

    func stopCaptureSession () {
        self.captureSession.stopRunning()

        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    @IBAction func DismissButtonAction(_ sender: UIButton) {

        UIView.animate(withDuration: 0.1, animations: {
            self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
        }, completion: { (finish) in
            UIView.animate(withDuration: 0.1, animations: {
                self.DismissButton.transform = CGAffineTransform.identity
            })
        })
        performSegue(withIdentifier: "Segue", sender: nil)
    }
}
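
Aside from re-orienting the finished UIImage (which is what the accepted answer below does), mirroring can also be controlled on the AVCaptureConnection that feeds the video data output. This is only a minimal sketch, assuming dataOutput is the AVCaptureVideoDataOutput created in beginSession() and that it runs after switching cameras; whether you want the delivered buffers mirrored or not depends on whether the saved photo should match the mirrored preview:

func updateMirroring(on dataOutput: AVCaptureVideoDataOutput, usingFrontCamera front: Bool) {
    guard let connection = dataOutput.connection(withMediaType: AVMediaTypeVideo),
        connection.isVideoMirroringSupported else { return }

    // Manual mirroring control requires switching the automatic behaviour off first.
    connection.automaticallyAdjustsVideoMirroring = false

    // Mirror (or un-mirror) the delivered sample buffers only for the front camera.
    connection.isVideoMirrored = front
}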

Best answer

I figured this out myself; here is the solution:

if captureDevice.position == AVCaptureDevicePosition.back {
    if let image = context.createCGImage(ciImage, from: imageRect) {
        return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
    }
}
                
if captureDevice.position == AVCaptureDevicePosition.front {
    if let image = context.createCGImage(ciImage, from: imageRect) {
        return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
    }
}
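
For context, this is how the two branches slot into getImageFromSampleBuffer from the question (a sketch, assuming captureDevice still points at the camera currently feeding the session):

func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) else { return nil }

    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    let context = CIContext()
    let imageRect = CGRect(x: 0, y: 0,
                           width: CVPixelBufferGetWidth(pixelBuffer),
                           height: CVPixelBufferGetHeight(pixelBuffer))

    guard let cgImage = context.createCGImage(ciImage, from: imageRect) else { return nil }

    // Back camera: plain rotation into portrait, no mirroring.
    if captureDevice.position == AVCaptureDevicePosition.back {
        return UIImage(cgImage: cgImage, scale: UIScreen.main.scale, orientation: .right)
    }

    // Front camera: mirrored orientation compensates for the selfie flip.
    if captureDevice.position == AVCaptureDevicePosition.front {
        return UIImage(cgImage: cgImage, scale: UIScreen.main.scale, orientation: .leftMirrored)
    }

    return nil
}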

This question about ios - How to fix mirrored images captured with the front camera, AVFoundation, Swift is based on a similar question on Stack Overflow: https://stackoverflow.com/questions/45223068/
