我目前正在我的 Swift 应用程序中处理 QR 扫描 View 。 我想将 VideoPreview 置于 View 中间。
View 看起来像这样:
白色的 View 称为 ScanView,我想让视频预览(VideoPreview)与 ScanView 大小相同,并使其在 ScanView 中居中。
代码片段:
感谢您的帮助!
最佳答案
这是一个可行的解决方案:
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate {

    /// Container view (laid out in the Storyboard) that the camera preview should fill.
    @IBOutlet weak var innerView: UIView!

    var session: AVCaptureSession?
    var input: AVCaptureDeviceInput?
    var previewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()
        createSession()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // innerView only has its final size after Auto Layout runs; re-sync the
        // preview here. `bounds` (not `frame`) is correct for a sublayer of
        // innerView.layer — it keeps the layer anchored at the view's origin.
        self.previewLayer?.frame = self.innerView.bounds
    }

    /// Builds the capture session: camera input, preview layer, a raw-frame
    /// output (solution 1) and a QR-code metadata output (solution 2).
    private func createSession() {
        do {
            let session = AVCaptureSession()
            self.session = session

            guard let device = AVCaptureDevice.default(for: AVMediaType.video) else {
                // No camera (e.g. Simulator) — nothing to set up.
                return
            }
            let input = try AVCaptureDeviceInput(device: device)
            self.input = input
            session.addInput(input)

            let previewLayer = AVCaptureVideoPreviewLayer(session: session)
            previewLayer.frame = self.innerView.bounds
            // resizeAspectFill fills innerView completely, cropping the video
            // edges if the aspect ratios differ — this is what centers/fills it.
            previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.innerView.layer.addSublayer(previewLayer)
            self.previewLayer = previewLayer

            //______ 1. solution with Video camera ______//
            let videoDataOutput = AVCaptureVideoDataOutput()
            videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
            // BUG FIX: the original ignored canAddOutput's result and then
            // redeclared `videoOutput` for the metadata output below — an
            // "invalid redeclaration" compile error. Distinct names + guarded adds.
            if session.canAddOutput(videoDataOutput) {
                session.addOutput(videoDataOutput)
            }

            //______ 2. solution with QR code ______//
            let metadataOutput = AVCaptureMetadataOutput()
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            if session.canAddOutput(metadataOutput) {
                session.addOutput(metadataOutput)
                // Must be set AFTER the output joins the session, otherwise the
                // available types list is empty.
                // explanation here: https://stackoverflow.com/a/35642852/2450755
                metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
            }

            // BUG FIX: the original called startRunning() twice. Start once, off
            // the main thread — startRunning() blocks until the pipeline is up.
            DispatchQueue.global(qos: .userInitiated).async {
                session.startRunning()
            }
        } catch {
            // Don't silently swallow the failure (original used `catch _ {}`).
            print("Failed to create capture session: \(error)")
        }
    }

    //MARK: AVCaptureVideoDataOutputSampleBufferDelegate

    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)
            // awesome stuff here
            _ = cameraImage
        }
    }

    //MARK: AVCaptureMetadataOutputObjectsDelegate

    /// BUG FIX: the original declared an empty `setMetadataObjectsDelegate(_:queue:)`
    /// method — that is the *output's* registration method, not the delegate
    /// callback — so detected QR codes were never delivered. This is the actual
    /// delegate method AVFoundation calls with recognized metadata objects.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        for object in metadataObjects {
            if let qrCode = object as? AVMetadataMachineReadableCodeObject, qrCode.type == .qr {
                // qrCode.stringValue holds the decoded QR payload.
                _ = qrCode.stringValue
            }
        }
    }
}
要求:
- 在 Info.plist 中设置"隐私 - 相机使用说明"(NSCameraUsageDescription),否则应用会在访问相机时崩溃。
- innerView 必须先初始化;我是用 Storyboard 创建的,并添加了以下约束:
这里是结果:
关于ios - AVCaptureVideoPreviewLayer 的比例,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/47850367/