目标:我正在使用 AVFoundation 创建自定义相机,其行为类似于 Facebook、Instagram 和 Snapchat 相机图像捕获序列。
下面是我的具有理想用户体验的 Controller :
- 用户按下加号按钮
- 应用使用 AVCaptureSession 和 AVCaptureVideoPreviewLayer 转入/转换到自定义相机 View Controller ,显示为紫色区域
- 用户按下拍摄按钮来捕获图像
- 应用程序会转到刚刚拍摄的照片的静态/ ImageView Controller ,以便用户可以编辑或执行其他必须发生的操作
- 用户按下使用按钮
- 应用程序保存图像并弹出到根ViewController
Here is my Swift Storyboard of the above
问题:我能够使用 AVCaptureVideoPreviewLayer 获取实时视频源,但是一旦捕获图片,我无法将捕获的 UIImage 传输到第二个 ViewController。我使用的 segue 在 captureStillImageAsynchronouslyFromConnection 的完成回调结束时触发。
这是 MasterViewController:
/// Camera capture screen: shows a live AVCaptureVideoPreviewLayer and, on the
/// shutter button, captures a still JPEG and segues to the detail controller.
class AddPhotoViewController: UIViewController {
    @IBOutlet var previewLayerView: UIView!
    var captureSession: AVCaptureSession?
    var previewLayer: AVCaptureVideoPreviewLayer?
    var stillImageOutput: AVCaptureStillImageOutput?
    /// The most recently captured photo, handed to the detail controller in prepareForSegue.
    var imageDetail: UIImage?

    /// Dismisses the camera without capturing.
    @IBAction func cancelCameraBtn(sender: AnyObject) {
        self.navigationController?.popToRootViewControllerAnimated(true)
    }

    /// Captures a still image and segues to the detail screen on success.
    @IBAction func takePhotoBtn(sender: AnyObject) {
        // Guard instead of force-unwrapping: the output is nil if session setup failed.
        guard let output = stillImageOutput,
            videoConnection = output.connectionWithMediaType(AVMediaTypeVideo) else { return }
        videoConnection.videoOrientation = AVCaptureVideoOrientation.Portrait
        output.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { [weak self] (sampleBuffer, error) in
            // The completion handler is not guaranteed to run on the main queue.
            guard let strongSelf = self where error == nil && sampleBuffer != nil else { return }
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
            guard let dataProvider = CGDataProviderCreateWithCFData(imageData),
                cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault) else { return }
            // Portrait captures come back rotated; re-orient with .Right as before.
            let image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right)
            // UI work (the segue) must happen on the main queue.
            dispatch_async(dispatch_get_main_queue()) {
                strongSelf.imageDetail = image
                strongSelf.performSegueWithIdentifier("captureSessionDetailSegue", sender: strongSelf)
            }
        })
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func viewWillDisappear(animated: Bool) {
        super.viewWillDisappear(animated) // was missing: always forward lifecycle calls
        captureSession?.stopRunning()
        self.navigationController?.setNavigationBarHidden(false, animated: false)
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        self.navigationController?.setNavigationBarHidden(true, animated: false)

        // Build the capture pipeline: session -> back-camera input -> JPEG still output.
        let session = AVCaptureSession()
        session.sessionPreset = AVCaptureSessionPresetPhoto
        captureSession = session

        let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let input: AVCaptureDeviceInput
        do {
            input = try AVCaptureDeviceInput(device: backCamera)
        } catch {
            // No camera available or access denied; leave the session unconfigured.
            return
        }
        guard session.canAddInput(input) else { return }
        session.addInput(input)

        let output = AVCaptureStillImageOutput()
        output.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        guard session.canAddOutput(output) else { return }
        session.addOutput(output)
        stillImageOutput = output

        previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.Portrait
        if let layer = previewLayer {
            previewLayerView.layer.addSublayer(layer)
        }
        session.startRunning()
    }

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        // The container view has its final bounds only after layout.
        previewLayer?.frame = previewLayerView.bounds
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - Navigation

    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
        guard segue.identifier == "captureSessionDetailSegue" else { return }
        guard let destination = segue.destinationViewController as? CaptureSessionDetailViewController else { return }
        // Force the destination's view to load so its outlets are connected;
        // touching `capturedImage` before viewDidLoad was the nil-unwrap crash.
        _ = destination.view
        destination.capturedImage.image = self.imageDetail
    }
}
这是 DetailViewController:
/// Detail screen that displays the photo captured on the previous screen.
class CaptureSessionDetailViewController: UIViewController {
    @IBOutlet var capturedImage: UIImageView!

    /// Set by the presenting controller BEFORE this view loads. Outlets such as
    /// `capturedImage` are nil until viewDidLoad runs, so the image must be
    /// stashed here and applied to the image view once the view exists.
    var imageDetail: UIImage?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Apply the handed-over image now that the outlet is connected.
        capturedImage.image = imageDetail
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
我当前的代码产生 fatal error :在解包可选值时意外发现 nil。我认为这是因为我的prepareForSegue方法设置了一些尚不存在的东西,但我不知道如何将图像获取到所需的DetailViewController。
如何才能达到我想要的结果?
最佳答案
我的解决方案使用了上述用户 (Dharmesh Kheni) 和 DBCamera custom camera github 的设计模式.
在AddPhotoViewController
/// Captures a still JPEG, stores its raw data in the class variable
/// `imgMetaData: NSData!`, then segues to the detail controller, which
/// converts the data to a UIImage in its viewDidLoad.
@IBAction func takePhotoBtn(sender: AnyObject) {
    // Guard instead of force-unwrapping: the output is nil if session setup failed.
    guard let output = stillImageOutput,
        videoConnection = output.connectionWithMediaType(AVMediaTypeVideo) else { return }
    videoConnection.videoOrientation = AVCaptureVideoOrientation.Portrait
    output.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { [weak self] (sampleBuffer, error) in
        // Ignore failed captures; the handler may run off the main queue.
        guard let strongSelf = self where error == nil && sampleBuffer != nil else { return }
        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
        // Triggering a segue is UI work — hop to the main queue first.
        dispatch_async(dispatch_get_main_queue()) {
            strongSelf.imgMetaData = imageData
            strongSelf.performSegueWithIdentifier("captureSessionDetailSegue", sender: strongSelf)
        }
    })
}
/// Hands the captured JPEG data to the detail controller. Only raw NSData is
/// passed here because the destination's outlets are not loaded yet; the
/// destination builds the UIImage itself in viewDidLoad.
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
    // Conditional cast instead of `as!`: a mismatched destination no longer crashes.
    guard let destination = segue.destinationViewController as? CaptureSessionDetailViewController
        where segue.identifier == "captureSessionDetailSegue" else { return }
    destination.capturedImageMetaData = self.imgMetaData
}
在CaptureSessionDetailViewController
class CaptureSessionDetailViewController: UIViewController {
var capturedImageMetaData: NSData!
@IBOutlet var capturedImage: UIImageView!
/// Converts the JPEG data received in prepareForSegue into a UIImage and
/// shows it. Every step is guarded instead of force-unwrapped, so nil or
/// undecodable data no longer crashes the controller.
override func viewDidLoad() {
    super.viewDidLoad()
    guard let data = capturedImageMetaData,
        dataProvider = CGDataProviderCreateWithCFData(data),
        cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault) else { return }
    // Portrait captures come back rotated; apply .Right as the capture side expects.
    capturedImage.image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right)
    // Do any additional setup after loading the view.
}
来自 AVCaptureStillImageOutput 的图像数据被赋给 AddPhotoViewController 中的类变量 imgMetaData: NSData!。数据通过 prepareForSegue 传输到目标 View Controller CaptureSessionDetailViewController,并存储在 capturedImageMetaData: NSData! 中。然后在 viewDidLoad 方法中将数据转换为 UIImage。
关于ios - 如何使用 AVFoundation 捕获静态图像并使用 Swift 在另一个 View Controller 中显示图像,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/33272864/