ios - didOutputSampleBuffer delegate not called

Tags: ios swift delegates buffer avfoundation

The didOutputSampleBuffer delegate method in my code is never called, and I can't figure out why. Here is the code:

import UIKit
import AVFoundation
import Accelerate

class ViewController: UIViewController {

var captureSession: AVCaptureSession?
var dataOutput: AVCaptureVideoDataOutput?
var customPreviewLayer: AVCaptureVideoPreviewLayer?

@IBOutlet weak var camView: UIView!

override func viewWillAppear(animated: Bool) {
    super.viewDidAppear(animated)
    captureSession?.startRunning()
    //setupCameraSession()
}

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.
    //captureSession?.startRunning()
    setupCameraSession()
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

func setupCameraSession() {
    // Session
    self.captureSession = AVCaptureSession()
    captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080
    // Capture device
    let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    var deviceInput = AVCaptureDeviceInput()

    do {
        deviceInput = try AVCaptureDeviceInput(device: inputDevice)
    } catch let error as NSError {
        print(error)
    }
    if captureSession!.canAddInput(deviceInput) {
        captureSession!.addInput(deviceInput)
    }
    // Preview

    self.customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.customPreviewLayer!.frame = camView.bounds
    self.customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
    self.customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
    camView.layer.addSublayer(self.customPreviewLayer!)
    print("Cam layer added")

    self.dataOutput = AVCaptureVideoDataOutput()
    self.dataOutput!.videoSettings = [
        String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    ]

    dataOutput!.alwaysDiscardsLateVideoFrames = true
    if captureSession!.canAddOutput(dataOutput) {
        captureSession!.addOutput(dataOutput)
    }
    captureSession!.commitConfiguration()
    let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
    let delegate = VideoDelegate()
    dataOutput!.setSampleBufferDelegate(delegate, queue: queue)
}




 func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
    let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
    CVPixelBufferLockBaseAddress(imageBuffer, 0)
    // For the iOS the luma is contained in full plane (8-bit)
    let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
    let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
    let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
    let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
    let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
    let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.NoneSkipFirst.rawValue)!
    let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
    dispatch_sync(dispatch_get_main_queue(), {() -> Void in
        self.customPreviewLayer!.contents = dstImageFilter as AnyObject
    })

}


}

And here is my VideoDelegate code:

import Foundation
import AVFoundation
import UIKit

// Video Delegate
class VideoDelegate : NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
{

    func captureOutput(captureOutput: AVCaptureOutput!,
        didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
        fromConnection connection: AVCaptureConnection!){
            print("hihi")

    }


    func captureOutput(captureOutput: AVCaptureOutput!,
        didDropSampleBuffer sampleBuffer: CMSampleBuffer!,
        fromConnection connection: AVCaptureConnection!){

            print("LOL")
    }


}

Why isn't my delegate being called, and how can I fix it? I have looked at similar questions on Stack Overflow, but I couldn't find a solution. Please help.

Best Answer

I found the problem! setSampleBufferDelegate does not keep a strong reference to its delegate, so the VideoDelegate instance created as a local variable inside setupCameraSession() is deallocated as soon as the method returns, and the callbacks are never delivered. The fix is to make the view controller itself conform to AVCaptureVideoDataOutputSampleBufferDelegate and pass self as the delegate (or to keep a strong reference to the delegate object). Here is the modified code:

import UIKit
import AVFoundation
import Accelerate

var customPreviewLayer: AVCaptureVideoPreviewLayer?

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

var captureSession: AVCaptureSession?
var dataOutput: AVCaptureVideoDataOutput?
//var customPreviewLayer: AVCaptureVideoPreviewLayer?

@IBOutlet weak var camView: UIView!

override func viewWillAppear(animated: Bool) {
    super.viewWillAppear(animated)
    //setupCameraSession()
}

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.
    //captureSession?.startRunning()
    setupCameraSession()
    self.captureSession?.startRunning()
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

func setupCameraSession() {
    // Session
    self.captureSession = AVCaptureSession()
    self.captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080
    // Capture device
    let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    var deviceInput = AVCaptureDeviceInput()
    // Device input
    //var deviceInput: AVCaptureDeviceInput? = AVCaptureDeviceInput.deviceInputWithDevice(inputDevice, error: error)
    do {
        deviceInput = try AVCaptureDeviceInput(device: inputDevice)

    } catch let error as NSError {
        // Handle errors
        print(error)
    }
    if self.captureSession!.canAddInput(deviceInput) {
        self.captureSession!.addInput(deviceInput)
    }
    // Preview
    customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    customPreviewLayer!.frame = camView.bounds
    customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
    customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
    self.camView.layer.addSublayer(customPreviewLayer!)
    print("Cam layer added")

    self.dataOutput = AVCaptureVideoDataOutput()
    self.dataOutput!.videoSettings = [
        String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    ]

    self.dataOutput!.alwaysDiscardsLateVideoFrames = true
    if self.captureSession!.canAddOutput(dataOutput) {
        self.captureSession!.addOutput(dataOutput)
    }
    self.captureSession!.commitConfiguration()
    let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
    //let delegate = VideoDelegate()
    self.dataOutput!.setSampleBufferDelegate(self, queue: queue)
}




 func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
    print("buffered")
    let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
    CVPixelBufferLockBaseAddress(imageBuffer, 0)
    let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
    let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
    let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
    let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
    let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
    // An 8-bit grayscale bitmap context must use CGImageAlphaInfo.None;
    // other alpha options make CGBitmapContextCreate return nil and the force unwrap crash.
    let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.None.rawValue)!

    let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
    // Balance the earlier CVPixelBufferLockBaseAddress call.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
    dispatch_sync(dispatch_get_main_queue(), {() -> Void in
        customPreviewLayer!.contents = dstImageFilter as AnyObject
    })
}



}
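
For reference, the original approach with a separate VideoDelegate class also works, as long as something keeps a strong reference to the delegate, because setSampleBufferDelegate only stores a weak reference. Below is a minimal sketch of that alternative (Swift 2 syntax to match the rest of the post; the class name StrongDelegateViewController and the property name videoDelegate are illustrative, and VideoDelegate is the class shown earlier):

import UIKit
import AVFoundation

// Sketch only: the externally defined VideoDelegate survives because the
// view controller owns it for its whole lifetime instead of creating it
// as a local variable.
class StrongDelegateViewController: UIViewController {

    var captureSession: AVCaptureSession?
    var dataOutput: AVCaptureVideoDataOutput?
    let videoDelegate = VideoDelegate()   // strong reference, not a local variable

    func setupDataOutput() {
        let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        self.dataOutput = AVCaptureVideoDataOutput()
        // The data output only keeps a weak reference to this delegate,
        // so it must be owned by something longer-lived (here: the view controller).
        self.dataOutput!.setSampleBufferDelegate(videoDelegate, queue: queue)
    }
}

Either way, the important point is that the delegate object must outlive the capture session; otherwise the callbacks silently stop.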

Regarding "ios - didOutputSampleBuffer delegate not called", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/34572893/
