ios - Vision + ARKit 对象跟踪没有返回结果

标签 ios swift xcode arkit

我正在开发一款 Vision 应用,该应用集成了用于测量距离的 ARKit。我在使用 Vision 跟踪对象时遇到错误,导致来自 VNSequenceRequestHandler 的结果为空。我将包含 View Controller 的代码。请注意,其中大部分内容基于在线提供的示例代码。

import UIKit
import SceneKit
import ARKit
import Vision

class ViewController: UIViewController, ARSCNViewDelegate {
    @IBOutlet weak var highlightView: UIView!
    var sceneSphere = SCNNode()

    @IBOutlet var sceneView: ARSCNView!
    private var lastObservation: VNDetectedObjectObservation?
    private let visionSequenceHandler = VNSequenceRequestHandler()

    override func viewDidLoad() {
        super.viewDidLoad()

        // Set the view's delegate
        sceneView.delegate = self
        // Show statistics such as fps and timing information
        sceneView.showsStatistics = true

        // Create a new scene
        let scene = SCNScene(named: "art.scnassets/VisionScene.scn")!

        // Set the scene to the view
        sceneView.scene = scene
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        // Create a session configuration
        let configuration = ARWorldTrackingConfiguration()

        // Run the view's session
        sceneView.session.run(configuration)
        sceneSphere = (sceneView.scene.rootNode.childNode(withName: "sphere", recursively: false))!
        sceneSphere.scale = SCNVector3.init(0.5, 0.5, 0.5)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        // Pause the view's session
        sceneView.session.pause()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Release any cached data, images, etc that aren't in use.
    }

    // MARK: - ARSCNViewDelegate

/*
    // Override to create and configure nodes for anchors added to the view's session.
    func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
        let node = SCNNode()

        return node
    }
*/
    @IBAction func userTapped(_ sender: UITapGestureRecognizer) {
        //print(sceneView.hitTest(sender.location(in: sceneView), types: ARHitTestResult.ResultType.featurePoint))
        print(sender.location(ofTouch: 0, in: view))
        let newRect = CGRect(x: (sender.location(ofTouch: 0, in: sceneView)).x, y: (sender.location(ofTouch: 0, in: sceneView)).y, width: 100, height: 100)
        highlightView.frame = newRect
        let newObservation = VNDetectedObjectObservation(boundingBox: newRect)
        sceneSphere.position.x = Float(newObservation.boundingBox.midX)
        sceneSphere.position.y = Float(newObservation.boundingBox.midY)
        self.lastObservation = newObservation
    }

    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user

    }

    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted, for example, by presenting an overlay

    }

    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and/or remove existing anchors if consistent tracking is required

    }
}
extension ViewController: SCNSceneRendererDelegate {

    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
        //print("updateFrame1")
        guard
            // make sure the pixel buffer can be converted
            let pixelBuffer = (sceneView.session.currentFrame?.capturedImage),
            // make sure that there is a previous observation we can feed into the request
            let lastObservation = self.lastObservation
            else {print("nolastrequest"); return }
        print("lO2:\(lastObservation)")
        print("lO3:\(lastObservation.boundingBox)")
        print(pixelBuffer)
        let request = VNTrackObjectRequest(detectedObjectObservation: lastObservation, completionHandler: self.handleVisionRequestUpdate)
        // set the accuracy to high
        // this is slower, but it works a lot better
        request.trackingLevel = .accurate
        do {
            print(pixelBuffer)
            print(request)
            try self.visionSequenceHandler.perform([request], on: pixelBuffer)
        } catch {
            print("Throws: \(error)")
        }
    }
    private func handleVisionRequestUpdate(_ request: VNRequest, error: Error?) {
        print("HandleVisionRequest")
        print(request.results?.first)
        print(request)
        // Dispatch to the main queue because we are touching non-atomic, non-thread safe properties of the view controller
        DispatchQueue.main.async {
            // make sure we have an actual result
            print("lO1:\(request.results?.first)")
            print("nORC: \(request.results)")
            //let newObservation = request.results?.first as! VNDetectedObjectObservation
            //print(newObservation.boundingBox.midX)
            //print(self.lastObservation?.boundingBox.midX)
            // prepare for next loop
            if request.results?.first != nil {
            self.lastObservation = newObservation
            }

            // check the confidence level before updating the UI
           /* guard newObservation.confidence >= 0.3 else {
                // hide the rectangle when we lose accuracy so the user knows something is wrong
                self.highlightView?.frame = .zero
                return
            }*/

            // calculate view rect
            //var transformedRect = newObservation.boundingBox
            //transformedRect.origin.y = 1 - transformedRect.origin.y
         //   let convertedRect = self.cameraLayer.layerRectConverted(fromMetadataOutputRect: transformedRect)
            /*// move the highlight view
            let xChange = ((self.highlightView?.frame.midX)!-convertedRect.midX)
            let yChange = ((self.highlightView?.frame.midY)!-convertedRect.midY)
            var inchChange = ((xChange + yChange)/72)
            inchChange = abs(round(inchChange*60))
            print(inchChange)
            self.speedLabal.text = "\(inchChange) In/sec"*/
            //self.highlightView?.frame = convertedRect
        }
    }

}

如果我在 handleVisionRequestUpdate 中禁用 self.lastObservation = newObservation 行,则不会发生此错误,但应用程序不会按预期运行。

每当问题发生时,print("nORC:\(request.results)") 的结果都是nil。如果有帮助,我可以上传整个项目。我会对此提出任何反馈意见。提前致谢!

最佳答案

与 UIKit 相比,Vision Framework 使用不同的坐标系。 UIKit 的原点在左上角,最大宽度和高度值是屏幕尺寸的点数。 然而,Vision Framework 的原点在左下方,宽度和高度的最大值为 1。 这就是您的代码无法正常工作的原因。

在将 lastObservation.boundingBox 属性传递给 VNTrackObjectRequest 之前将其转换为 UIKit 坐标。

关于ios - Vision + ARKit 对象跟踪没有返回结果,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/47743102/

相关文章:

ios - Closed::Creating a UIButton horizo​​ntal carousel with swipe and tap 手势

ios - 在 nil 中的 uiview 之间传输数据

ios - 使用 XLPagerTabStrip 在选项卡之间共享数据

iphone - 自动更改到下一个 ViewController

mysql - 如何从 iPhone 应用程序在 mysql rds amazon 中获取或插入数据

ios - 使用 UICollectionViewDiffableDataSource 和 NSFetchedResultsController 重新排序单元格

ios - 在 Swift 中重新创建 Python 的输入语句

ios - 在viewdidload中加载后不要重新添加mapview

iphone - Storyboard和 uitableview 困境

ios - 如何使图像对应于 NSMutableArray 中的每个对象 - xcode 7.3