首页 > 解决方案 > 如何在 iOS 上获取相机校准数据?又名 AVCameraCalibrationData

问题描述

我正在开发一个需要AVCameraCalibrationData. 我的应用程序崩溃并出现异常。

AVCaptureDataOutputSynchronizer initWithDataOutputs:] Unsupported AVCaptureOutput in dataOutputs - <AVCapturePhotoOutput: 0x283d6ab80>'

我尝试了其他一些解决方法,但depthDataOutput没有被调用。请看我的相机configuration。任何帮助将非常感激。


class ViewController: UIViewController {

    @IBOutlet var image_view: UIImageView!
    @IBOutlet var capture_button: UIButton!

    var captureSession: AVCaptureSession?
    var sessionOutput: AVCapturePhotoOutput?
    var depthOutput: AVCaptureDepthDataOutput?
    var previewLayer: AVCaptureVideoPreviewLayer?
    var outputSynchronizer: AVCaptureDataOutputSynchronizer?

    let dataOutputQueue = DispatchQueue(label: "data queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)

    @IBAction func capture(_ sender: Any) {

        self.sessionOutput?.capturePhoto(with: AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]), delegate: self)

    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {

        self.previewLayer?.removeFromSuperlayer()
        self.image_view.image = UIImage(data: photo.fileDataRepresentation()!)

        print(photo.cameraCalibrationData)  // is nil

        let depth_map = photo.depthData?.depthDataMap
        print("depth_map:", depth_map) // is nil

    }

    func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {

        print("depth data") // never called

    }

    override func viewDidLoad() {
        super.viewDidLoad()

        self.captureSession = AVCaptureSession()
        self.captureSession?.sessionPreset = .photo

        self.sessionOutput = AVCapturePhotoOutput()
        self.depthOutput = AVCaptureDepthDataOutput()
        self.depthOutput?.setDelegate(self, callbackQueue: dataOutputQueue)

        do {

            let device =  AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
            let input = try AVCaptureDeviceInput(device: device!)
            if(self.captureSession?.canAddInput(input))!{
                self.captureSession?.addInput(input)

                if(self.captureSession?.canAddOutput(self.sessionOutput!))!{
                    self.captureSession?.addOutput(self.sessionOutput!)


                    if(self.captureSession?.canAddOutput(self.depthOutput!))!{
                        self.captureSession?.addOutput(self.depthOutput!)

                        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession!)
                        self.previewLayer?.frame = self.image_view.bounds
                        self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
                        self.previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                        self.image_view.layer.addSublayer(self.previewLayer!)

                    }

                }

            }
            if sessionOutput!.isDepthDataDeliverySupported {
                sessionOutput?.isDepthDataDeliveryEnabled = true
                depthOutput?.connection(with: .depthData)!.isEnabled = true
                depthOutput?.isFilteringEnabled = true
                outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [sessionOutput!, depthOutput!]) // app crash on that line of code
                outputSynchronizer!.setDelegate(self, queue: self.dataOutputQueue)
            }

        } catch {}

        self.captureSession?.startRunning()

    }

}

@available(iOS 11.0, *)
extension ViewController: AVCaptureDataOutputSynchronizerDelegate , AVCaptureDepthDataOutputDelegate, AVCapturePhotoCaptureDelegate {
    @available(iOS 11.0, *)
    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {

    }
}

标签: iosswiftavfoundation

解决方案


推荐阅读