Camera Depth Mode

In iOS 11, Apple introduced a new class for describing depth data: AVDepthData. It provides a set of properties and methods for retrieving the depth CVPixelBuffer, the depth data type, and related metadata. AVDepthData describes depth data on iOS, macOS, and Apple's other platforms, and belongs to the AVFoundation framework.
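As a minimal sketch of what the class exposes, here is a hypothetical helper (inspect(_:) is not an API, just an illustration) that reads the most commonly used properties of an AVDepthData instance, e.g. one delivered by the capture callbacks shown below:

import AVFoundation

// Hypothetical helper: dump the commonly used AVDepthData properties.
func inspect(_ depthData: AVDepthData) {
    // The raw depth/disparity map as a CVPixelBuffer
    let map = depthData.depthDataMap
    print("size:", CVPixelBufferGetWidth(map), "x", CVPixelBufferGetHeight(map))

    // The pixel format of the map, e.g. kCVPixelFormatType_DisparityFloat16
    print("type:", depthData.depthDataType)

    // Quality and accuracy metadata
    print("quality:", depthData.depthDataQuality == .high ? "high" : "low")
    print("accuracy:", depthData.depthDataAccuracy == .absolute ? "absolute" : "relative")

    // Convert to 32-bit float depth if another format was delivered
    let float32 = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
    _ = float32.depthDataMap
}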

1. The Depth Camera

import AVFoundation

// Create the AVCaptureSession instance
captureSession = AVCaptureSession()
// Configure the AVCaptureDeviceInput. Front: builtInTrueDepthCamera; Back: builtInDualWideCamera
guard let videoDevice = AVCaptureDevice.default(.builtInTrueDepthCamera, for: .depthData, position: .front),
      let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
      captureSession.canAddInput(videoInput) else {
    print("Failed to set up video input")
    return
}
captureSession.addInput(videoInput)
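If you need to fall back between the front TrueDepth camera and the rear dual-wide camera, an AVCaptureDevice.DiscoverySession is a common alternative; a sketch (the front-first preference is just an example):

let discovery = AVCaptureDevice.DiscoverySession(
    deviceTypes: [.builtInTrueDepthCamera, .builtInDualWideCamera],
    mediaType: .video,
    position: .unspecified)
// Prefer the front TrueDepth camera, otherwise take any depth-capable device found
let device = discovery.devices.first { $0.position == .front } ?? discovery.devices.first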

2. Using AVCaptureDepthDataOutput

let depthDataOutput = AVCaptureDepthDataOutput()
depthDataOutput.setDelegate(self, callbackQueue: DispatchQueue.main)

if captureSession.canAddOutput(depthDataOutput) {
    captureSession.addOutput(depthDataOutput)
}
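Optionally, you can also enable temporal filtering on the output, which smooths the depth map over time and fills holes (whether you want this depends on the use case; leave it off if you need raw per-frame depth):

// Smooth depth over time and fill holes; disable for raw per-frame depth
depthDataOutput.isFilteringEnabled = true
// Make sure the depth connection itself is active
if let connection = depthDataOutput.connection(with: .depthData) {
    connection.isEnabled = true
}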

// AVCaptureDepthDataOutputDelegate
func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {
    // The depth data as a CVPixelBuffer
    let depthDataMap = depthData.depthDataMap
}
Note: AVCaptureDepthDataOutput and AVCaptureVideoDataOutput do not run at the same frame rate; depth frames are typically delivered less often than video frames.
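You can inspect (and choose) the depth format, and with it the delivered depth frame rate, through the capture device; a sketch assuming the videoDevice from step 1 (the Float32 preference is just an example):

// Depth formats supported by the device's current video format
let depthFormats = videoDevice.activeFormat.supportedDepthDataFormats
for format in depthFormats {
    print(format.formatDescription, format.videoSupportedFrameRateRanges)
}
// Pick, e.g., the highest-resolution Float32 depth format
if let best = depthFormats
    .filter({ CMFormatDescriptionGetMediaSubType($0.formatDescription) == kCVPixelFormatType_DepthFloat32 })
    .max(by: { CMVideoFormatDescriptionGetDimensions($0.formatDescription).width <
               CMVideoFormatDescriptionGetDimensions($1.formatDescription).width }) {
    do {
        try videoDevice.lockForConfiguration()
        videoDevice.activeDepthDataFormat = best
        videoDevice.unlockForConfiguration()
    } catch {
        print("Could not lock device for configuration: \(error)")
    }
}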

3. Keeping the Two Outputs in Sync

dataOutputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoOutput, depthDataOutput])
dataOutputSynchronizer.setDelegate(self, queue: DispatchQueue.main)

// AVCaptureDataOutputSynchronizerDelegate
func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {

    var depthDataWasDropped = true
    var sampleBufferWasDropped = true
    var depthData: AVDepthData?
    var videoData: CMSampleBuffer?

    if let depthBufferData = synchronizedDataCollection.synchronizedData(for: self.depthDataOutput) as? AVCaptureSynchronizedDepthData {
        depthData = depthBufferData.depthData
        depthDataWasDropped = depthBufferData.depthDataWasDropped
    }

    if let videoBufferData = synchronizedDataCollection.synchronizedData(for: self.videoOutput) as? AVCaptureSynchronizedSampleBufferData {
        videoData = videoBufferData.sampleBuffer
        sampleBufferWasDropped = videoBufferData.sampleBufferWasDropped
    }

    // Ignore this callback if either stream dropped its frame
    if depthDataWasDropped || sampleBufferWasDropped {
        return
    }

    guard let depthData = depthData, let videoData = videoData,
          let pixelBuffer = CMSampleBufferGetImageBuffer(videoData) else { return }
    let depthDataMap = depthData.depthDataMap
    let depthDataCIImage = CIImage(cvPixelBuffer: depthDataMap)
    // The matching color frame, available here for blending or filtering
    let originalCIImage = CIImage(cvPixelBuffer: pixelBuffer)

    DispatchQueue.main.async {
        let uiImage = UIImage(ciImage: depthDataCIImage)
        self.imageView.image = uiImage
    }
}
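One caveat: the depth map holds raw Float values (meters for depth, 1/m for disparity), so wrapping it in a CIImage as above usually produces a washed-out picture. A common fix, sketched here under the assumption that the buffer has first been converted to kCVPixelFormatType_DepthFloat32 (normalize(_:) is a hypothetical helper, not an API), is to rescale the map to 0...1 before display:

import CoreVideo

// Hypothetical helper: rescale a Float32 depth map in place to the 0...1 range
func normalize(_ pixelBuffer: CVPixelBuffer) {
    CVPixelBufferLockBaseAddress(pixelBuffer, [])
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, []) }
    guard let base = CVPixelBufferGetBaseAddress(pixelBuffer) else { return }
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    let rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer)

    // First pass: find the finite min/max values
    var minV = Float.greatestFiniteMagnitude
    var maxV = -Float.greatestFiniteMagnitude
    for y in 0..<height {
        let row = base.advanced(by: y * rowBytes).assumingMemoryBound(to: Float32.self)
        for x in 0..<width where row[x].isFinite {
            minV = min(minV, row[x])
            maxV = max(maxV, row[x])
        }
    }
    let range = maxV - minV
    guard range > 0 else { return }

    // Second pass: rescale every pixel into 0...1
    for y in 0..<height {
        let row = base.advanced(by: y * rowBytes).assumingMemoryBound(to: Float32.self)
        for x in 0..<width {
            row[x] = (row[x] - minV) / range
        }
    }
}

Convert first with depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32), call normalize on its depthDataMap, and only then create the CIImage.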