在iOS 11中,苹果新出了一个类用于描述景深数据:AVDepthData。AVDepthData内部提供了一系列的属性和方法来获取景深的CVPixelBuffer、景深数据类型等。AVDepthData在iOS、macOS等平台上都是用来描述景深数据,且属于AVFoundation框架。
1.景深摄像头
// Create the capture session.
captureSession = AVCaptureSession()
// Pick a depth-capable device for the AVCaptureDeviceInput:
// front camera → .builtInTrueDepthCamera, back camera → .builtInDualWideCamera.
// NOTE(review): Apple's samples typically pass `for: .video` here; `.depthData`
// also matches depth-capable devices — confirm against the deployment target.
guard let videoDevice = AVCaptureDevice.default(.builtInTrueDepthCamera, for: .depthData, position: .front),
      let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
      captureSession.canAddInput(videoInput) else {
    print("Failed to set up video input")
    return
}
// BUG FIX: the original snippet validated the input with canAddInput(_:)
// but never attached it, leaving the session without any input.
captureSession.addInput(videoInput)
2. AVCaptureDepthDataOutput使用
// Configure the depth-data output and deliver callbacks on the main queue.
let depthDataOutput = AVCaptureDepthDataOutput()
depthDataOutput.setDelegate(self, callbackQueue: DispatchQueue.main)
// Attach it only if the session accepts another output.
guard captureSession.canAddOutput(depthDataOutput) else { return }
captureSession.addOutput(depthDataOutput)
// MARK: - AVCaptureDepthDataOutputDelegate
/// Called once per delivered depth frame; exposes the raw depth map.
func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {
    // The per-frame depth map is a CVPixelBuffer.
    let depthPixelBuffer = depthData.depthDataMap
    _ = depthPixelBuffer
}
注意点:AVCaptureDepthDataOutput和AVCaptureVideoDataOutput的帧率是不一致的
3.怎么保持同步
// Pair the video and depth outputs so time-matched frames arrive together,
// compensating for their differing native frame rates.
let synchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoOutput, depthDataOutput])
synchronizer.setDelegate(self, queue: DispatchQueue.main)
dataOutputSynchronizer = synchronizer
// MARK: - AVCaptureDataOutputSynchronizerDelegate
/// Receives time-matched depth + video data from the synchronizer and
/// renders the depth map into `imageView`.
func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
    // Default the dropped flags to true so a stream with NO entry in this
    // collection is also treated as dropped.
    var depthDataWasDropped = true
    var sampleBufferWasDropped = true
    var depthData: AVDepthData?
    var videoData: CMSampleBuffer?
    if let depthEntry = synchronizedDataCollection.synchronizedData(for: self.depthDataOutput) as? AVCaptureSynchronizedDepthData {
        depthData = depthEntry.depthData
        depthDataWasDropped = depthEntry.depthDataWasDropped
    }
    if let videoEntry = synchronizedDataCollection.synchronizedData(for: self.videoOutput) as? AVCaptureSynchronizedSampleBufferData {
        videoData = videoEntry.sampleBuffer
        sampleBufferWasDropped = videoEntry.sampleBufferWasDropped
    }
    // Skip the frame unless BOTH streams delivered usable data.
    if depthDataWasDropped || sampleBufferWasDropped {
        return
    }
    guard let depthData = depthData, let videoData = videoData,
          let videoPixelBuffer = CMSampleBufferGetImageBuffer(videoData) else { return }
    let depthImage = CIImage(cvPixelBuffer: depthData.depthDataMap)
    // BUG FIX: the original created `originalCIImage` and never used it;
    // keep the matched color frame explicitly available for compositing.
    let videoImage = CIImage(cvPixelBuffer: videoPixelBuffer)
    _ = videoImage // TODO: blend with depthImage if an RGB+depth composite is needed
    // UIKit must be touched on the main thread; hop regardless of the
    // synchronizer's callback queue so this method stays queue-agnostic.
    DispatchQueue.main.async {
        self.imageView.image = UIImage(ciImage: depthImage)
    }
}