Camera Capture
iOS camera capture comes in two flavors: AVCaptureSession and AVCaptureMultiCamSession. The main difference is that AVCaptureSession supports only a single dataOutput path; scenarios that need multiple simultaneous output streams require AVCaptureMultiCamSession.
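A minimal sketch of choosing between the two (the helper name p_makeSession is illustrative, not from the original code): AVCaptureMultiCamSession is iOS 13+ only and reports hardware support through isMultiCamSupported, so a safe default is to fall back to a plain AVCaptureSession.
#import <AVFoundation/AVFoundation.h>

- (AVCaptureSession *)p_makeSession {
    // AVCaptureMultiCamSession needs iOS 13+ and hardware that can run
    // several cameras at the same time.
    if (@available(iOS 13.0, *)) {
        if ([AVCaptureMultiCamSession isMultiCamSupported]) {
            return [[AVCaptureMultiCamSession alloc] init];
        }
    }
    // Fall back to the regular single-pipeline session.
    return [[AVCaptureSession alloc] init];
}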
Initializing the session
self.captureSession = [[AVCaptureSession alloc] init];
Initializing the device
- (AVCaptureDevice *)p_chooseBestDevice:(AVCaptureDevicePosition)pos deviceType:(NSArray<AVCaptureDeviceType> *)deviceTypes {
    AVCaptureDevice *bestDevice;
    if (@available(iOS 10.0, *)) {
        // Default to the built-in wide-angle camera when no preferred types are given.
        if (deviceTypes.count == 0) {
            deviceTypes = @[AVCaptureDeviceTypeBuiltInWideAngleCamera];
        }
        AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes mediaType:AVMediaTypeVideo position:pos];
        NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
        for (AVCaptureDevice *device in devices) {
            for (AVCaptureDeviceType deviceType in deviceTypes) {
                if ([deviceType isEqualToString:device.deviceType]) {
                    bestDevice = device;
                    break;
                }
            }
            // Stop at the first device that matches one of the requested types.
            if (bestDevice) {
                break;
            }
        }
    } else {
        NSArray<AVCaptureDevice *> *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices) {
            if (device.position == pos) {
                bestDevice = device;
                break;
            }
        }
    }
    return bestDevice;
}
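Usage might look like the line below; the requested type list is just an example, and self.device matches the property used by the input snippet that follows.
self.device = [self p_chooseBestDevice:AVCaptureDevicePositionBack
                            deviceType:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]];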
Initializing the input
NSError *error = nil;
self.deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:&error];
if (error) {
    ZZHDebugLog(@"%@", error);
}
if ([self.captureSession canAddInput:self.deviceInput]) {
    [self.captureSession addInput:self.deviceInput];
}
Initializing the output
self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
// Drop frames that arrive while the delegate is still processing the previous one.
[self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
// Request BGRA so the frames can be wrapped directly as MTLPixelFormatBGRA8Unorm textures.
[self.videoDataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)}];
self.videoCaptureQueue = dispatch_queue_create("com.zzh.videocapture", DISPATCH_QUEUE_SERIAL);
[self.videoDataOutput setSampleBufferDelegate:self queue:self.videoCaptureQueue];
if ([self.captureSession canAddOutput:self.videoDataOutput]) {
    [self.captureSession addOutput:self.videoDataOutput];
}
Setting the output frame orientation
AVCaptureConnection *connection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
connection.videoOrientation = AVCaptureVideoOrientationPortrait;
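The snippets above only configure the session; it still has to be started. A minimal sketch, assuming a dedicated serial queue named sessionQueue (the name is illustrative): batching changes between beginConfiguration and commitConfiguration applies them atomically, and startRunning blocks, so it should stay off the main thread.
dispatch_async(self.sessionQueue, ^{
    [self.captureSession beginConfiguration];
    // ... addInput / addOutput / connection setup from the snippets above ...
    [self.captureSession commitConfiguration];
    // startRunning blocks until the capture pipeline is live.
    [self.captureSession startRunning];
});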
AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (output == self.videoDataOutput) {
        if (self.videoBufferCallBack) {
            self.videoBufferCallBack(sampleBuffer);
        }
    }
}
- (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    ZZHDebugLog(@"dropped a video frame");
}
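If you need to know why frames are being dropped, CoreMedia attaches the reason to the dropped buffer; a sketch of reading it inside the callback above, using the same log macro as the rest of this post:
CFStringRef reason = (CFStringRef)CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, NULL);
if (reason != NULL) {
    // Typical values are kCMSampleBufferDroppedFrameReason_FrameWasLate
    // and kCMSampleBufferDroppedFrameReason_OutOfBuffers.
    ZZHDebugLog(@"frame dropped: %@", (__bridge NSString *)reason);
}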
On-screen rendering with Metal
Initializing the MTKView
- (void)p_initPreview:(CGRect)frame {
    self.preview = [[MTKView alloc] initWithFrame:frame];
    self.preview.device = MTLCreateSystemDefaultDevice();
    self.preview.delegate = self;
    // The drawable is written to by an MPS pass, so it cannot be framebuffer-only.
    self.preview.framebufferOnly = NO;
    self.commandQueue = [self.preview.device newCommandQueue];
    // Cache used to wrap CVPixelBuffers as Metal textures without copying.
    CVMetalTextureCacheCreate(NULL, NULL, self.preview.device, NULL, &_textureCache);
}
Uploading the texture
- (void)displayBuffer:(CMSampleBufferRef)buffer {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(buffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    CVMetalTextureRef tmpTexture = NULL;
    // Wrap the BGRA pixel buffer as a Metal texture via the texture cache (zero-copy).
    CVReturn status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.textureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &tmpTexture);
    if (status == kCVReturnSuccess) {
        self.preview.drawableSize = CGSizeMake(width, height);
        self.texture = CVMetalTextureGetTexture(tmpTexture);
        CFRelease(tmpTexture);
    }
}
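Gluing the two halves together might look like the following; self.capture and self.renderer are illustrative property names, while videoBufferCallBack and displayBuffer: come from the snippets above.
__weak typeof(self) weakSelf = self;
self.capture.videoBufferCallBack = ^(CMSampleBufferRef sampleBuffer) {
    // Runs on videoCaptureQueue; only the texture upload happens here,
    // the actual draw is driven later by drawInMTKView:.
    [weakSelf.renderer displayBuffer:sampleBuffer];
};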
Drawing to the screen
- (void)drawInMTKView:(nonnull MTKView *)view {
    if (self.texture) {
        id<MTLCommandBuffer> commandBuffer = [self.commandQueue commandBuffer];
        id<MTLTexture> drawingTexture = view.currentDrawable.texture;
        // A sigma-1 Gaussian blur from MetalPerformanceShaders doubles as a simple
        // filtered copy from the camera texture into the drawable.
        MPSImageGaussianBlur *filter = [[MPSImageGaussianBlur alloc] initWithDevice:view.device sigma:1];
        [filter encodeToCommandBuffer:commandBuffer sourceTexture:self.texture destinationTexture:drawingTexture];
        [commandBuffer presentDrawable:view.currentDrawable];
        [commandBuffer commit];
        self.texture = nil;
    }
}
Open questions
1. activeFormat and the session preset are left for a later discussion.
2. The output pixel-format setting has some system-defined behavior: leaving videoSettings unset and explicitly setting it to nil do not behave the same way.
3. Extra per-frame information, such as the brightness value, is also left for later.
4. The information used for rendering must match the frame format, such as the YUV-to-RGB conversion matrix and the color space, otherwise the picture shows a color cast; see the sketch below.
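A sketch of what item 4 refers to, assuming the sample buffer comes from the capture callback: the pixel buffer carries attachments describing its YCbCr matrix, color primaries and transfer function, which the render side can read to pick the matching conversion (this mainly matters when capturing YUV formats instead of 32BGRA).
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// kCVImageBufferColorPrimariesKey and kCVImageBufferTransferFunctionKey can be
// read the same way when the color space also needs to be matched.
CFTypeRef matrix = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
if (matrix && CFEqual(matrix, kCVImageBufferYCbCrMatrix_ITU_R_709_2)) {
    // Use the BT.709 YUV-to-RGB matrix in the shader.
} else if (matrix && CFEqual(matrix, kCVImageBufferYCbCrMatrix_ITU_R_601_4)) {
    // Use the BT.601 matrix instead.
}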