Implementing a Split-Screen Camera on iOS

Without further ado, here is a screenshot of the result:

[Screenshot: WechatIMG7.png]

How it works:
The left half of the screen is an AVCaptureVideoPreviewLayer showing the live camera feed,
while the right half is a UIImageView that is continuously updated with frames converted from the video data output.

Code

Conform to the <AVCaptureVideoDataOutputSampleBufferDelegate> protocol.
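For example, in a view controller's class extension (ViewController is a placeholder name; the SDAutoLayout import is an assumption, added because of the sd_layout calls used further down):

#import <AVFoundation/AVFoundation.h>
#import <MediaPlayer/MediaPlayer.h>
#import "UIView+SDAutoLayout.h" // third-party SDAutoLayout, provides sd_layout

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
// the properties listed below go here
@end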

// Capture device: typically the front camera, back camera, or microphone (audio input)
@property (nonatomic, strong) AVCaptureDevice *device;
// AVCaptureDeviceInput represents an input device; it is initialized with an AVCaptureDevice
@property (nonatomic, strong) AVCaptureDeviceInput *input;
// Video data output that delivers the raw frames
@property (nonatomic, strong) AVCaptureVideoDataOutput *imageOutput;
// The session ties input and output together and starts/stops the capture device (camera)
@property (nonatomic, strong) AVCaptureSession *session;
// Preview layer that displays the captured image in real time
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (strong, nonatomic) UIImageView *outputImageView;
@property (strong, nonatomic) UIView *leftView;
// Last known system volume, used to detect up/down presses in volumeClicked: below
@property (nonatomic, assign) CGFloat volume;
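Note that leftView (the container for the preview layer) is never created in this post's snippets. A minimal sketch of the missing setup, assuming a landscape-only controller that splits the screen in half:

- (void)viewDidLoad {
    [super viewDidLoad];
    // The left half of the screen hosts the live preview layer
    CGFloat halfWidth = CGRectGetWidth(self.view.bounds) / 2.0;
    self.leftView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, halfWidth, CGRectGetHeight(self.view.bounds))];
    [self.view addSubview:self.leftView];
    [self cameraDistrict];
}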

- (void)cameraDistrict
{
    //    AVCaptureDevicePositionBack  = back camera
    //    AVCaptureDevicePositionFront = front camera
    self.device = [self cameraWithPosition:AVCaptureDevicePositionBack];
    self.input = [[AVCaptureeDeviceInput alloc] initWithDevice:self.device error:nil];

    self.imageOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.imageOutput.alwaysDiscardsLateVideoFrames = YES;

    // Deliver sample buffers on a dedicated serial queue
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [self.imageOutput setSampleBufferDelegate:self queue:queue];

    // Request BGRA frames so they can be drawn directly with Core Graphics
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [self.imageOutput setVideoSettings:videoSettings];

    self.session = [[AVCaptureSession alloc] init];
    //    The captured frame size can be chosen as needed:
    //    AVCaptureSessionPreset320x240
    //    AVCaptureSessionPreset352x288
    //    AVCaptureSessionPreset640x480
    //    AVCaptureSessionPreset960x540
    //    AVCaptureSessionPreset1280x720
    //    AVCaptureSessionPreset1920x1080
    //    AVCaptureSessionPreset3840x2160
    self.session.sessionPreset = AVCaptureSessionPreset640x480;
    // Wire input and output into the session
    if ([self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }
    if ([self.session canAddOutput:self.imageOutput]) {
        [self.session addOutput:self.imageOutput];
    }
    // Create the preview layer
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.frame = self.leftView.bounds;

    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [[self.previewLayer connection] setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
    [self.leftView.layer addSublayer:self.previewLayer];

    _outputImageView = [[UIImageView alloc] init];
    _outputImageView.contentMode = UIViewContentModeScaleAspectFill;
    _outputImageView.clipsToBounds = YES;
    [self.view addSubview:_outputImageView];

    // Pin the image view to the right half of the screen (SDAutoLayout)
    _outputImageView.sd_layout
    .leftSpaceToView(self.leftView, 0)
    .rightSpaceToView(self.view, 0)
    .topSpaceToView(self.view, 0)
    .bottomSpaceToView(self.view, 0);

    // Start capturing
    [self.session startRunning];
    if ([_device lockForConfiguration:nil]) {
        // Auto flash
        if ([_device isFlashModeSupported:AVCaptureFlashModeAuto]) {
            [_device setFlashMode:AVCaptureFlashModeAuto];
        }
        // Auto white balance (though in my testing this branch never seems to be entered)
        if ([_device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance]) {
            [_device setWhiteBalanceMode:AVCaptureWhiteBalanceModeAutoWhiteBalance];
        }
        [_device unlockForConfiguration];
    }
}
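One thing not shown above: since iOS 10 the app's Info.plist must contain an NSCameraUsageDescription entry, and it is safer to check camera authorization before running the setup. A minimal sketch:

    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (granted) {
                [self cameraDistrict];
            }
        });
    }];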

- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}
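devicesWithMediaType: has been deprecated since iOS 10; on newer systems the same lookup can be written with AVCaptureDeviceDiscoverySession, roughly:

- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                                mediaType:AVMediaTypeVideo
                                                                 position:position];
    return discovery.devices.firstObject;
}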

- (AVCaptureVideoOrientation)videoOrientationFromCurrentDeviceOrientation {
    switch (self.interfaceOrientation) {
        case UIInterfaceOrientationPortrait:
            return AVCaptureVideoOrientationPortrait;
        case UIInterfaceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeLeft;
        case UIInterfaceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeRight;
        case UIInterfaceOrientationPortraitUpsideDown:
            return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIInterfaceOrientationUnknown:
            break;
    }
    return AVCaptureVideoOrientationLandscapeLeft;
}
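This helper is presumably intended to replace the hard-coded LandscapeRight orientation above when the interface can rotate (note that self.interfaceOrientation has been deprecated since iOS 8). Applying it would look something like:

    if (self.previewLayer.connection.isVideoOrientationSupported) {
        self.previewLayer.connection.videoOrientation = [self videoOrientationFromCurrentDeviceOrientation];
    }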

#pragma mark AVCaptureSession delegate
/**
 Convert each sample buffer into a UIImage and display it in the right-hand image view.
 */
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // 32BGRA frames map to Core Graphics as 32-bit little-endian with premultiplied alpha first
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationUp];
    CGImageRelease(newImage);
    // setImage: must run on the main thread; the pixel buffer stays locked until it has been drawn
    [_outputImageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

Hiding the system volume HUD

    // Requires <MediaPlayer/MediaPlayer.h>. Adding an offscreen MPVolumeView
    // to the view hierarchy suppresses the system volume HUD.
    AVAudioSession *audio = [AVAudioSession sharedInstance];
    [audio setActive:YES error:nil];
    MPVolumeView *volumeView = [[MPVolumeView alloc] initWithFrame:CGRectMake(-2000, -2000, 10, 10)];
    volumeView.hidden = NO;
    [self.view addSubview:volumeView];

Listening for system volume change events

    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(volumeClicked:) name:@"AVSystemController_SystemVolumeDidChangeNotification" object:nil];
    // Observing alone does nothing: @"AVSystemController_SystemVolumeDidChangeNotification" only
    // fires once the app registers for remote control events, so add this after the observer:
    [[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
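Keep in mind that AVSystemController_SystemVolumeDidChangeNotification is a private, undocumented notification and can break in future iOS releases. A public-API alternative is key-value observing AVAudioSession's outputVolume; a sketch, assuming the session was activated as above:

    // Register once, e.g. in viewDidLoad
    [[AVAudioSession sharedInstance] addObserver:self
                                      forKeyPath:@"outputVolume"
                                         options:NSKeyValueObservingOptionNew
                                         context:nil];

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if ([keyPath isEqualToString:@"outputVolume"]) {
        float newVolume = [change[NSKeyValueChangeNewKey] floatValue];
        NSLog(@"volume changed to %.2f", newVolume);
    }
}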

Adjusting the camera zoom with the volume buttons

- (void)volumeClicked:(NSNotification *)noti {
    // videoZoomFactor ranges from 1.0 up to the device maximum (67.5 on this device)
    CGFloat zoom = self.device.videoZoomFactor;
    NSLog(@"%.2f", zoom);

    NSDictionary *userInfo = [noti userInfo];
    CGFloat volume = [[userInfo valueForKey:@"AVSystemController_AudioVolumeNotificationParameter"] floatValue];

    if (volume == 1.0 && self.volume == 1.0) {        // volume already at maximum, keep zooming in
        zoom += 0.25;
    } else if (volume == 0.0 && self.volume == 0.0) { // volume already at minimum, keep zooming out
        zoom -= 0.25;
    } else if (volume > self.volume) {                // volume up
        zoom += 0.25;
        self.volume = volume;
    } else if (volume < self.volume) {                // volume down
        zoom -= 0.25;
        self.volume = volume;
    }

    // Clamp the zoom factor to a sensible range
    if (zoom < 1 || zoom > 30) {
        return;
    }
    NSError *error = nil;
    [self.device lockForConfiguration:&error];
    if (!error) {
        self.device.videoZoomFactor = zoom;
    } else {
        NSLog(@"error = %@", error);
    }
    [self.device unlockForConfiguration];
}
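Setting videoZoomFactor directly jumps in hard 0.25 steps and clamps against a hard-coded 30. A smoother variant would clamp against the device's real limit and use rampToVideoZoomFactor:withRate: (the rate is exponential, in powers of 2 per second). A sketch that could replace the tail of volumeClicked::

    // Clamp against the real device limit instead of a hard-coded 30
    CGFloat maxZoom = self.device.activeFormat.videoMaxZoomFactor;
    CGFloat target = MIN(MAX(zoom, 1.0), maxZoom);
    if ([self.device lockForConfiguration:nil]) {
        // Animate toward the target zoom instead of stepping
        [self.device rampToVideoZoomFactor:target withRate:2.0];
        [self.device unlockForConfiguration];
    }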
