This is written in two parts.
Part 1: setting up the front camera
The code is as follows:
- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    // Build the session only once; re-running the setup on every
    // appearance would add duplicate inputs and preview layers.
    if (self.session == nil) {
        [self initCameraAndUI];
    }
    if (!self.session.isRunning) {
        [self.session startRunning];
    }
}

- (void)viewDidLoad {
    [super viewDidLoad];
}
#pragma mark initDevice && UI
- (void)initCameraAndUI {
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPreset640x480;

    // Find the front-facing camera
    NSArray *availableCameraDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in availableCameraDevices) {
        if (device.position == AVCaptureDevicePositionFront) {
            self.frontDevice = device;
            break;
        }
    }

    // Input
    NSError *error = nil;
    AVCaptureDeviceInput *cameraInput = [AVCaptureDeviceInput deviceInputWithDevice:self.frontDevice error:&error];
    if (cameraInput != nil && [self.session canAddInput:cameraInput]) {
        [self.session addInput:cameraInput];
    }

    // Output: deliver BGRA frames to a serial background queue
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self queue:dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL)];
    // Note: iOS has historically honored only the pixel-format key here;
    // the frame dimensions come from the session preset.
    output.videoSettings = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
        (id)kCVPixelBufferWidthKey           : @320,
        (id)kCVPixelBufferHeightKey          : @240,
    };
    if ([self.session canAddOutput:output]) {
        [self.session addOutput:output];
    }

    // Preview layer
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.frame = self.cameraView.bounds;
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.cameraView.layer addSublayer:self.previewLayer];
}
- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    if (self.session != nil && self.session.isRunning) {
        [self.session stopRunning];
    }
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    if (self.session != nil && self.session.isRunning) {
        [self.session stopRunning];
    }
}
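One thing the setup above doesn't cover is camera permission: if access hasn't been granted, the session simply produces no frames. A minimal pre-flight sketch (the setupIfAuthorized method name is my own, not part of the code above):
// Hypothetical wrapper: ask for camera access before building the session.
- (void)setupIfAuthorized {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusNotDetermined) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            // The completion handler may run on an arbitrary queue; hop to main.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) {
                    [self initCameraAndUI];
                    [self.session startRunning];
                }
            });
        }];
    } else if (status == AVAuthorizationStatusAuthorized) {
        [self initCameraAndUI];
        [self.session startRunning];
    }
}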
Part 2: face detection
Convert the frames captured by the front camera into a UIImage:
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Runs on the capture queue, not the main thread
    self.yuvImage = [GetYUVImage sampleBufferToImageWithSampleBuffer:sampleBuffer];
}
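The original never shows where the detection from Part 2 gets invoked. One plausible wiring, sketched below as a variant of the delegate method just shown, is to trigger it right here, throttled so the detector isn't run on all ~30 frames per second (the _frameCount ivar is my own addition, not part of the original):
// A sketch, assuming an NSInteger ivar _frameCount on the controller.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    UIImage *frame = [GetYUVImage sampleBufferToImageWithSampleBuffer:sampleBuffer];
    self.yuvImage = frame;
    // Only analyze every 10th frame; CIDetector is too slow for full frame rate.
    if (++_frameCount % 10 == 0) {
        [self beginDetectorFacewithImage:frame];
    }
}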
The conversion method:
+ (UIImage *)sampleBufferToImageWithSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // The buffer is BGRA (as requested in videoSettings), so it can be
    // wrapped directly in a bitmap context.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Rotate to portrait: the camera sensor delivers landscape frames
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(quartzImage);
    return image;
}
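As an aside: if the frames are only needed for face detection, the detour through CGBitmapContext and UIImage can be skipped, since Core Image reads pixel buffers directly. A minimal sketch (the method name is hypothetical):
// Sketch: feed the pixel buffer straight to Core Image, no UIImage needed.
- (void)detectFacesInSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    // ...pass ciImage to the CIDetector shown below...
}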
Start detecting:
#pragma mark - Face detection
- (void)beginDetectorFacewithImage:(UIImage *)imageA {
    CIImage *image = [CIImage imageWithCGImage:imageA.CGImage];
    NSDictionary *opts = @{ CIDetectorAccuracy : CIDetectorAccuracyHigh };
    // CIDetector is expensive to build; in production cache one instance
    // instead of creating a new one for every frame.
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:opts];
    // Facial features found in the image
    NSArray *features = [detector featuresInImage:image];
    for (CIFaceFeature *f in features) {
        CGRect aRect = f.bounds;
        NSLog(@"%f, %f, %f, %f", aRect.origin.x, aRect.origin.y, aRect.size.width, aRect.size.height);
        // Eye and mouth positions (Core Image coordinates, origin at bottom-left)
        if (f.hasLeftEyePosition) {
            NSLog(@"LeftEye---> %g %g", f.leftEyePosition.x, f.leftEyePosition.y);
        }
        if (f.hasRightEyePosition) {
            NSLog(@"RightEye---> %g %g", f.rightEyePosition.x, f.rightEyePosition.y);
        }
        if (f.hasMouthPosition) {
            NSLog(@"Mouth---> %g %g", f.mouthPosition.x, f.mouthPosition.y);
        }
    }
}
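To draw these results over the preview, keep in mind that Core Image puts the origin at the bottom-left while UIKit puts it at the top-left, so each rect needs a vertical flip. A sketch, assuming the overlay has the same dimensions as the analyzed image:
// Sketch: flip a CIFaceFeature rect from Core Image coordinates
// (origin bottom-left) to UIKit coordinates (origin top-left).
static CGRect FaceRectToViewRect(CGRect faceBounds, CGSize imageSize) {
    CGRect r = faceBounds;
    r.origin.y = imageSize.height - r.origin.y - r.size.height;
    return r;
}
In practice you would also scale the result by the ratio between the image size and the preview layer's bounds before positioning an overlay view.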
Those are the basic steps; from here you can build on them with a more detailed implementation.