First, I'd like to recommend a blog post: http://www.cnblogs.com/kenshincui/p/4186022.html. It gives a thorough introduction to iOS multimedia, covering both recording and playback of audio and video.
Capturing a real-time video stream from the camera
// ViewController.m
// Real-time video capture demo
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
// Keep a strong reference to the session; otherwise ARC releases it as soon
// as setupCaptureSession returns and capture silently stops.
@property (nonatomic, strong) AVCaptureSession *session;
@end
// Forward declaration; the function itself is defined after the delegate methods.
UIImage *PAFixOrientationWithImage(UIImage *image);
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self setupCaptureSession];
}
- (void)setupCaptureSession
{
NSError *error = nil;
// Create the session; it coordinates the flow of data from the inputs to the outputs.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Configure the session to produce lower resolution video frames, if your
// processing algorithm can cope. We'll specify medium quality for the
// chosen device.
session.sessionPreset = AVCaptureSessionPresetMedium; // the preset controls the capture resolution
// Find a suitable AVCaptureDevice
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; // defaults to the back camera
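// To use the front camera instead, iterate the available devices (sketch;
// on iOS 10+ AVCaptureDeviceDiscoverySession is the preferred way to do this):
//
// for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
//     if (d.position == AVCaptureDevicePositionFront) { device = d; break; }
// }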
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
error:&error];
if (!input) {
    // Handle the error appropriately; adding a nil input would crash.
    NSLog(@"Failed to create a device input: %@", error);
    return;
}
if ([session canAddInput:input]) {
    [session addInput:input];
}
// Create a VideoDataOutput and add it to the session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; // the video data output
if ([session canAddOutput:output]) {
    [session addOutput:output];
}
// Configure your output.
// A serial queue on which the sample buffer callbacks are delivered, in order.
dispatch_queue_t queue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL);
[output setSampleBufferDelegate:self queue:queue];
// Specify the pixel format. 32BGRA matches the bitmap context created in
// imageFromSampleBuffer: below. On iOS this pixel-format key is the only
// videoSettings key the output supports; the frame size comes from the
// session preset, so the width/height keys are omitted here.
output.videoSettings = [NSDictionary dictionaryWithObject:
                        [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                        forKey:(id)kCVPixelBufferPixelFormatTypeKey];
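// Optional: drop frames that arrive while the delegate is still processing
// the previous one; this keeps live-preview latency low (YES is the default).
// output.alwaysDiscardsLateVideoFrames = YES;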
// A preview layer that shows the camera's view on screen
AVCaptureVideoPreviewLayer *preLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
preLayer.frame = CGRectMake(0, 0, 320, 240);
preLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:preLayer];
// If you wish to cap the frame rate to a known value, such as 15 fps, set
// the minimum frame duration. (minFrameDuration on the output is deprecated;
// the video connection is the supported place to set it in this iOS 5/6-era
// API, and iOS 7+ prefers AVCaptureDevice's activeVideoMinFrameDuration.)
AVCaptureConnection *videoConnection = [output connectionWithMediaType:AVMediaTypeVideo];
if (videoConnection.supportsVideoMinFrameDuration) {
    videoConnection.videoMinFrameDuration = CMTimeMake(1, 15);
}
// Start the session running to start the flow of data
[session startRunning];
// Assign the session to the property declared above; without a strong
// reference, ARC would release it here and capture would stop.
self.session = session;
}
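// Sketch: stop capture when the view goes away, pairing with startRunning above.
- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [self.session stopRunning];
}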
// Delegate routine that is called when a sample buffer was written. It runs
// on the delegate queue, so UIKit work must be bounced to the main queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
    // Create a UIImage from the sample buffer data
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        // Consume the frame on the main thread, e.g. show it in an image view.
        (void)image;
    });
}
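// If the goal is to stream frames over the network, one simple (if CPU-heavy)
// approach is to JPEG-encode each frame in the callback above. Sketch only;
// sendFrameData: is a hypothetical uploader, not part of this demo:
//
// NSData *jpegData = UIImageJPEGRepresentation(image, 0.5);
// [self sendFrameData:jpegData];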
// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
// Get a CMSampleBuffer's Core Video image buffer for the media data
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the base address of the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context with the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
// Free up the context and color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
// Create an image object from the Quartz image
UIImage *image = [UIImage imageWithCGImage:quartzImage];
// To round-trip through PNG data instead:
// image = [UIImage imageWithData:UIImagePNGRepresentation(image)];
// Release the Quartz image
CGImageRelease(quartzImage);
image = PAFixOrientationWithImage(image); // correct the image orientation
return image;
}
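// Performance note: creating a CGBitmapContext for every frame is expensive.
// If the frame is only needed for display or filtering, wrapping the pixel
// buffer in a CIImage is much cheaper (sketch, requires CoreImage):
//
// CIImage *ciImage = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)];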
// Redraws the image so its orientation becomes UIImageOrientationUp, applying
// the appropriate affine transform first (the standard fix-orientation recipe).
UIImage *PAFixOrientationWithImage(UIImage *image) {
if (image.imageOrientation == UIImageOrientationUp) return image;
CGAffineTransform transform = CGAffineTransformIdentity;
switch (image.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, image.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
case UIImageOrientationUp:
case UIImageOrientationUpMirrored:
break;
}
switch (image.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, image.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationUp:
case UIImageOrientationDown:
case UIImageOrientationLeft:
case UIImageOrientationRight:
break;
}
CGContextRef ctx = CGBitmapContextCreate(NULL,
image.size.width,
image.size.height,
CGImageGetBitsPerComponent(image.CGImage),
0,
CGImageGetColorSpace(image.CGImage),
CGImageGetBitmapInfo(image.CGImage));
CGContextConcatCTM(ctx, transform);
switch (image.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
CGContextDrawImage(ctx, CGRectMake(0,0,image.size.height,image.size.width), image.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,image.size.width,image.size.height), image.CGImage);
break;
}
CGImageRef cgiPA = CGBitmapContextCreateImage(ctx);
UIImage *iPA = [UIImage imageWithCGImage:cgiPA];
CGContextRelease(ctx);
CGImageRelease(cgiPA);
return iPA;
}
@end
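One caveat the demo glosses over: the first call to startRunning triggers the system's camera-permission prompt, and on iOS 10 and later the app must also declare NSCameraUsageDescription in Info.plist or it will be terminated when capture starts. Below is a minimal sketch of gating setup on authorization with the iOS 7+ AVCaptureDevice APIs, as a drop-in replacement for the viewDidLoad shown above:
- (void)viewDidLoad {
    [super viewDidLoad];
    AVAuthorizationStatus status =
        [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        [self setupCaptureSession];
    } else if (status == AVAuthorizationStatusNotDetermined) {
        // Ask once; the completion handler may run on an arbitrary queue.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                 completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) {
                    [self setupCaptureSession];
                }
            });
        }];
    } else {
        // Denied or restricted: direct the user to Settings instead of starting capture.
        NSLog(@"Camera access denied or restricted");
    }
}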