Here are some notes on the live push-streaming technology I've been studying recently.
The main flow of push streaming on iOS is as follows:
Capture
On iOS, capture is generally done with AVFoundation. The classes involved are:
- AVCaptureSession: the capture session object. It connects an input on one end and an output on the other, hands the captured raw audio/video data to the app, and manages starting and stopping capture.
- AVCaptureDevice: the physical capture device, i.e. the microphone or a camera (front or back). You generally specify a mediaType when looking one up: AVMediaTypeAudio (audio) or AVMediaTypeVideo (video).
- AVCaptureDeviceInput: the capture input object, initialized from an AVCaptureDevice.
- AVCaptureVideoDataOutput / AVCaptureAudioDataOutput: the output objects for captured video and audio data, the counterparts of the inputs.
Enough talk; let's look at the code!
// First define a capture class, with the ivars and properties it needs.
// The delegate protocol below is reconstructed from how it is called later in these notes.
@class Capture;
@protocol CaptureDelegate <NSObject>
- (void)capture:(Capture *)capture videoBuffer:(CMSampleBufferRef _Nullable)buffer;
- (void)capture:(Capture *)capture audioBuffer:(CMSampleBufferRef _Nullable)buffer;
@end

@interface Capture()<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate> {
    // Capture-related objects
    AVCaptureVideoDataOutput *videoOutput;
    AVCaptureAudioDataOutput *audioOutput;
    AVCaptureConnection *videoConnection;
    AVCaptureConnection *audioConnection;
    dispatch_queue_t acaptureQueue;   // serial queue for audio callbacks
    dispatch_queue_t vcaptureQueue;   // serial queue for video callbacks
    dispatch_semaphore_t semaphore;
    AVCaptureDeviceInput *_deviceInput;
    int sampleCount;
}
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, weak) id<CaptureDelegate> delegate;
@property (nonatomic, assign) AVCaptureDevicePosition devicePosition;
@property (nonatomic, assign) AVCaptureVideoOrientation orientation;
// AVCaptureSessionPreset is an NSString typedef, so copy rather than assign
@property (nonatomic, copy) AVCaptureSessionPreset preset;
@property (nonatomic, assign) BOOL isMirrored;
@end
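Before any of this will produce data, the app needs capture permissions. This isn't covered in the original notes, but as a reminder: Info.plist needs NSCameraUsageDescription and NSMicrophoneUsageDescription entries, and access is requested at runtime, e.g.:
// Ask for camera access up front (microphone is analogous with AVMediaTypeAudio).
// Without the Info.plist usage-description keys, this call will crash the app.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (!granted) {
        NSLog(@"camera permission denied");
    }
}];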
- Initialization
// 1. Create the capture session: AVCaptureSession
_session = [[AVCaptureSession alloc] init];
// Set the capture resolution (width/height)
_session.sessionPreset = _preset; // @see AVCaptureSessionPreset1280x720
// 2. Set up audio capture: the built-in microphone
AVCaptureDevice *micro = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInMicrophone mediaType:AVMediaTypeAudio position:AVCaptureDevicePositionUnspecified];
AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:micro error:nil];
if (![_session canAddInput:audioInput]) {
    NSLog(@"can not add audioInput");
    return NO;
}
// Add the audio input to the session
[_session addInput:audioInput];
// 3. Get the physical video device, e.g. the back camera
AVCaptureDevice *camera = [self videoDeviceWithPosition:_devicePosition];
// Create the video input object from the physical device
AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:camera error:nil];
if (![_session canAddInput:videoInput]) {
    NSLog(@"can not add video input");
    return NO;
}
_deviceInput = videoInput;
[_session addInput:videoInput];
/* If [session startRunning] has already been called, any change to the input/output
   configuration must be wrapped in [session beginConfiguration] and
   [session commitConfiguration] to take effect. If startRunning has not been
   called yet, these two calls can be omitted. */
[_session beginConfiguration];
// 4. Create the video output object
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
// Configure the output; here the pixel format is bi-planar full-range YUV 4:2:0
NSDictionary *videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
videoOutput.videoSettings = videoSettings;
// Policy for when capture outpaces processing: YES (the default) discards
// late frames; NO would buffer them instead
videoOutput.alwaysDiscardsLateVideoFrames = YES;
// Set the video output delegate; vcaptureQueue is the serial queue the callbacks run on
[videoOutput setSampleBufferDelegate:self queue:vcaptureQueue];
// Add the video output to the session
[_session addOutput:videoOutput];
// 5. Create the audio output object
audioOutput = [[AVCaptureAudioDataOutput alloc] init];
// Set the audio output delegate; acaptureQueue is the serial queue the callbacks run on
[audioOutput setSampleBufferDelegate:self queue:acaptureQueue];
// Add the audio output to the session
[_session addOutput:audioOutput];
// 6. Create the video and audio AVCaptureConnection objects
/* An AVCaptureConnection represents the connection between AVCaptureInputPort(s)
   and an AVCaptureOutput; the captured content can also be rendered via an
   AVCaptureVideoPreviewLayer. */
videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];
// Mirror the captured video if requested
videoConnection.videoMirrored = _isMirrored;
// Set the capture orientation; without this the captured video comes out rotated 90 degrees
if ([videoConnection isVideoOrientationSupported]) {
    videoConnection.videoOrientation = _orientation;
}
// Commit the configuration
[_session commitConfiguration];
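The videoDeviceWithPosition: helper used above isn't shown in these notes; a minimal sketch of what it might look like:
// A sketch (the original notes don't show this helper's body): look up the
// built-in wide-angle camera at the requested position.
- (AVCaptureDevice *)videoDeviceWithPosition:(AVCaptureDevicePosition)position {
    return [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
                                              mediaType:AVMediaTypeVideo
                                               position:position];
}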
- That covers initialization. To start and stop capture, you also need:
/// Start capturing
- (void)startRunning {
    [_session startRunning];
}
/// Stop capturing
- (void)stopRunning {
    [_session stopRunning];
}
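One caveat not mentioned above: -startRunning blocks until the session is up, and Apple recommends keeping it off the main thread. A sketch of an async variant (the method name is my own, not from the original notes):
/// Start capturing without blocking the caller; -startRunning is a blocking
/// call, so dispatch it onto the serial video capture queue.
- (void)startRunningAsync {
    dispatch_async(vcaptureQueue, ^{
        [self->_session startRunning];
    });
}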
- Implement the delegate callbacks
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // sampleBuffer is the captured data; use the connection to tell video from audio
    if (connection == videoConnection) {
        NSLog(@"got a video sampleBuffer; process it further (H.264 encoding) %i", ++sampleCount);
        if (self.delegate) {
            [self.delegate capture:self videoBuffer:sampleBuffer];
        }
    } else if (connection == audioConnection) {
        NSLog(@"got an audio sampleBuffer; process it further (AAC encoding)");
        if (self.delegate) {
            [self.delegate capture:self audioBuffer:sampleBuffer];
        }
    }
}
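On the video side, the next step (H.264 encoding) typically starts by pulling the raw frame out of the sample buffer. A small sketch of that, for illustration:
// Extract the raw YUV frame and its presentation timestamp from a video
// sample buffer; the CVPixelBufferRef is what a VideoToolbox encoder consumes.
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
NSLog(@"frame %zux%zu at %.3fs",
      CVPixelBufferGetWidth(pixelBuffer),
      CVPixelBufferGetHeight(pixelBuffer),
      CMTimeGetSeconds(pts));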
- Display: an AVCaptureVideoPreviewLayer can show the captured video. Define a custom UIView named CapturePreviewView, set its layerClass to [AVCaptureVideoPreviewLayer class], and configure the layer's basic parameters (a sketch of the view follows below):
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
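A minimal sketch of CapturePreviewView itself (the previewLayer accessor is assumed from how it's used below):
// Back the view with an AVCaptureVideoPreviewLayer instead of a plain CALayer.
@interface CapturePreviewView : UIView
@property (nonatomic, readonly) AVCaptureVideoPreviewLayer *previewLayer;
@end

@implementation CapturePreviewView
+ (Class)layerClass {
    return [AVCaptureVideoPreviewLayer class];
}
- (AVCaptureVideoPreviewLayer *)previewLayer {
    return (AVCaptureVideoPreviewLayer *)self.layer;
}
@end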
Then call the following from viewDidLoad:
- (void)showPreview {
    self.preview = [[CapturePreviewView alloc] initWithFrame:self.view.bounds];
    _preview.previewLayer.session = self.capture.session;
    _preview.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
    [self.view insertSubview:_preview atIndex:0];
}
- We can also preview-play the captured data directly: AVSampleBufferDisplayLayer renders CMSampleBufferRef data.
- (void)showSampleLayer {
    _displayLayer = [[AVSampleBufferDisplayLayer alloc] init];
    _displayLayer.frame = CGRectMake(0, 0, _videoEncoder.config->width / 5.0, _videoEncoder.config->height / 5.0);
    _displayLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [self.preview.layer insertSublayer:_displayLayer above:_preview.previewLayer];
}
// Play the buffer in the video capture callback
- (void)capture:(Capture *)capture videoBuffer:(CMSampleBufferRef _Nullable)buffer
{
    // Enqueue the frame for display
    [_displayLayer enqueueSampleBuffer:buffer];
    // [_videoEncoder encode:buffer timeStamp:CACurrentMediaTime()*1000];
}
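One defensive detail worth adding here (an assumption, not from the original notes): once AVSampleBufferDisplayLayer enters the failed state, e.g. after the app is backgrounded, it stops rendering until flushed, so it's common to check its status before enqueueing:
// If the display layer has failed, flush it so it accepts new buffers again.
if (_displayLayer.status == AVQueuedSampleBufferRenderingStatusFailed) {
    [_displayLayer flush];
}
[_displayLayer enqueueSampleBuffer:buffer];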
- Changing capture parameters at runtime, such as frame rate, resolution, and so on:
/// Update the frame rate
- (void)updateFps:(int32_t)fps {
    AVCaptureDevice *vDevice = [self videoDeviceWithPosition:_devicePosition];
    // Get the maximum fps the current format supports
    float maxRate = [(AVFrameRateRange *)[vDevice.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0] maxFrameRate];
    // Only apply the new fps if it does not exceed the maximum
    if (maxRate >= fps) {
        // The actual fps change: frame duration = 1/fps
        if ([vDevice lockForConfiguration:NULL]) {
            vDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
            vDevice.activeVideoMaxFrameDuration = vDevice.activeVideoMinFrameDuration;
            [vDevice unlockForConfiguration];
        }
    }
}
/// Switch camera (front or back)
- (void)changeCameraPosition {
    dispatch_async(vcaptureQueue, ^{
        if (self.devicePosition == AVCaptureDevicePositionFront) {
            self.devicePosition = AVCaptureDevicePositionBack;
        } else {
            self.devicePosition = AVCaptureDevicePositionFront;
        }
        AVCaptureDevice *camera = [self videoDeviceWithPosition:self.devicePosition];
        AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:camera error:nil];
        if (!videoInput) {
            NSLog(@"can not init video input");
            return;
        }
        [self.session beginConfiguration];
        [self.session removeInput:self->_deviceInput];
        if ([self.session canAddInput:videoInput]) {
            self->_deviceInput = videoInput;
            [self.session addInput:videoInput];
        } else {
            NSLog(@"can not add video input");
            // Put the previous input back so the session keeps producing frames
            [self.session addInput:self->_deviceInput];
        }
        // Always balance beginConfiguration with commitConfiguration,
        // even on the failure path
        [self.session commitConfiguration];
    });
}
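A subtlety worth flagging (based on AVCaptureSession behavior, not spelled out in the original notes): swapping inputs rebuilds the output's connection, so the cached videoConnection plus its orientation and mirroring settings should be refreshed after the switch, inside the same begin/commit block:
// After swapping inputs the output's connection is recreated, so re-fetch it
// and re-apply orientation and mirroring before committing the configuration.
self->videoConnection = [self->videoOutput connectionWithMediaType:AVMediaTypeVideo];
if ([self->videoConnection isVideoOrientationSupported]) {
    self->videoConnection.videoOrientation = self.orientation;
}
self->videoConnection.videoMirrored = self.isMirrored;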
/// Set the video capture orientation
- (void)setVideoOrientation:(AVCaptureVideoOrientation)orientation
{
    _orientation = orientation;
    dispatch_async(vcaptureQueue, ^{
        self->videoConnection.videoOrientation = orientation;
    });
}
/// Enable or disable mirroring
- (void)setVideoMirrored:(BOOL)isMirrored
{
    _isMirrored = isMirrored;
    dispatch_async(vcaptureQueue, ^{
        self->videoConnection.videoMirrored = isMirrored;
    });
}
/// Set the capture resolution
- (void)setVideoDimension:(AVCaptureSessionPreset)preset
{
    _preset = preset;
    dispatch_async(vcaptureQueue, ^{
        [self.session beginConfiguration];
        if ([self.session canSetSessionPreset:preset]) {
            [self.session setSessionPreset:preset];
        }
        [self.session commitConfiguration];
    });
}
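Putting it all together, usage from a view controller might look like this (the plain init is an assumption; the original notes don't show Capture's initializer):
// Hypothetical wiring in a view controller: create the capture object,
// become its delegate, attach the preview, and start the session.
self.capture = [[Capture alloc] init];
self.capture.delegate = self;
[self showPreview];           // attaches the AVCaptureVideoPreviewLayer
[self.capture startRunning];  // sample buffers start arriving on the delegate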