- Hardware encoding
VTCompressionSessionRef: a session object that compresses a sequence of video frames
VTCompressionSessionCreate: creates the session
VTSessionSetProperty: sets a property on the session
VTCompressionSessionEncodeFrame: encodes a video frame
VTCompressionSessionCompleteFrames: forces completion of all pending frames, or of all frames up to and including a given presentation time
VTCompressionSessionInvalidate: stops the session; the session object must still be released with CFRelease (see the sketch just below)
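The last two calls in that list do not appear in the configuration and encoding code below. A minimal teardown sketch, assuming the same compressionSession ivar used throughout this post:

// Finish any frames still in flight, then tear the session down.
- (void)teardownCompressionSession {
    if (compressionSession == NULL) return;
    // Force the encoder to emit every pending frame.
    VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
    // Invalidate stops the session...
    VTCompressionSessionInvalidate(compressionSession);
    // ...but the session object still has to be released explicitly.
    CFRelease(compressionSession);
    compressionSession = NULL;
}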
- (void)configCompressionSession {
// Create the compression session
OSStatus status = VTCompressionSessionCreate(NULL,
_configuration.videoSize.width,
_configuration.videoSize.height,
kCMVideoCodecType_H264,
NULL,
NULL,
NULL,
VideoCompressonOutputCallback,
(__bridge void *)self,
&compressionSession);
if (status != noErr) {
return;
}
/*
Keyframe interval, in frames:
0 (default): the encoder chooses where to place keyframes
1: every frame is a keyframe
2: every other frame is a keyframe
and so on
*/
VTSessionSetProperty(
compressionSession,
kVTCompressionPropertyKey_MaxKeyFrameInterval,
(__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval)
);
// Maximum keyframe interval in seconds; works together with xxx_MaxKeyFrameInterval, and whichever limit is reached first triggers a keyframe
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration,
(__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval/_configuration.videoFrameRate)
);
/*
Expected frame rate, in frames per second. This setting does not control the frame rate;
it is a hint to the video encoder so it can set up its internal configuration before
compression begins. The actual frame rate depends on frame durations and may vary.
The default is 0, meaning the frame rate is unknown.
*/
VTSessionSetProperty(
compressionSession,
kVTCompressionPropertyKey_ExpectedFrameRate,
(__bridge CFTypeRef)@(_configuration.videoFrameRate)
);
/*
Average bit rate, default 0.
The bit rate setting only takes effect when timing information is supplied for the source frames.
*/
VTSessionSetProperty(
compressionSession,
kVTCompressionPropertyKey_AverageBitRate,
(__bridge CFTypeRef)@(_configuration.videoBitRate)
);
/*
Hard cap on the data rate: Read/write, CFArray[CFNumber], [bytes, seconds, bytes, seconds...], Optional.
Here the limit is 1.5x the average bit rate, converted from bits to bytes, per 1-second window.
*/
NSArray *limit = @[@(_configuration.videoBitRate * 1.5/8), @(1)];
VTSessionSetProperty(
compressionSession,
kVTCompressionPropertyKey_DataRateLimits,
(__bridge CFArrayRef)limit
);
// For offline encoding, false is recommended (better compression); for live encoding, true is recommended
VTSessionSetProperty(
compressionSession,
kVTCompressionPropertyKey_RealTime,
kCFBooleanTrue
);
/*
The available profiles and levels vary by format and by video encoder.
Video encoders should use the standard keys where available,
and follow the standard naming pattern where no standard key exists.
*/
VTSessionSetProperty(
compressionSession,
kVTCompressionPropertyKey_ProfileLevel,
kVTProfileLevel_H264_Main_AutoLevel
);
/*
Whether the encoder may produce B-frames; defaults to true.
B-frames are bidirectionally predicted from the frames before and after them,
so the encode order differs from the display order. Set to false to disable B-frames.
*/
VTSessionSetProperty(
compressionSession,
kVTCompressionPropertyKey_AllowFrameReordering,
kCFBooleanTrue
);
/*
Entropy coding mode for H.264 compression:
CABAC: context-adaptive binary arithmetic coding, or
CAVLC: context-adaptive variable-length coding.
CABAC generally gives better compression at the cost of higher computational overhead.
The default is encoder-specific and may change depending on other encoder settings.
*/
VTSessionSetProperty(
compressionSession,
kVTCompressionPropertyKey_H264EntropyMode,
kVTH264EntropyMode_CABAC
);
// Prepare to encode
VTCompressionSessionPrepareToEncodeFrames(compressionSession);
}
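One thing configCompressionSession does not show is who maintains the _isBackGround flag that the encode method below checks before accepting frames. A minimal sketch, assuming the encoder observes the UIApplication lifecycle notifications; the observer registration and selector names here are illustrative, not taken from the original:

// Illustrative sketch: keep _isBackGround in sync with the app's lifecycle.
// Assumes this is called once, e.g. from the encoder's init.
- (void)addBackgroundObservers {
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(willEnterBackground:)
                                                 name:UIApplicationWillResignActiveNotification
                                               object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(willEnterForeground:)
                                                 name:UIApplicationDidBecomeActiveNotification
                                               object:nil];
}

- (void)willEnterBackground:(NSNotification *)notification {
    _isBackGround = YES;
    // Flush whatever the encoder is still holding before the app gets suspended.
    VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
}

- (void)willEnterForeground:(NSNotification *)notification {
    _isBackGround = NO;
}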
// Encode one frame
- (void)encodeVideoData:(CVPixelBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
if(_isBackGround) return;
frameCount++;
// Presentation timestamp
CMTime presentationTimeStamp = CMTimeMake(frameCount, (int32_t)_configuration.videoFrameRate);
VTEncodeInfoFlags flags;
CMTime duration = CMTimeMake(1, (int32_t)_configuration.videoFrameRate);
// If this frame should be a keyframe, force the encoder to emit one
NSDictionary *properties = nil;
if (frameCount % (int32_t)_configuration.videoMaxKeyframeInterval == 0) {
properties = @{(__bridge NSString *)kVTEncodeFrameOptionKey_ForceKeyFrame: @YES};
}
NSNumber *timeNumber = @(timeStamp);
// Encode; the retained timeNumber is passed through to the output callback as the frame reference
OSStatus status = VTCompressionSessionEncodeFrame(compressionSession, pixelBuffer, presentationTimeStamp, duration, (__bridge CFDictionaryRef)properties, (__bridge_retained void *)timeNumber, &flags);
if(status != noErr){
[self resetCompressionSession];
}
}
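resetCompressionSession is called above when VTCompressionSessionEncodeFrame fails, but it is not shown in this excerpt. A minimal sketch, assuming it simply tears the old session down and rebuilds it with configCompressionSession:

// Sketch of the reset path referenced above: drop the broken session and rebuild it.
- (void)resetCompressionSession {
    if (compressionSession) {
        VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
        VTCompressionSessionInvalidate(compressionSession);
        CFRelease(compressionSession);
        compressionSession = NULL;
    }
    [self configCompressionSession];
}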
// Encoder output callback
static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer){
if (!sampleBuffer) return;
CFArrayRef array = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
if (!array) return;
CFDictionaryRef dic = (CFDictionaryRef)CFArrayGetValueAtIndex(array, 0);
if (!dic) return;
// A sync sample (no kCMSampleAttachmentKey_NotSync attachment) is a keyframe
BOOL keyframe = !CFDictionaryContainsKey(dic, kCMSampleAttachmentKey_NotSync);
// Timestamp: take back ownership of the retained NSNumber passed in at encode time
uint64_t timeStamp = [((__bridge_transfer NSNumber *)VTFrameRef) longLongValue];
LFHardwareVideoEncoder *videoEncoder = (__bridge LFHardwareVideoEncoder *)VTref;
if (status != noErr) {
return;
}
if (keyframe && !videoEncoder->sps) {
// Format description (carries the SPS/PPS parameter sets)
CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
/*
SPS: sequence parameter set.
Holds the global parameters for a coded video sequence (the sequence produced by encoding the raw video).
NAL unit type 7.
In container formats it is stored in the file header together with the PPS NAL unit.
It is generally used as decoder initialization information; the SPS and PPS NAL units
usually sit at the very start of the bitstream, and the player must parse the SPS
before later decoding steps can use its parameters.
PPS: picture parameter set.
NAL unit type 8.
Stored in its own NAL unit.
*/
// Get the SPS
size_t sparameterSetSize, sparameterSetCount;
const uint8_t *sparameterSet;
OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
if (statusCode == noErr) {
// Get the PPS
size_t pparameterSetSize, pparameterSetCount;
const uint8_t *pparameterSet;
OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
if (statusCode == noErr) {
videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
if (videoEncoder->enabledWriteVideoFile) {
NSMutableData *data = [[NSMutableData alloc] init];
uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
[data appendBytes:header length:4];
[data appendData:videoEncoder->sps];
[data appendBytes:header length:4];
[data appendData:videoEncoder->pps];
fwrite(data.bytes, 1, data.length, videoEncoder->fp);
}
}
}
}
CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
size_t length, totalLength;
char *dataPointer;
OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
if (statusCodeRet == noErr) {
size_t bufferOffset = 0;
static const int AVCCHeaderLength = 4;
// Walk the buffer, one NAL unit at a time
while (bufferOffset < totalLength - AVCCHeaderLength) {
// Read the NAL unit length
uint32_t NALUnitLength = 0;
// NALUnitLength: 4-byte unsigned integer length prefix
memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
// Convert the NAL unit length from big-endian to host byte order
NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
// Package the NAL unit into a video frame object
LFVideoFrame *videoFrame = [LFVideoFrame new];
videoFrame.timestamp = timeStamp;
videoFrame.data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
videoFrame.isKeyFrame = keyframe;
videoFrame.sps = videoEncoder->sps;
videoFrame.pps = videoEncoder->pps;
// Hand the frame to the delegate
if (videoEncoder.h264Delegate && [videoEncoder.h264Delegate respondsToSelector:@selector(videoEncoder:videoFrame:)]) {
[videoEncoder.h264Delegate videoEncoder:videoEncoder videoFrame:videoFrame];
}
// Optionally write the frame to a local file
if (videoEncoder->enabledWriteVideoFile) {
NSMutableData *data = [[NSMutableData alloc] init];
if (keyframe) {
// Keyframe: 4-byte start code
uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
[data appendBytes:header length:4];
} else {
// Non-keyframe: 3-byte start code
uint8_t header[] = {0x00, 0x00, 0x01};
[data appendBytes:header length:3];
}
[data appendData:videoFrame.data];
fwrite(data.bytes, 1, data.length, videoEncoder->fp);
}
// Advance past the 4-byte length prefix plus the NAL unit payload
bufferOffset += AVCCHeaderLength + NALUnitLength;
}
}
}
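As the SPS/PPS comment above notes, those two parameter sets are the decoder's initialization information. For context, here is a minimal decoder-side sketch that turns the captured sps/pps back into a CMVideoFormatDescription; the helper function is illustrative, and the 4 matches the AVCC length-prefix size (AVCCHeaderLength) used above:

#import <CoreMedia/CoreMedia.h>

// Illustrative decoder-side helper: rebuild the format description from SPS/PPS.
static CMVideoFormatDescriptionRef CreateFormatDescription(NSData *sps, NSData *pps) {
    const uint8_t *const parameterSetPointers[2] = {(const uint8_t *)sps.bytes, (const uint8_t *)pps.bytes};
    const size_t parameterSetSizes[2] = {sps.length, pps.length};
    CMVideoFormatDescriptionRef format = NULL;
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
        kCFAllocatorDefault,
        2,                       // two parameter sets: SPS and PPS
        parameterSetPointers,
        parameterSetSizes,
        4,                       // NAL unit length-prefix size (AVCC)
        &format);
    return (status == noErr) ? format : NULL;
}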
To be continued...