本文介绍iOS下使用VTDecompressionSessionRef将编码格式为H.264的每一帧frame数据解码的方法。
编码H.264请参考:iOS实时硬编码H.264
DecodeH264.h
#import <Foundation/Foundation.h>
#import <VideoToolbox/VideoToolbox.h>
/// Delegate that receives each decoded frame from the decompression callback.
@protocol DecodeH264Delegate <NSObject>
/// Called once per successfully decoded frame. The buffer is retained by the
/// decoder's output callback; the receiver is expected to release it when done
/// (see the sample ViewController implementation).
- (void)displayDecodedFrame:(CVImageBufferRef)imageBuffer;
@end
/// Hardware H.264 decoder wrapping VTDecompressionSession.
/// Feed it Annex-B NALUs (4-byte 00 00 00 01 start code) via -decodeNalu:withSize:.
@interface DecodeH264 : NSObject
/// Lazily creates the decompression session from previously received SPS/PPS.
/// Returns YES when a usable session exists.
- (BOOL)initH264Decoder;
/// Accepts one Annex-B NALU; the buffer is modified in place (start code is
/// overwritten with an AVCC length prefix) and must be writable.
- (void)decodeNalu:(uint8_t *)frame withSize:(uint32_t)frameSize;
@property (nonatomic,weak) id<DecodeH264Delegate>delegate;
@end
DecodeH264.m
#import "DecodeH264.h"
// Output dimensions requested from the decoder (see the pixel-buffer
// attributes in -initH264Decoder, where these are swapped and doubled).
#define h264outputWidth 800
#define h264outputHeight 600
@interface DecodeH264() {
// Cached SPS/PPS payloads (start code stripped), malloc'd in -decodeNalu:.
uint8_t *sps;
uint8_t *pps;
int spsSize;
int ppsSize;
// Created lazily in -initH264Decoder once SPS and PPS have both arrived.
VTDecompressionSessionRef session;
CMVideoFormatDescriptionRef description;
}
@end
@implementation DecodeH264
//解码回调函数
// Decompression output callback. Runs synchronously within
// VTDecompressionSessionDecodeFrame because -decode:withSize: passes no async
// decode flags. sourceFrameRefCon is the &outputPixelBuffer supplied there.
static void outputCallback(void *decompressionOutputRefCon,
                           void *sourceFrameRefCon,
                           OSStatus status,
                           VTDecodeInfoFlags infoFlags,
                           CVImageBufferRef pixelBuffer,
                           CMTime presentationTimeStamp,
                           CMTime presentationDuration)
{
    // On decode failure (or a dropped frame) pixelBuffer may be NULL; do not
    // hand a NULL buffer to the delegate.
    if (status != noErr || pixelBuffer == NULL) {
        return;
    }
    CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
    if (outputPixelBuffer != NULL) {
        // Retain so the buffer survives past the callback; the delegate is
        // expected to release it (the sample's -displayDecodedFrame: does).
        *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);
    }
    DecodeH264 *decoder = (__bridge DecodeH264 *)decompressionOutputRefCon;
    if (decoder.delegate != nil) {
        [decoder.delegate displayDecodedFrame:pixelBuffer];
    }
}
//创建解码器
// Lazily creates the VTDecompressionSession from the cached SPS/PPS.
// Returns YES when a usable session exists, NO when the parameter sets are
// missing or session creation fails (the original always returned YES, which
// let callers proceed to decode with a NULL session).
- (BOOL)initH264Decoder {
    if (session) {
        return YES;
    }
    // SPS and PPS must have been captured by -decodeNalu:withSize: first.
    if (sps == NULL || pps == NULL) {
        return NO;
    }
    const uint8_t *parameterSetPointers[2] = {sps, pps};
    const size_t parameterSetSizes[2] = {spsSize, ppsSize};
    // 4 == length of the AVCC NAL-size prefix written by -decodeNalu:withSize:.
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                          2, // parameter set count
                                                                          parameterSetPointers,
                                                                          parameterSetSizes,
                                                                          4, // NAL unit header (length prefix) size
                                                                          &description);
    if (status != noErr) {
        NSLog(@"创建失败,status=%d", (int)status);
        return NO;
    }
    NSDictionary *destinationPixelBufferAttributes = @{
        // NV12 (bi-planar 4:2:0, video range) is the native iOS hardware format.
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
        // NOTE(review): width/height are swapped relative to the encoder and
        // doubled — presumably to match a rotated capture orientation; confirm.
        (id)kCVPixelBufferWidthKey : @(h264outputHeight * 2),
        (id)kCVPixelBufferHeightKey : @(h264outputWidth * 2),
        (id)kCVPixelBufferOpenGLCompatibilityKey : @YES
    };
    VTDecompressionOutputCallbackRecord callBackRecord;
    callBackRecord.decompressionOutputCallback = outputCallback;
    callBackRecord.decompressionOutputRefCon = (__bridge void *)self;
    status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                          description,
                                          NULL,
                                          (__bridge CFDictionaryRef)destinationPixelBufferAttributes,
                                          &callBackRecord,
                                          &session);
    if (status != noErr) {
        NSLog(@"创建失败,status=%d", (int)status);
        return NO;
    }
    // BUG FIX: VTSessionSetProperty takes the session; the original passed
    // the CMVideoFormatDescriptionRef, so neither property ever took effect.
    VTSessionSetProperty(session, kVTDecompressionPropertyKey_ThreadCount, (__bridge CFTypeRef)@1);
    VTSessionSetProperty(session, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
    return YES;
}
//获取nalu数据
// Accepts one Annex-B NALU (prefixed with the 4-byte 00 00 00 01 start code),
// rewrites the start code into an AVCC big-endian length prefix IN PLACE, and
// dispatches on the NAL unit type. The caller's buffer must be writable.
- (void)decodeNalu:(uint8_t *)frame withSize:(uint32_t)frameSize {
    // Need the 4-byte start code plus at least the 1-byte NAL header.
    if (frame == NULL || frameSize <= 4) {
        return;
    }
    int nalu_type = (frame[4] & 0x1F); // low 5 bits of the NAL header byte
    // Overwrite the start code with the payload length, big-endian (AVCC).
    uint32_t nalSize = (uint32_t)(frameSize - 4);
    uint8_t *pNalSize = (uint8_t *)(&nalSize);
    frame[0] = *(pNalSize + 3);
    frame[1] = *(pNalSize + 2);
    frame[2] = *(pNalSize + 1);
    frame[3] = *(pNalSize);
    // When streaming, an I-frame (keyframe) must arrive intact or the picture
    // turns green; dropped B/P frames only cause stutter.
    switch (nalu_type) {
        case 0x05: // IDR (I-frame): the session must exist before decoding
            if ([self initH264Decoder]) {
                [self decode:frame withSize:frameSize];
            }
            break;
        case 0x07: // SPS: cache the payload without the prefix
            spsSize = frameSize - 4;
            free(sps); // BUG FIX: release any previously cached SPS (was leaked)
            sps = malloc(spsSize);
            memcpy(sps, &frame[4], spsSize);
            break;
        case 0x08: // PPS
            ppsSize = frameSize - 4;
            free(pps); // BUG FIX: release any previously cached PPS (was leaked)
            pps = malloc(ppsSize);
            memcpy(pps, &frame[4], ppsSize);
            break;
        default: // P/B frames and all other NAL types
            if ([self initH264Decoder]) {
                [self decode:frame withSize:frameSize];
            }
            break;
    }
}
//解码
// Wraps one AVCC frame in a CMSampleBuffer and decodes it synchronously.
// `frame` must already carry the 4-byte big-endian length prefix written by
// -decodeNalu:withSize:. The memory is NOT copied (kCFAllocatorNull), so it
// must stay valid for the duration of this call.
// NOTE(review): the returned buffer was retained by the output callback, but
// the delegate in this sample releases the same buffer — callers that ignore
// the return value (as -decodeNalu: does) depend on that delegate release.
- (CVPixelBufferRef)decode:(uint8_t *)frame withSize:(uint32_t)frameSize {
    CVPixelBufferRef outputPixelBuffer = NULL;
    // Guard against decoding before the session exists (init can now fail).
    if (session == NULL || frame == NULL || frameSize == 0) {
        return NULL;
    }
    CMBlockBufferRef blockBuffer = NULL;
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL,
                                                         (void *)frame,
                                                         frameSize,
                                                         kCFAllocatorNull, // do not free caller's memory
                                                         NULL,
                                                         0,
                                                         frameSize,
                                                         FALSE,
                                                         &blockBuffer);
    if (status == kCMBlockBufferNoErr) {
        CMSampleBufferRef sampleBuffer = NULL;
        const size_t sampleSizeArray[] = {frameSize};
        status = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                           blockBuffer,
                                           description,
                                           1, // sample count
                                           0, // no timing info
                                           NULL,
                                           1, // one entry in size array
                                           sampleSizeArray,
                                           &sampleBuffer);
        if (status == noErr && sampleBuffer) {
            VTDecodeFrameFlags flags = 0; // synchronous decode
            VTDecodeInfoFlags flagOut = 0;
            // outputCallback runs before this returns and fills outputPixelBuffer.
            OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(session,
                                                                      sampleBuffer,
                                                                      flags,
                                                                      &outputPixelBuffer,
                                                                      &flagOut);
            if (decodeStatus == kVTInvalidSessionErr) {
                NSLog(@"无效session");
            } else if (decodeStatus == kVTVideoDecoderBadDataErr) {
                NSLog(@"解码失败(Bad data),status=%d", (int)decodeStatus);
            } else if (decodeStatus != noErr) {
                NSLog(@"解码失败,status=%d", (int)decodeStatus);
            }
            CFRelease(sampleBuffer);
        }
        CFRelease(blockBuffer);
    }
    return outputPixelBuffer;
}
@end
ViewController.m
//创建解码对象
_decoder = [[DecodeH264 alloc] init];
_decoder.delegate = self;
//编码回调
// Encoder callback: prepends the Annex-B start code to the SPS and PPS and
// feeds each to the decoder so it can build its format description.
- (void)gotSpsPps:(NSData *)sps pps:(NSData *)pps {
    const char bytes[] = "\x00\x00\x00\x01"; // Annex-B start code
    size_t length = (sizeof bytes) - 1;      // exclude the trailing '\0'
    NSData *byteHeader = [NSData dataWithBytes:bytes length:length];
    NSMutableData *h264Data = [[NSMutableData alloc] init];
    // SPS
    [h264Data appendData:byteHeader];
    [h264Data appendData:sps];
    [_decoder decodeNalu:(uint8_t *)[h264Data bytes] withSize:(uint32_t)h264Data.length];
    // PPS — truncating to zero length is sufficient; the original's extra
    // resetBytesInRange: zeroed bytes that were immediately discarded.
    [h264Data setLength:0];
    [h264Data appendData:byteHeader];
    [h264Data appendData:pps];
    [_decoder decodeNalu:(uint8_t *)[h264Data bytes] withSize:(uint32_t)h264Data.length];
}
//编码回调
// Encoder callback: wraps one encoded NALU with the Annex-B start code and
// hands the result to the decoder.
- (void)gotEncodedData:(NSData *)data {
    static const char kStartCode[] = "\x00\x00\x00\x01"; // Annex-B start code
    NSMutableData *annexBFrame =
        [NSMutableData dataWithBytes:kStartCode length:sizeof(kStartCode) - 1];
    [annexBFrame appendData:data];
    [_decoder decodeNalu:(uint8_t *)annexBFrame.bytes
                withSize:(uint32_t)annexBFrame.length];
}
//解码回调
// DecodeH264Delegate: invoked synchronously from the decoder's output
// callback with each freshly decoded frame (e.g. for OpenGL ES rendering).
- (void)displayDecodedFrame:(CVImageBufferRef)imageBuffer {
NSLog(@"decode success");
// Balances the CVPixelBufferRetain performed in the decoder's output
// callback; render or copy the buffer before this release in real code.
CVPixelBufferRelease(imageBuffer);
}
解码后的CVImageBufferRef可以通过OpenGLES处理渲染。