iOS实时硬解码H.264

本文介绍iOS下使用VTDecompressionSessionRef将编码格式为H.264的每一帧frame数据解码的方法。

编码H.264请参考:iOS实时硬编码H.264

DecodeH264.h
#import <Foundation/Foundation.h>
#import <VideoToolbox/VideoToolbox.h>

/// Delegate that receives frames produced by the hardware decoder.
@protocol DecodeH264Delegate <NSObject>
/// Called once per successfully decoded frame.
/// NOTE(review): delivered from the VTDecompressionSession output callback —
/// confirm which thread it arrives on and who owns the buffer before using it.
- (void)displayDecodedFrame:(CVImageBufferRef)imageBuffer;
@end

/// Hardware H.264 decoder built on VTDecompressionSessionRef.
/// Feed SPS/PPS NALUs first (via -decodeNalu:withSize:), then frame NALUs.
@interface DecodeH264 : NSObject
/// Creates the decompression session from previously received SPS/PPS.
/// Returns YES when the session is (or already was) available.
- (BOOL)initH264Decoder;
/// Feeds one Annex-B NALU (4-byte start code + payload). The buffer is
/// modified in place (start code rewritten as a length prefix).
- (void)decodeNalu:(uint8_t *)frame withSize:(uint32_t)frameSize;
@property (nonatomic,weak) id<DecodeH264Delegate>delegate;
@end
DecodeH264.m
#import "DecodeH264.h"

#define h264outputWidth 800
#define h264outputHeight 600

@interface DecodeH264() {
    uint8_t *sps;                            // copy of the SPS payload (start code stripped)
    uint8_t *pps;                            // copy of the PPS payload (start code stripped)
    int spsSize;                             // byte length of sps
    int ppsSize;                             // byte length of pps
    VTDecompressionSessionRef session;       // VideoToolbox decode session (NULL until created)
    CMVideoFormatDescriptionRef description; // format description built from SPS/PPS
}
@end

@implementation DecodeH264

// Decompression output callback. Runs once per decoded frame.
// decompressionOutputRefCon is the DecodeH264 instance; sourceFrameRefCon
// points at the caller's CVPixelBufferRef local (see -decode:withSize:).
static void outputCallback(void *decompressionOutputRefCon,
                           void *sourceFrameRefCon,
                           OSStatus status,
                           VTDecodeInfoFlags infoFlags,
                           CVImageBufferRef pixelBuffer,
                           CMTime presentationTimeStamp,
                           CMTime presentationDuration)
{
    // BUG FIX: on decode failure pixelBuffer may be NULL — don't hand a NULL
    // frame to the delegate or store a meaningless retain.
    if (status != noErr || pixelBuffer == NULL) {
        NSLog(@"decode callback failed, status=%d", (int)status);
        return;
    }
    // Hand the caller a +1 retained reference through sourceFrameRefCon.
    CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
    *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);
    DecodeH264 *decoder = (__bridge DecodeH264 *)decompressionOutputRefCon;
    if (decoder.delegate != nil) {
        [decoder.delegate displayDecodedFrame:pixelBuffer];
    }
}

// Creates the VTDecompressionSession from the stored SPS/PPS.
// Returns YES when a session is available (already created or created now).
- (BOOL)initH264Decoder {
    if (session) {
        return YES;
    }
    // SPS/PPS must have been captured by -decodeNalu:withSize: first.
    if (sps == NULL || pps == NULL) {
        NSLog(@"创建失败,SPS/PPS not available");
        return NO;
    }
    const uint8_t *parameterSetPointers[2] = {sps, pps};
    const size_t parameterSetSizes[2] = {spsSize, ppsSize};
    // NAL unit length header size is 4, matching the 4-byte length prefix
    // written over the start code in -decodeNalu:withSize:.
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                          2,//param count
                                                                          parameterSetPointers,
                                                                          parameterSetSizes,
                                                                          4,//nal start code size
                                                                          &description);
    if (status != noErr) {
        // BUG FIX: the original returned YES even when creation failed.
        NSLog(@"创建失败,status=%d", (int)status);
        return NO;
    }
    // Output attributes. Hardware decode on iOS requires an NV12-style
    // biplanar format (420YpCbCr8BiPlanar*).
    NSDictionary *destinationPixelBufferAttributes = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
        // NOTE(review): width/height are swapped relative to the encoder and
        // doubled — original comment says this is intentional ("宽高与编码相反");
        // confirm against the capture orientation.
        (id)kCVPixelBufferWidthKey : @(h264outputHeight * 2),
        (id)kCVPixelBufferHeightKey : @(h264outputWidth * 2),
        (id)kCVPixelBufferOpenGLCompatibilityKey : @YES
    };
    // Wire the decoded-frame callback back to this instance.
    VTDecompressionOutputCallbackRecord callBackRecord;
    callBackRecord.decompressionOutputCallback = outputCallback;
    callBackRecord.decompressionOutputRefCon = (__bridge void *)self;
    status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                          description,
                                          NULL,
                                          (__bridge CFDictionaryRef)destinationPixelBufferAttributes,
                                          &callBackRecord,
                                          &session);
    if (status != noErr) {
        NSLog(@"创建失败,status=%d", (int)status);
        return NO;
    }
    // BUG FIX: VTSessionSetProperty takes the session, not the format
    // description — the original passed `description` here, so neither
    // property was ever applied.
    VTSessionSetProperty(session, kVTDecompressionPropertyKey_ThreadCount, (__bridge CFTypeRef)@1);
    VTSessionSetProperty(session, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
    return YES;
}

// Routes one Annex-B NALU: caches SPS/PPS, decodes I/P/B frames.
// Rewrites the 4-byte start code in place with a big-endian length prefix
// (AVCC layout), which is what VideoToolbox expects.
- (void)decodeNalu:(uint8_t *)frame withSize:(uint32_t)frameSize {
    // Need at least the 4-byte start code plus one NAL header byte.
    if (frame == NULL || frameSize < 5) {
        return;
    }
    int nalu_type = (frame[4] & 0x1F);//low 5 bits of the NAL header byte
    uint32_t nalSize = (uint32_t)(frameSize - 4);
    uint8_t *pNalSize = (uint8_t*)(&nalSize);
    // Overwrite the start code with the payload length, big-endian.
    frame[0] = *(pNalSize + 3);
    frame[1] = *(pNalSize + 2);
    frame[2] = *(pNalSize + 1);
    frame[3] = *(pNalSize);
    // I-frames (keyframes) must arrive intact or the picture goes green;
    // dropped P/B frames only cause stutter.
    switch (nalu_type)
    {
        case 0x05:
            // IDR (I-frame)
            if([self initH264Decoder]) {
                [self decode:frame withSize:frameSize];
            }
            break;
        case 0x07:
            // SPS — keep a private copy (start code stripped).
            spsSize = frameSize - 4;
            free(sps); // BUG FIX: release the previous copy; free(NULL) is a no-op
            sps = malloc(spsSize);
            memcpy(sps, &frame[4], spsSize);
            break;
        case 0x08:
            // PPS — keep a private copy (start code stripped).
            ppsSize = frameSize - 4;
            free(pps); // BUG FIX: release the previous copy; free(NULL) is a no-op
            pps = malloc(ppsSize);
            memcpy(pps, &frame[4], ppsSize);
            break;
        default:
            // P/B frames and all other NALU types
            if([self initH264Decoder]) {
                [self decode:frame withSize:frameSize];
            }
            break;
    }
}

// Decodes one length-prefixed NALU synchronously.
// Returns a +1 retained pixel buffer (set by outputCallback via the
// sourceFrameRefCon), or NULL on failure. The caller owns the reference.
- (CVPixelBufferRef)decode:(uint8_t *)frame withSize:(uint32_t)frameSize {
    CVPixelBufferRef outputPixelBuffer = NULL;
    CMBlockBufferRef blockBuffer = NULL;
    // Wrap the caller's memory without copying; kCFAllocatorNull means the
    // block buffer neither owns nor frees `frame`.
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL,
                                                        (void *)frame,
                                                        frameSize,
                                                        kCFAllocatorNull,
                                                        NULL,
                                                        0,
                                                        frameSize,
                                                        FALSE,
                                                        &blockBuffer);
    if(status == kCMBlockBufferNoErr) {
        CMSampleBufferRef sampleBuffer = NULL;
        const size_t sampleSizeArray[] = {frameSize};
        status = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                           blockBuffer,
                                           description,
                                           1,//num samples
                                           0,
                                           NULL,
                                           1,//num sample size entries
                                           sampleSizeArray,
                                           &sampleBuffer);
        // BUG FIX: CMSampleBufferCreateReady reports noErr, not a
        // CMBlockBuffer error code (both happen to be 0, but compare the
        // right constant).
        if (status == noErr && sampleBuffer) {
            VTDecodeFrameFlags flags = 0;
            VTDecodeInfoFlags flagOut = 0;
            // Synchronous decode; outputCallback fills outputPixelBuffer.
            // (Renamed from the original's shadowing `status` local.)
            OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(session,
                                                                      sampleBuffer,
                                                                      flags,
                                                                      &outputPixelBuffer,
                                                                      &flagOut);
            if (decodeStatus == kVTInvalidSessionErr) {
                NSLog(@"无效session");
            }
            else if (decodeStatus == kVTVideoDecoderBadDataErr) {
                NSLog(@"解码失败(Bad data),status=%d", (int)decodeStatus);
            }
            else if (decodeStatus != noErr) {
                NSLog(@"解码失败,status=%d", (int)decodeStatus);
            }
            CFRelease(sampleBuffer);
        }
        CFRelease(blockBuffer);
    }
    return outputPixelBuffer;
}

@end
ViewController.m
    //创建解码对象
    _decoder = [[DecodeH264 alloc] init];
    _decoder.delegate = self;
// Encoder callback: forwards SPS and PPS to the decoder, each prefixed
// with the 4-byte Annex-B start code.
- (void)gotSpsPps:(NSData *)sps pps:(NSData *)pps {
    const char bytes[] = "\x00\x00\x00\x01";//Annex-B start code
    size_t length = (sizeof bytes) - 1;     // exclude the trailing NUL
    NSData *ByteHeader = [NSData dataWithBytes:bytes length:length];
    //sps
    NSMutableData *h264Data = [[NSMutableData alloc] init];
    [h264Data appendData:ByteHeader];
    [h264Data appendData:sps];
    [_decoder decodeNalu:(uint8_t *)[h264Data bytes] withSize:(uint32_t)h264Data.length];
    //pps
    // FIX: setLength:0 alone empties the buffer; the original's extra
    // resetBytesInRange: zeroed bytes that were discarded immediately after.
    [h264Data setLength:0];
    [h264Data appendData:ByteHeader];
    [h264Data appendData:pps];
    [_decoder decodeNalu:(uint8_t *)[h264Data bytes] withSize:(uint32_t)h264Data.length];
}

// Encoder callback: prefixes the encoded NALU with the 4-byte Annex-B
// start code and hands the result to the decoder.
- (void)gotEncodedData:(NSData *)data {
    const char startCode[] = "\x00\x00\x00\x01";//Annex-B start code
    NSMutableData *annexBData =
        [NSMutableData dataWithBytes:startCode length:(sizeof startCode) - 1];
    [annexBData appendData:data];
    [_decoder decodeNalu:(uint8_t *)annexBData.bytes
                withSize:(uint32_t)annexBData.length];
}

// Decoder delegate callback: one decoded frame per call.
- (void)displayDecodedFrame:(CVImageBufferRef)imageBuffer {
    NSLog(@"decode success");
    // NOTE(review): this release appears to balance the CVPixelBufferRetain
    // performed in the decoder's output callback (whose retained reference is
    // otherwise dropped) — confirm the ownership contract before changing
    // either side.
    CVPixelBufferRelease(imageBuffer);
}

解码后的CVImageBufferRef可以通过OpenGLES处理渲染。

©著作权归作者所有,转载或内容合作请联系作者
平台声明:文章内容(如有图片或视频亦包括在内)由作者上传并发布,文章内容仅代表作者本人观点,简书系信息发布平台,仅提供信息存储服务。

推荐阅读更多精彩内容

  • 用到的组件 1、通过CocoaPods安装 2、第三方类库安装 3、第三方服务 友盟社会化分享组件 友盟用户反馈 ...
    SunnyLeong阅读 14,708评论 1 180
  • 在保证视频图像质量的前提下,HEVC通过增加一定的计算复杂度,可以实现码流在H.264/AVC的基础上降低50%。...
    加刘景长阅读 8,014评论 0 6
  • 有一种喜欢,叫我爱你。 认识你八年了哇(没想到这么久了),初见你于微时,那时方知道了什么是明目皓齿灿若星辰,什么是...
    小爪纸阅读 305评论 0 1
  • Runloop学习 | 目录 ||: ------------- || 1 什么是...
    不多满阅读 308评论 0 0
  • 亲爱的秀秀,当你读到这封信的时候,请不要惊讶,我是来自七年后的你。或许你会觉得有点不可思议,但请你认真阅读此信,因...
    一十七掌阅读 483评论 0 0