1.安装FFMpeg
pod 'mobile-ffmpeg-full'
2.FFMpeg解码步骤
1.通过url 和传输方式初始化解码器
/* Demuxer/decoder state shared by the init, decode-loop and teardown code. */
static AVFormatContext *fmt_ctx;            /* container/demuxer context        */
static AVCodecContext *dec_ctx;             /* video decoder context            */
static int video_stream_index = -1;         /* index of the selected video stream */

/*
 * Open the stream at `url`, locate its best video stream and open a decoder
 * for it.  On success fmt_ctx/dec_ctx/video_stream_index are populated and
 * 0 is returned; on failure a negative AVERROR code is returned and any
 * partially-created state is released.
 *
 * `transport` selects the RTSP lower transport (e.g. "tcp"/"udp"); when NULL
 * we fall back to "tcp".
 */
static int init_rtsp_with_url(char *url, char *transport)
{
    const AVCodec *dec;
    int ret;
    AVDictionary *opts = NULL;

    av_dict_set(&opts, "rtsp_transport", transport ? transport : "tcp", 0);

    /* BUG FIX: the options dictionary was previously built but NULL was
     * passed to avformat_open_input(), so the transport setting never took
     * effect (and `transport` was ignored, and `opts` leaked). */
    ret = avformat_open_input(&fmt_ctx, url, NULL, &opts);
    av_dict_free(&opts);  /* free any entries the demuxer did not consume */
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "打开流媒体失败\n");
        return ret;
    }

    if ((ret = avformat_find_stream_info(fmt_ctx, NULL)) < 0) {
        av_log(NULL, AV_LOG_ERROR, "无法提取流信息\n");
        goto fail;
    }

    /* Select the best video stream; also returns the matching decoder. */
    ret = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "找不到视频流\n");
        goto fail;
    }
    video_stream_index = ret;

    /* Create the decoding context and copy the stream parameters into it. */
    dec_ctx = avcodec_alloc_context3(dec);
    if (!dec_ctx) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    ret = avcodec_parameters_to_context(dec_ctx,
                                        fmt_ctx->streams[video_stream_index]->codecpar);
    if (ret < 0)
        goto fail;

    /* Init the video decoder. */
    if ((ret = avcodec_open2(dec_ctx, dec, NULL)) < 0) {
        av_log(NULL, AV_LOG_ERROR, "无法打开解码器\n");
        goto fail;
    }
    return 0;

fail:
    /* BUG FIX: the original leaked fmt_ctx (and dec_ctx) on every error
     * path after a successful open.  Both helpers NULL their argument. */
    avcodec_free_context(&dec_ctx);
    avformat_close_input(&fmt_ctx);
    return ret;
}
2.创建循环线程 用于解码frame
/* Reused packet/frame buffers for the decode loop; freed in close_rtsp(). */
AVPacket *packet;
AVFrame *frame;

/*
 * Decode-loop thread entry point (pthread-style signature; `param` unused).
 * Reads packets until EOF/error, decodes video frames, converts each one to
 * a H264YUV_Frame and hands it to yuvCallBack for rendering, then tears the
 * decoder down via close_rtsp().
 *
 * NOTE(review): yuvCallBack is declared elsewhere; decodeToYUV() mallocs the
 * plane buffers, so presumably the callback/renderer frees them — verify.
 */
void *run(void *param)
{
    int ret;
    H264YUV_Frame yuvFrame;
    memset(&yuvFrame, 0, sizeof(H264YUV_Frame));

    /*** read the video stream ***/
    frame = av_frame_alloc();
    packet = av_packet_alloc();
    if (!frame || !packet)  /* BUG FIX: allocations were previously unchecked */
        goto end;

    while (1) {
        if ((ret = av_read_frame(fmt_ctx, packet)) < 0)
            break;  /* EOF or read error: fall through to cleanup */
        if (packet->stream_index == video_stream_index) {
            ret = avcodec_send_packet(dec_ctx, packet);
            if (ret < 0) {
                av_log(NULL, AV_LOG_ERROR, "Error while sending a packet to the decoder\n");
                av_packet_unref(packet);  /* BUG FIX: packet leaked on this path */
                break;
            }
            /* One packet can yield zero or more frames — drain them all. */
            while (ret >= 0) {
                ret = avcodec_receive_frame(dec_ctx, frame);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                    break;  /* need more input / stream finished */
                } else if (ret < 0) {
                    av_log(NULL, AV_LOG_ERROR, "Error while receiving a frame from the decoder\n");
                    av_packet_unref(packet);  /* BUG FIX: packet leaked on this path */
                    goto end;
                }
                frame->pts = frame->best_effort_timestamp;
                yuvFrame = decodeToYUV(dec_ctx, frame);
                /* Callback used to render the YUV frame. */
                (*yuvCallBack)(yuvFrame);
                av_frame_unref(frame);
            }
        }
        av_packet_unref(packet);
    }
end:
    close_rtsp();
    return NULL;
}
3.释放解码器
/*
 * Release every decoding resource: the reusable packet/frame buffers, the
 * decoder context and the demuxer context.  Each av_*_free helper takes the
 * address of the pointer and resets it to NULL, so calling this twice (or
 * with only some of the resources allocated) is safe.
 */
void close_rtsp(void)
{
    av_packet_free(&packet);
    av_frame_free(&frame);
    avcodec_free_context(&dec_ctx);
    avformat_close_input(&fmt_ctx);
}
4.frame 转为 YUV(结构体可以自己定义)
/* A single image plane: a heap-allocated byte buffer plus its length.
 * NOTE(review): dataBuffer is malloc'd by decodeToYUV(); the consumer of the
 * frame is presumably responsible for freeing it — confirm against caller. */
typedef struct H264FrameDef
{
unsigned int length;        /* number of valid bytes in dataBuffer */
unsigned char* dataBuffer;  /* tightly-packed plane data (no stride padding) */
}H264Frame;
/* A decoded frame split into its three planes (Y, Cb, Cr).
 * Plane dimensions follow decodeToYUV(): chroma planes are width/2 x height/2,
 * i.e. this layout assumes 4:2:0 subsampling. */
typedef struct H264YUVDef
{
unsigned int width;   /* visible frame width in pixels */
unsigned int height;  /* visible frame height in pixels */
H264Frame luma;       /* Y plane  (width  x height)      */
H264Frame chromaB;    /* Cb plane (width/2 x height/2)   */
H264Frame chromaR;    /* Cr plane (width/2 x height/2)   */
}H264YUV_Frame;
/*
 * Copy one image plane from a stride-padded source into a tightly-packed
 * destination buffer.
 *
 * src      - plane data laid out as `height` rows of `linesize` bytes
 * dist     - destination; must hold at least min(linesize, width) * height bytes
 * linesize - source stride in bytes (may exceed `width` due to padding)
 * width    - number of bytes to copy per row (clamped to linesize)
 * height   - number of rows
 */
void copyDecodedFrame(unsigned char *src, unsigned char *dist, int linesize, int width, int height)
{
    /* Integer min — the original used fminf(), needlessly round-tripping
     * two ints through float. */
    if (linesize < width)
        width = linesize;
    for (int i = 0; i < height; ++i)
    {
        memcpy(dist, src, width);
        dist += width;     /* destination is packed */
        src += linesize;   /* source keeps its stride */
    }
}
/*
 * Convert a decoded AVFrame into a tightly-packed H264YUV_Frame with three
 * malloc'd plane buffers (Y, Cb, Cr).  Ownership of the buffers transfers to
 * the caller.  On allocation failure a fully-zeroed frame is returned
 * (all lengths 0, all dataBuffer pointers NULL).
 *
 * NOTE(review): plane geometry (chroma = width/2 x height/2) assumes the
 * decoder outputs a 4:2:0 format such as YUV420P; frame->format is not
 * checked here — confirm upstream.  Negative linesizes (vertically flipped
 * frames) are not handled.
 */
H264YUV_Frame decodeToYUV(AVCodecContext *dec_ctx, AVFrame *frame)
{
    int width = dec_ctx->width;
    int height = dec_ctx->height;

    /* Per-plane copy widths, clamped to each plane's stride.  Integer min —
     * the original used the float function fminf() on int operands. */
    int lumaW = frame->linesize[0] < width ? frame->linesize[0] : width;
    int chromBW = frame->linesize[1] < width / 2 ? frame->linesize[1] : width / 2;
    int chromRW = frame->linesize[2] < width / 2 ? frame->linesize[2] : width / 2;

    unsigned int lumaLength = (unsigned int)(height * lumaW);
    unsigned int chromBLength = (unsigned int)((height / 2) * chromBW);
    unsigned int chromRLength = (unsigned int)((height / 2) * chromRW);

    H264YUV_Frame yuvFrame;
    memset(&yuvFrame, 0, sizeof(H264YUV_Frame));

    /* No casts on malloc in C; check every allocation (BUG FIX: the original
     * passed unchecked pointers straight into memcpy via copyDecodedFrame). */
    yuvFrame.luma.dataBuffer = malloc(lumaLength);
    yuvFrame.chromaB.dataBuffer = malloc(chromBLength);
    yuvFrame.chromaR.dataBuffer = malloc(chromRLength);
    if (!yuvFrame.luma.dataBuffer || !yuvFrame.chromaB.dataBuffer ||
        !yuvFrame.chromaR.dataBuffer) {
        free(yuvFrame.luma.dataBuffer);
        free(yuvFrame.chromaB.dataBuffer);
        free(yuvFrame.chromaR.dataBuffer);
        memset(&yuvFrame, 0, sizeof(H264YUV_Frame));
        return yuvFrame;  /* zeroed frame signals failure */
    }

    yuvFrame.luma.length = lumaLength;
    yuvFrame.chromaB.length = chromBLength;
    yuvFrame.chromaR.length = chromRLength;

    copyDecodedFrame(frame->data[0], yuvFrame.luma.dataBuffer, frame->linesize[0],
                     width, height);
    copyDecodedFrame(frame->data[1], yuvFrame.chromaB.dataBuffer, frame->linesize[1],
                     width / 2, height / 2);
    copyDecodedFrame(frame->data[2], yuvFrame.chromaR.dataBuffer, frame->linesize[2],
                     width / 2, height / 2);

    yuvFrame.width = width;
    yuvFrame.height = height;
    return yuvFrame;
}