寻找第一个视频流(遍历所有流,取第一个类型为视频的流的索引)
// Legacy (pre-FFmpeg 3.1) way to locate the first video stream: inspect the
// deprecated AVStream.codec field (an AVCodecContext owned by libavformat).
// Kept here as the "before" half of the migration example; the replacement
// below uses AVStream.codecpar instead.
int videoStream = -1;
for (int i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
&& videoStream < 0) {
videoStream = i;
}
}
======》
/*
 * Modern replacement: locate the index of the first video stream via
 * AVStream.codecpar (AVCodecParameters), which supersedes the deprecated
 * AVStream.codec field. videoStream stays -1 if no video stream exists;
 * callers must check for that before using it as an index.
 *
 * Fixes vs. the draft above:
 *  - AVFormatContext.nb_streams is unsigned int; iterate with an unsigned
 *    index to avoid a signed/unsigned comparison.
 *  - break out as soon as the first video stream is found instead of
 *    re-testing `videoStream < 0` on every remaining iteration.
 */
int videoStream = -1;
for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
    if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        videoStream = (int) i;
        break;
    }
}
获取codec上下文指针
// Legacy: borrow the decoder context straight from the stream (deprecated
// AVStream.codec field). The context is owned by libavformat — do not free it.
AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
======》
/*
 * Modern replacement: allocate a standalone AVCodecContext and populate it
 * from the stream's AVCodecParameters. The caller owns pCodecCtx and must
 * eventually release it with avcodec_free_context().
 *
 * Fixes vs. the draft above:
 *  - the return value of avcodec_parameters_to_context() is now checked
 *    (it can fail, e.g. on OOM while duplicating extradata);
 *  - the context is freed on that failure path so it does not leak.
 */
AVCodecContext *pCodecCtx = avcodec_alloc_context3(NULL);
if (pCodecCtx == NULL)
{
printf("Could not allocate AVCodecContext\n");
return -1;
}
if (avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoStream]->codecpar) < 0)
{
printf("Could not copy codec parameters to AVCodecContext\n");
avcodec_free_context(&pCodecCtx);
return -1;
}
对该帧进行解码
// Legacy decode/render loop built on avcodec_decode_video2(), which has been
// deprecated since FFmpeg 3.1 in favor of the send/receive packet API.
// Kept as the "before" half of the migration example.
while(av_read_frame(pFormatCtx, &packet)>=0) {
// Only handle packets belonging to the selected video stream
if(packet.stream_index==videoStream) {
// Decode the packet (deprecated API)
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (frameFinished) {
// lock native window
ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
// Convert the decoded frame to the RGBA destination format
sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
pFrame->linesize, 0, pCodecCtx->height,
pFrameRGBA->data, pFrameRGBA->linesize);
// Source/destination pointers and strides for the row copy
uint8_t * dst = windowBuffer.bits;
int dstStride = windowBuffer.stride * 4;
uint8_t * src = pFrameRGBA->data[0];
int srcStride = pFrameRGBA->linesize[0];
// The window's stride may differ from the frame's stride, so copy row by row
int h;
for (h = 0; h < videoHeight; h++) {
memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride);
}
ANativeWindow_unlockAndPost(nativeWindow);
}
// Crude pacing: sleep ~40 ms per packet, scaled by play_rate
usleep((unsigned long) (1000 * 40 * play_rate));
}
av_packet_unref(&packet);
}
======》
/*
 * Modern decode/render loop using the send/receive API
 * (avcodec_send_packet() / avcodec_receive_frame()).
 *
 * Fixes vs. the draft above:
 *  - ANativeWindow_setBuffersGeometry() is hoisted out of the per-frame
 *    loop: the buffer geometry never changes between frames, so configuring
 *    it once before the loop is sufficient;
 *  - the AVPacket is unreferenced before the early `return -1`, so the
 *    error path no longer leaks the packet's payload.
 */
// Configure the window buffer once: the size and RGBA_8888 pixel format must
// match the frames produced by sws_scale below.
ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
while (av_read_frame(pFormatCtx, &packet) >= 0) {
    // Only handle packets belonging to the selected video stream
    if (packet.stream_index == videoStream) {
        if (avcodec_send_packet(pCodecCtx, &packet) == 0) {
            // One packet may produce several frames; drain the decoder
            while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
                ANativeWindow_lock(nativeWindow, &windowBuffer, NULL);
                // Convert the decoded frame to the RGBA destination format
                sws_scale(sws_ctx, (uint8_t const * const *) pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);
                uint8_t *dst = (uint8_t *) windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4;   // RGBA_8888: 4 bytes per pixel
                uint8_t *src = pFrameRGBA->data[0];
                int srcStride = pFrameRGBA->linesize[0];
                // The window's stride may differ from the frame's stride, so copy row by row
                for (int h = 0; h < videoHeight; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride);
                }
                ANativeWindow_unlockAndPost(nativeWindow);
            }
        } else {
            LOGE("Error sending a packet for decoding");
            av_packet_unref(&packet);   // do not leak the packet on the error path
            return -1;
        }
        // Sleep ~16 ms per drawn frame so playback does not run too fast
        usleep(1000 * 16);
    }
    av_packet_unref(&packet);
}
编码参数上下文的拷贝
// Legacy stream-setup path using avcodec_copy_context() and the deprecated
// AVStream.codec field. Kept as the "before" half of the migration example.
ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
if (ret < 0){
printf("Failed to copy context from input to output stream codec context\n");
goto end;
}
// Clear codec_tag so the muxer picks a tag appropriate for the output format
out_stream->codec->codec_tag = 0;
// Formats with global headers (e.g. MP4) need the flag set on the encoder ctx
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
======》
AVCodecContext *codec_ctx = avcodec_alloc_context3(in_codec);
ret = avcodec_parameters_to_context(codec_ctx, in_stream->codecpar);
if (ret < 0){
printf("Failed to copy in_stream codecpar to codec context\n");
goto end;
}
codec_ctx->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
ret = avcodec_parameters_from_context(out_stream->codecpar, codec_ctx);
if (ret < 0){
printf("Failed to copy codec context to out_stream codecpar context\n");
goto end;
}