之前利用 ffmpeg 获取过画面帧,但是一个 64 位的 ffmpeg 的 so 库就有 20M,现在应用市场还要求必须同时提供 32 位和 64 位,包体压力很大,而且上次用的那个 ffmpeg 的截取速度也很慢,于是大佬建议我去了解一下 MediaCodec。之前用它做过屏幕录制,但是截取画面帧还没用过,现在记录一下 demo,万一用到了呢。
它的速度是真的快,看 gif 图。测试视频是一段 1 分 16 秒的视频。
首先我们获取一下视频数据,并判断有没有视频轨道。
// Open the file and select the first track whose MIME type is video/*.
extractor = new MediaExtractor();
extractor.setDataSource(videoPath);
int trackCount = extractor.getTrackCount();
for(int x = 0; x < trackCount; x++){
MediaFormat trackFormat = extractor.getTrackFormat(x);
if(trackFormat.getString(MediaFormat.KEY_MIME).contains("video")){
videoFormat = trackFormat;
extractor.selectTrack(x);
break;
}
}
// No video track found. NOTE(review): the message says "no key frame" but
// the check above actually detects a missing video track.
if(videoFormat == null){
throw new Exception("没有发现视频关键帧");
}
// Ask for a flexible YUV420 output so MediaCodec.getOutputImage() works later.
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
// NOTE(review): the next two calls write back the values they just read,
// so they are no-ops and could be removed.
videoFormat.setInteger(MediaFormat.KEY_WIDTH,videoFormat.getInteger(MediaFormat.KEY_WIDTH));
videoFormat.setInteger(MediaFormat.KEY_HEIGHT,videoFormat.getInteger(MediaFormat.KEY_HEIGHT));
// Clip rotation in degrees (0/90/180/270); stays 0 when the key is absent.
if(videoFormat.containsKey(MediaFormat.KEY_ROTATION)){
rotation = videoFormat.getInteger(MediaFormat.KEY_ROTATION);
}
// KEY_DURATION is in microseconds; dividing by 1e6 gives whole seconds,
// i.e. the number of frames to grab at one frame per second.
duration = videoFormat.getLong(MediaFormat.KEY_DURATION);
totalCount = (int) (duration / 1000 /1000);
// Create, configure and start a decoder matching the track's MIME type.
mediaCodec = MediaCodec.createDecoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
mediaCodec.configure(videoFormat,null,null,0);
mediaCodec.start();
通过 MediaFormat 获取了视频信息,其中 duration 和 rotation 比较重要:duration/1000/1000(微秒转秒)可以算出需要取几帧(1 秒一帧);rotation 是视频的旋转角度,后面生成 bitmap 时需要用到。
然后开始解析。解析比较耗时,最好放在子线程。
// Starts the decode pass on a background worker thread named "decode".
// No-op while a previous worker reference is still set.
public void decode(){
if (childThread == null) {
stopDecode = false;// new pass: clear any previous stop request
childThread = new Thread(this, "decode");
childThread.start();
}
}
@Override
public void run() {
// Worker-thread entry point: runs the whole extract/decode loop.
processByExtractor();
}
解析代码如下
/**
 * Decode loop, run on the worker thread: seek to second `count`, queue one
 * sample into the decoder, drain one decoded image, convert it to a Bitmap
 * and report progress. Codec and extractor are released in the finally block.
 */
private void processByExtractor(){
try {
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
long timeOut = 5 * 1000;// dequeue timeout, in microseconds
boolean inputDone = false;
boolean outputDone = false;
ByteBuffer[] inputBuffers = null;
// Pre-Lollipop API: input buffers are fetched up front as an array.
if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP){
inputBuffers = mediaCodec.getInputBuffers();
}
count = 0;
progressCount = 0;
while (!outputDone){
if(!inputDone){
// Stop button pressed: break out of the loop.
if(stopDecode){
outputDone = true;
inputDone = true;
break;
}
// Jump to the sync frame at/before second `count` (position in microseconds).
// NOTE(review): count * 1000 * 1000 is int arithmetic and overflows for
// clips longer than ~35 minutes; should be count * 1000L * 1000.
extractor.seekTo(count * 1000 * 1000,MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
int inputBufferIndex = mediaCodec.dequeueInputBuffer(timeOut);
if(inputBufferIndex >= 0){
ByteBuffer inputBuffer;
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP){
inputBuffer = mediaCodec.getInputBuffer(inputBufferIndex);
}else{
inputBuffer = inputBuffers[inputBufferIndex];
}
int sampleData = extractor.readSampleData(inputBuffer, 0);
if(sampleData > 0 && count <= totalCount){
long sampleTime = extractor.getSampleTime();
mediaCodec.queueInputBuffer(inputBufferIndex,0,sampleData,sampleTime,0);
extractor.advance();
count++;
}else{
// Out of data (or enough frames grabbed): signal end-of-stream.
mediaCodec.queueInputBuffer(inputBufferIndex,0,0,0,MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
}
}
}
if(!outputDone){
// Stop button pressed: break out of the loop.
if(stopDecode){
outputDone = true;
inputDone = true;
break;
}
int status = mediaCodec.dequeueOutputBuffer(bufferInfo, timeOut);
if(status == MediaCodec.INFO_TRY_AGAIN_LATER){
// no output yet; retry on the next spin
}else if(status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
// output format change is irrelevant for this use case
}else if(status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
// only meaningful pre-Lollipop; ignored here
}else{
if((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0){
outputDone = true;
}
boolean doRender = (bufferInfo.size != 0);
if(doRender){
Image image = mediaCodec.getOutputImage(status);
// Convert the YUV image to a Bitmap and post it to the callback.
dataFromImage(image);
image.close();
mediaCodec.releaseOutputBuffer(status,doRender);
if(mMediaCodeFrameCallBack != null){
mMediaCodeFrameCallBack.onProgress(totalCount,progressCount);
}
progressCount++;
}
}
}
}
// NOTE(review): stop() is called again in the finally block below; the
// second stop() on an already-stopped codec throws IllegalStateException.
mediaCodec.stop();
if(mMediaCodeFrameCallBack != null){
mMediaCodeFrameCallBack.onFrameFinish();
}
}catch (Exception e){
e.printStackTrace();
}finally {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
}
if (extractor != null) {
extractor.release();
extractor = null;
}
}
}
可以看到,我们每截取一帧就调用 extractor.seekTo(count * 1000 * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC); 移动一下视频进度,保证大约 1 秒取一帧。
图片的转换
/**
 * Converts one decoded Image to a Bitmap: packs the planes into NV21,
 * JPEG-compresses via YuvImage, decodes the JPEG into a Bitmap, applies
 * the clip rotation, then posts the result to the main thread.
 */
private void dataFromImage(Image image) {
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
Rect rect = image.getCropRect();
YuvImage yuvImage = new YuvImage(getDataFromImage(image, COLOR_FormatNV21), ImageFormat.NV21, rect.width(), rect.height(), null);
yuvImage.compressToJpeg(rect, 100, outStream);
byte[] bytes = outStream.toByteArray();
Bitmap bitmap = null;
if(rotation == 0){
bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
}else{
// Rotate to the display orientation recorded in the container metadata.
Matrix matrix = new Matrix();
matrix.postRotate(rotation);
Bitmap b = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
bitmap = Bitmap.createBitmap(b,0,0,rect.width(),rect.height(),matrix,true);
}
Bitmap finalBitmap = bitmap;
// Deliver on the main thread so UI code can use the bitmap directly.
handler.post(new Runnable() {
@Override
public void run() {
if(mMediaCodeFrameCallBack != null){
mMediaCodeFrameCallBack.onFrameBitmap(finalBitmap);
}
}
});
}
/**
 * Packs the planes of a YUV_420_888 Image into a single byte array in
 * I420 (Y + U + V) or NV21 (Y + interleaved VU) layout, honoring the
 * image's crop rectangle.
 */
private static byte[] getDataFromImage(Image image, int colorFormat) {
if (colorFormat != COLOR_FormatI420 && colorFormat != COLOR_FormatNV21) {
throw new IllegalArgumentException("only support COLOR_FormatI420 " + "and COLOR_FormatNV21");
}
if (!isImageFormatSupported(image)) {
throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
}
Rect crop = image.getCropRect();
int format = image.getFormat();
int width = crop.width();
int height = crop.height();
Image.Plane[] planes = image.getPlanes();
// 4:2:0 means 12 bits per pixel, so the packed buffer is w*h*3/2 bytes.
byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
byte[] rowData = new byte[planes[0].getRowStride()];
int channelOffset = 0;// where this plane's samples start in `data`
int outputStride = 1; // spacing between this plane's samples in `data`
for (int i = 0; i < planes.length; i++) {
switch (i) {
case 0:
// Y plane: always packed first.
channelOffset = 0;
outputStride = 1;
break;
case 1:
if (colorFormat == COLOR_FormatI420) {
// I420: U plane directly after Y.
channelOffset = width * height;
outputStride = 1;
} else if (colorFormat == COLOR_FormatNV21) {
// NV21: VU interleaved after Y; U samples go to odd offsets.
channelOffset = width * height + 1;
outputStride = 2;
}
break;
case 2:
if (colorFormat == COLOR_FormatI420) {
// I420: V plane after Y (w*h) and U (w*h/4).
channelOffset = (int) (width * height * 1.25);
outputStride = 1;
} else if (colorFormat == COLOR_FormatNV21) {
// NV21: V samples go to even offsets after Y.
channelOffset = width * height;
outputStride = 2;
}
break;
}
ByteBuffer buffer = planes[i].getBuffer();
int rowStride = planes[i].getRowStride();
int pixelStride = planes[i].getPixelStride();
// Chroma planes are subsampled by 2 in both dimensions.
int shift = (i == 0) ? 0 : 1;
int w = width >> shift;
int h = height >> shift;
buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
for (int row = 0; row < h; row++) {
int length;
if (pixelStride == 1 && outputStride == 1) {
// Fast path: samples are tightly packed; bulk-copy the whole row.
length = w;
buffer.get(data, channelOffset, length);
channelOffset += length;
} else {
// Slow path: copy the row, then pick samples one by one.
length = (w - 1) * pixelStride + 1;
buffer.get(rowData, 0, length);
for (int col = 0; col < w; col++) {
data[channelOffset] = rowData[col * pixelStride];
channelOffset += outputStride;
}
}
// Skip row padding; the final row may be shorter than rowStride.
if (row < h - 1) {
buffer.position(buffer.position() + rowStride - length);
}
}
}
return data;
}
/**
 * True when the image uses one of the three YUV 4:2:0 layouts that
 * getDataFromImage can convert.
 */
private static boolean isImageFormatSupported(Image image) {
    int fmt = image.getFormat();
    return fmt == ImageFormat.YUV_420_888
            || fmt == ImageFormat.NV21
            || fmt == ImageFormat.YV12;
}
全部代码
/**
 * Extracts roughly one frame per second from a video file using
 * MediaExtractor + MediaCodec and delivers each frame as a Bitmap on the
 * main thread via {@link MediaCodeFrameCallBack}.
 *
 * Usage: construct with a file path and a callback, call {@link #decode()}
 * to start (work runs on a dedicated worker thread), {@link #stopDecode()}
 * to cancel.
 *
 * Fixes over the original draft:
 *  - stopDecode is volatile (written by the UI thread, polled by the worker);
 *  - the seek position uses long arithmetic (count * 1000 * 1000 overflowed
 *    int for clips longer than ~35 minutes);
 *  - MediaCodec.stop() is called exactly once (it used to run again in the
 *    finally block on an already-stopped codec, which throws
 *    IllegalStateException);
 *  - zero-size output buffers are released back to the codec;
 *  - the unrotated intermediate bitmap is recycled.
 */
public class MediaCodeFrameUtil implements Runnable {

    private static final int COLOR_FormatI420 = 1;
    private static final int COLOR_FormatNV21 = 2;

    private String videoPath = null;
    private MediaExtractor extractor;
    private MediaFormat videoFormat;
    private int rotation;                    // clip rotation in degrees; 0 when absent
    private final long duration;             // clip duration in microseconds
    private MediaCodec mediaCodec;
    private Thread childThread;
    private MediaCodeFrameCallBack mMediaCodeFrameCallBack;
    // Mutated on the worker thread, read elsewhere: volatile for visibility.
    private volatile int count = 0;          // index of the next second to grab
    private volatile int progressCount = 0;  // frames delivered so far
    private volatile int totalCount = 0;     // expected frame count (one per second)
    private Handler handler = null;          // main-thread handler for bitmap delivery
    // Set by stopDecode() on the UI thread, polled by the decode loop;
    // must be volatile or the worker may never observe the request.
    private volatile boolean stopDecode = false;

    /**
     * Opens the video, selects its video track and configures a decoder.
     *
     * @param path     absolute path of the video file
     * @param callBack receiver for progress / bitmap / finish events
     * @throws Exception when the file has no video track or codec setup fails
     */
    public MediaCodeFrameUtil(String path, MediaCodeFrameCallBack callBack) throws Exception {
        videoPath = path;
        mMediaCodeFrameCallBack = callBack;
        handler = new Handler(Looper.getMainLooper());
        extractor = new MediaExtractor();
        extractor.setDataSource(videoPath);
        // Select the first track whose MIME type is video/*.
        int trackCount = extractor.getTrackCount();
        for (int x = 0; x < trackCount; x++) {
            MediaFormat trackFormat = extractor.getTrackFormat(x);
            if (trackFormat.getString(MediaFormat.KEY_MIME).contains("video")) {
                videoFormat = trackFormat;
                extractor.selectTrack(x);
                break;
            }
        }
        if (videoFormat == null) {
            throw new Exception("没有发现视频关键帧");
        }
        // Ask for a flexible YUV420 output so getOutputImage() works later.
        // (The original also wrote KEY_WIDTH/KEY_HEIGHT back to themselves;
        // those were no-ops and have been dropped.)
        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        if (videoFormat.containsKey(MediaFormat.KEY_ROTATION)) {
            rotation = videoFormat.getInteger(MediaFormat.KEY_ROTATION);
        }
        // KEY_DURATION is in microseconds; one frame per second of footage.
        duration = videoFormat.getLong(MediaFormat.KEY_DURATION);
        totalCount = (int) (duration / 1000 / 1000);
        mediaCodec = MediaCodec.createDecoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
        mediaCodec.configure(videoFormat, null, null, 0);
        mediaCodec.start();
    }

    /** Starts decoding on a background thread; no-op while a pass is running. */
    public void decode() {
        if (childThread == null) {
            stopDecode = false; // new pass: clear any previous stop request
            childThread = new Thread(this, "decode");
            childThread.start();
        }
    }

    /** Requests the decode loop to exit; the worker polls the flag and stops. */
    public void stopDecode() {
        if (childThread != null) {
            // Thread.stop() is unusable; the while-loop polls this flag instead.
            stopDecode = true;
            childThread = null;
        }
    }

    @Override
    public void run() {
        // Worker-thread entry point.
        processByExtractor();
    }

    /**
     * Main decode loop: seeks to second N, feeds one sample into the codec,
     * drains the decoded image, converts it to a Bitmap and posts it out.
     * Always stops and releases the codec and the extractor before returning.
     */
    private void processByExtractor() {
        try {
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            long timeOut = 5 * 1000; // dequeue timeout in microseconds
            boolean inputDone = false;
            boolean outputDone = false;
            ByteBuffer[] inputBuffers = null;
            // Pre-Lollipop API: input buffers are fetched up front as an array.
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
                inputBuffers = mediaCodec.getInputBuffers();
            }
            count = 0;
            progressCount = 0;
            while (!outputDone) {
                if (!inputDone) {
                    if (stopDecode) { // stop requested: leave the loop
                        break;
                    }
                    // Jump to the sync frame at/before second `count`. Long
                    // arithmetic avoids int overflow on clips > ~35 minutes.
                    extractor.seekTo(count * 1000L * 1000L, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
                    int inputBufferIndex = mediaCodec.dequeueInputBuffer(timeOut);
                    if (inputBufferIndex >= 0) {
                        ByteBuffer inputBuffer;
                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                            inputBuffer = mediaCodec.getInputBuffer(inputBufferIndex);
                        } else {
                            inputBuffer = inputBuffers[inputBufferIndex];
                        }
                        int sampleData = extractor.readSampleData(inputBuffer, 0);
                        if (sampleData > 0 && count <= totalCount) {
                            long sampleTime = extractor.getSampleTime();
                            mediaCodec.queueInputBuffer(inputBufferIndex, 0, sampleData, sampleTime, 0);
                            extractor.advance();
                            count++;
                        } else {
                            // Out of data (or enough frames): signal end-of-stream.
                            mediaCodec.queueInputBuffer(inputBufferIndex, 0, 0, 0,
                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            inputDone = true;
                        }
                    }
                }
                if (!outputDone) {
                    if (stopDecode) { // stop requested: leave the loop
                        break;
                    }
                    int status = mediaCodec.dequeueOutputBuffer(bufferInfo, timeOut);
                    // Negative statuses are the INFO_* constants; none need
                    // handling here, so only real buffer indices are processed.
                    if (status >= 0) {
                        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            outputDone = true;
                        }
                        boolean doRender = (bufferInfo.size != 0);
                        if (doRender) {
                            Image image = mediaCodec.getOutputImage(status);
                            // YUV -> JPEG -> Bitmap, posted to the main thread.
                            dataFromImage(image);
                            image.close();
                            mediaCodec.releaseOutputBuffer(status, doRender);
                            if (mMediaCodeFrameCallBack != null) {
                                mMediaCodeFrameCallBack.onProgress(totalCount, progressCount);
                            }
                            progressCount++;
                        } else {
                            // Empty buffers (e.g. the EOS marker) must still be
                            // returned to the codec or its buffer pool leaks.
                            mediaCodec.releaseOutputBuffer(status, false);
                        }
                    }
                }
            }
            if (mMediaCodeFrameCallBack != null) {
                mMediaCodeFrameCallBack.onFrameFinish();
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Single stop + release. The old code also called stop() in the
            // try block, so this second stop() threw IllegalStateException.
            if (mediaCodec != null) {
                try {
                    mediaCodec.stop();
                } catch (IllegalStateException ignored) {
                    // codec may already be in an error state; release regardless
                }
                mediaCodec.release();
                mediaCodec = null;
            }
            if (extractor != null) {
                extractor.release();
                extractor = null;
            }
        }
    }

    /**
     * Converts a decoder Image to a Bitmap (planes packed to NV21, then JPEG
     * compressed via YuvImage), applies the clip rotation, and posts the
     * result to the main thread.
     */
    private void dataFromImage(Image image) {
        ByteArrayOutputStream outStream = new ByteArrayOutputStream();
        Rect rect = image.getCropRect();
        YuvImage yuvImage = new YuvImage(getDataFromImage(image, COLOR_FormatNV21),
                ImageFormat.NV21, rect.width(), rect.height(), null);
        yuvImage.compressToJpeg(rect, 100, outStream);
        byte[] bytes = outStream.toByteArray();
        Bitmap bitmap;
        if (rotation == 0) {
            bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        } else {
            // Rotate to the display orientation recorded in the container.
            Matrix matrix = new Matrix();
            matrix.postRotate(rotation);
            Bitmap b = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
            bitmap = Bitmap.createBitmap(b, 0, 0, rect.width(), rect.height(), matrix, true);
            if (b != bitmap) {
                b.recycle(); // the unrotated intermediate is no longer needed
            }
        }
        Bitmap finalBitmap = bitmap;
        // Deliver on the main thread so UI code can use the bitmap directly.
        handler.post(new Runnable() {
            @Override
            public void run() {
                if (mMediaCodeFrameCallBack != null) {
                    mMediaCodeFrameCallBack.onFrameBitmap(finalBitmap);
                }
            }
        });
    }

    /**
     * Packs the planes of a YUV_420_888 Image into a single byte array in
     * I420 (Y + U + V) or NV21 (Y + interleaved VU) layout, honoring the
     * image's crop rectangle.
     */
    private static byte[] getDataFromImage(Image image, int colorFormat) {
        if (colorFormat != COLOR_FormatI420 && colorFormat != COLOR_FormatNV21) {
            throw new IllegalArgumentException("only support COLOR_FormatI420 " + "and COLOR_FormatNV21");
        }
        if (!isImageFormatSupported(image)) {
            throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
        }
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        Image.Plane[] planes = image.getPlanes();
        // 4:2:0 means 12 bits per pixel: the packed buffer is w*h*3/2 bytes.
        byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        int channelOffset = 0; // where this plane's samples start in `data`
        int outputStride = 1;  // spacing between this plane's samples in `data`
        for (int i = 0; i < planes.length; i++) {
            switch (i) {
                case 0:
                    // Y plane: always packed first.
                    channelOffset = 0;
                    outputStride = 1;
                    break;
                case 1:
                    if (colorFormat == COLOR_FormatI420) {
                        // I420: U plane directly after Y.
                        channelOffset = width * height;
                        outputStride = 1;
                    } else if (colorFormat == COLOR_FormatNV21) {
                        // NV21: VU interleaved after Y; U samples at odd offsets.
                        channelOffset = width * height + 1;
                        outputStride = 2;
                    }
                    break;
                case 2:
                    if (colorFormat == COLOR_FormatI420) {
                        // I420: V plane after Y (w*h) and U (w*h/4).
                        channelOffset = (int) (width * height * 1.25);
                        outputStride = 1;
                    } else if (colorFormat == COLOR_FormatNV21) {
                        // NV21: V samples at even offsets after Y.
                        channelOffset = width * height;
                        outputStride = 2;
                    }
                    break;
            }
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            // Chroma planes are subsampled by 2 in both dimensions.
            int shift = (i == 0) ? 0 : 1;
            int w = width >> shift;
            int h = height >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            for (int row = 0; row < h; row++) {
                int length;
                if (pixelStride == 1 && outputStride == 1) {
                    // Fast path: samples are tightly packed; bulk-copy the row.
                    length = w;
                    buffer.get(data, channelOffset, length);
                    channelOffset += length;
                } else {
                    // Slow path: copy the row, then pick samples one by one.
                    length = (w - 1) * pixelStride + 1;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[channelOffset] = rowData[col * pixelStride];
                        channelOffset += outputStride;
                    }
                }
                // Skip row padding; the final row may be shorter than rowStride.
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
        }
        return data;
    }

    /**
     * True when the image uses one of the three YUV 4:2:0 layouts that
     * getDataFromImage can convert.
     */
    private static boolean isImageFormatSupported(Image image) {
        int fmt = image.getFormat();
        return fmt == ImageFormat.YUV_420_888
                || fmt == ImageFormat.NV21
                || fmt == ImageFormat.YV12;
    }
}
然后activity中使用
/**
 * Demo screen: decodes roughly one frame per second from a video via
 * [MediaCodeFrameUtil] and shows the resulting bitmaps in a horizontal
 * RecyclerView.
 */
class MediaCodeTestActivity : AppCompatActivity(), MediaCodeFrameCallBack {

    // Bitmaps received so far, in decode order.
    private val list: ArrayList<Bitmap> = ArrayList()

    private val mAdapter by lazy { TestAdapter() }

    private var mediaCodeFrameUtil: MediaCodeFrameUtil? = null

    // Video to decode; `!!` fails fast if the launching intent lacks the extra.
    private val resource: Photo by lazy {
        intent.getParcelableExtra<Photo>(VideoCutActivity.PATH)!!
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_media_code_test)
        mBtnJieXi?.setOnClickListener {
            // `resource` is a non-null type: the original `== null` check was
            // dead code (always false), so it has been removed.
            analysis()
        }
        mBtnJieXiStop?.setOnClickListener {
            mediaCodeFrameUtil?.stopDecode()
        }
        mRecyclerView?.apply {
            val horizontalLayout = LinearLayoutManager(this@MediaCodeTestActivity)
            horizontalLayout.orientation = LinearLayoutManager.HORIZONTAL
            layoutManager = horizontalLayout
            adapter = mAdapter
        }
    }

    /** Clears results from the previous run and starts a fresh decode pass. */
    private fun analysis() {
        list.clear() // drop old frames so they are not shown twice
        setButClickable(false)
        mediaCodeFrameUtil = MediaCodeFrameUtil(resource.path, this)
        mediaCodeFrameUtil?.decode()
    }

    private fun setButClickable(isClickable: Boolean) {
        mBtnJieXi?.isClickable = isClickable
    }

    companion object {
        const val PATH: String = "path"
    }

    override fun onProgress(totalCount: Int, currentCount: Int) {
        Log.d("yanjin", "totalCount = $totalCount currentCount = $currentCount")
    }

    override fun onFrameBitmap(bitmap: Bitmap?) {
        // Null-safe instead of `bitmap!!`: a null frame is skipped, not a crash.
        bitmap?.let {
            list.add(it)
            mAdapter.updateList(list)
        }
    }

    override fun onFrameFinish() {
        Log.d("yanjin", "onFrameFinish")
        setButClickable(true)
    }

    override fun onDestroy() {
        super.onDestroy()
        // Stop the background decode thread so it does not outlive the Activity.
        mediaCodeFrameUtil?.stopDecode()
    }
}
回调代码
/**
 * Observer for the frame-extraction pass. In this demo, onProgress and
 * onFrameFinish are invoked from the decode worker thread, while
 * onFrameBitmap is posted to the main thread by the implementation.
 */
public interface MediaCodeFrameCallBack {
/** Reports that currentCount of totalCount frames have been handled. */
void onProgress(int totalCount,int currentCount);
/** Delivers one decoded, rotation-corrected frame. */
void onFrameBitmap(Bitmap bitmap);
/** Called once when the decode pass completes. */
void onFrameFinish();
}
adapter代码
/**
 * Minimal RecyclerView adapter that renders a list of bitmaps,
 * one ImageView per item.
 */
public class TestAdapter extends RecyclerView.Adapter<TestAdapter.ViewHolder> {

    // Adapter-owned copy of the data; updateList() replaces its contents.
    private final List<Bitmap> list = new ArrayList<>();

    @NonNull
    @Override
    public ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(parent.getContext())
                .inflate(R.layout.media_bitmap_item_layout, parent, false);
        return new ViewHolder(view);
    }

    @Override
    public void onBindViewHolder(@NonNull ViewHolder holder, int position) {
        holder.mIv.setImageBitmap(list.get(position));
    }

    @Override
    public int getItemCount() {
        return list.size();
    }

    /** Replaces the adapter data with a copy of {@code list} and redraws. */
    public void updateList(@NotNull ArrayList<Bitmap> list) {
        this.list.clear();
        this.list.addAll(list);
        notifyDataSetChanged();
    }

    /**
     * Static so the holder does not keep an implicit reference to the adapter
     * instance (the original non-static inner class held one needlessly).
     */
    public static class ViewHolder extends RecyclerView.ViewHolder {
        private final ImageView mIv;

        public ViewHolder(@NonNull View itemView) {
            super(itemView);
            mIv = itemView.findViewById(R.id.mIv);
        }
    }
}
条目布局代码
<?xml version="1.0" encoding="utf-8"?>
<!-- Single thumbnail cell. The original wrapped the ImageView in a
     RelativeLayout that added nothing; the ImageView is now the root,
     removing one nesting level per item. Same id and same 100dp width. -->
<ImageView xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/mIv"
    android:layout_width="100dp"
    android:layout_height="match_parent" />
用 ffmpeg 获取画面帧的都知道,ffmpeg 要把解析出的图片先写到 SD 卡,但是 MediaCodec 不用,直接拿到 bitmap,挺好的。
参考文章
https://www.polarxiong.com/archives/Android-MediaCodec%E8%A7%86%E9%A2%91%E6%96%87%E4%BB%B6%E7%A1%AC%E4%BB%B6%E8%A7%A3%E7%A0%81-%E9%AB%98%E6%95%88%E7%8E%87%E5%BE%97%E5%88%B0YUV%E6%A0%BC%E5%BC%8F%E5%B8%A7-%E5%BF%AB%E9%80%9F%E4%BF%9D%E5%AD%98JPEG%E5%9B%BE%E7%89%87-%E4%B8%8D%E4%BD%BF%E7%94%A8OpenGL.html