2023年6月27日发(作者:)
Android MediaMuxer+MediaCodec 编码 yuv 数据成 mp4。一、简介:使用 MediaCodec 对 yuv 数据进行编码,编码的格式为 H.264(AVC)。使用 MediaMuxer 将视频 track 和音频 track 混合到 mp4 容器中,通常视频编码使用 H.264(AVC) 编码,音频编码使用 AAC 编码。二、流程分析(简要介绍一下流程,具体 api 的参数说明起来篇幅太大,不清楚的可以自己搜索一下)1. 创建编码器并配置:MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, videoWidth, videoHeight);//
设置编码的颜色格式,实则为 nv12(不同手机可能会不一样)mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);//
设置视频的比特率,比特率太小会影响编码的视频质量 mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 6);//
设置视频的帧率 mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);//
设置I帧(关键帧)的间隔时间,单位秒 mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);//
创建编码器、配置和启动 MediaCodec encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC); encoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); encoder.start(); 关于比特率可以参考:2. 编码一帧数据 private void encode(byte[] yuv, long presentationTimeUs) { //
一、给编码器设置一帧输入数据 // 1.获取一个可用的输入buffer,最大等待时长为DEFAULT_TIMEOUT_US int inputBufferIndex = encoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US); ByteBuffer inputBuffer = encoder.getInputBuffer(inputBufferIndex); // 2.将输入数据放到buffer中 inputBuffer.put(yuv); // 3.将buffer压入编码队列中,即编码线程就会处理队列中的数据了 encoder.queueInputBuffer(inputBufferIndex, 0, yuv.length, presentationTimeUs, 0); //
二、从编码器中取出一帧编码后的输出数据 // 1.获取一个可用的输出buffer,最大等待时长为DEFAULT_TIMEOUT_US MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); int outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, DEFAULT_TIMEOUT_US); ByteBuffer outputBuffer = encoder.getOutputBuffer(outputBufferIndex); // 2.MediaMuxer将编码数据写入到mp4中 // 3.用完后释放这个输出buffer encoder.releaseOutputBuffer(outputBufferIndex, false); } 3. MediaMuxer写入编码数据:在写入前,需要配置一些视频的头部信息(csd参数),否则会报错。csd参数全称 Codec-specific Data。对于H.264来说,"csd-0"和"csd-1"分别对应sps和pps;对于AAC来说,"csd-0"对应ADTS。//
写入头部信息,并启动 MediaMuxer: private int writeHeadInfo(ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo) { byte[] csd = new byte[bufferInfo.size]; outputBuffer.limit(bufferInfo.offset + bufferInfo.size); outputBuffer.position(bufferInfo.offset); outputBuffer.get(csd); ByteBuffer sps = null; ByteBuffer pps = null; for (int i = bufferInfo.size - 1; i > 3; i--) { if (csd[i] == 1 && csd[i - 1] == 0 && csd[i - 2] == 0 && csd[i - 3] == 0) { sps = ByteBuffer.allocate(i - 3); pps = ByteBuffer.allocate(bufferInfo.size - (i - 3)); sps.put(csd, 0, i - 3).position(0); pps.put(csd, i - 3, bufferInfo.size - (i - 3)).position(0); break; } } MediaFormat outputFormat = mEncoder.getOutputFormat(); if (sps != null && pps != null) { outputFormat.setByteBuffer("csd-0", sps); outputFormat.setByteBuffer("csd-1", pps); } int videoTrackIndex = mMediaMuxer.addTrack(outputFormat); Log.d(TAG, "videoTrackIndex: " + videoTrackIndex); mMediaMuxer.start(); return videoTrackIndex; }//
写⼊⼀帧编码后的数据ampleData(mVideoTrackIndex, outputBuffer, bufferInfo);4. 结束后释放相应对象();e();e();三、完整代码包含⼀些基本的返回值检查、接⼝回调、以及可以中途停⽌解码的⽅法等。import odec;import odecInfo;import ormat;import uxer;import ;import ption;import ffer;public class VideoEncoder { private static final String TAG = "VideoEncoder"; private final static String MIME_TYPE = PE_VIDEO_AVC; private static final long DEFAULT_TIMEOUT_US = 10000; private MediaCodec mEncoder; private MediaMuxer mMediaMuxer; private int mVideoTrackIndex; private boolean mStop = false; public void init(String outPath, int width, int height) { try { mStop = false; mVideoTrackIndex = -1; mMediaMuxer = new MediaMuxer(outPath, _OUTPUT_MPEG_4); mEncoder = EncoderByType(MIME_TYPE); MediaFormat mediaFormat = VideoFormat(MIME_TYPE, width, height); //
编码器输⼊是NV12格式 eger(_COLOR_FORMAT, _FormatYUV420Flexible); eger(_BIT_RATE, width * height * 6); eger(_FRAME_RATE, 30); eger(_I_FRAME_INTERVAL, 1); ure(mediaFormat, null, null, URE_FLAG_ENCODE); (); } catch (IOException e) { tackTrace(); } } public void release() { mStop = true; if (mEncoder != null) { (); e(); mEncoder = null; } if (mMediaMuxer != null) { e(); mMediaMuxer = null; } } public void encode(byte[] yuv, long presentationTimeUs) { if (mEncoder == null || mMediaMuxer == null) { Log.e(TAG, "mEncoder or mMediaMuxer is null"); return; } if (yuv == null) { Log.e(TAG, "input yuv data is null"); return; } int inputBufferIndex = eInputBuffer(DEFAULT_TIMEOUT_US); Log.d(TAG, "inputBufferIndex: " + inputBufferIndex); Log.d(TAG, "inputBufferIndex: " + inputBufferIndex); if (inputBufferIndex == -1) { Log.e(TAG, "no valid buffer available"); return; } ByteBuffer inputBuffer = utBuffer(inputBufferIndex); (yuv); nputBuffer(inputBufferIndex, 0, , presentationTimeUs, 0); while (!mStop) { Info bufferInfo = new Info(); int outputBufferIndex = eOutputBuffer(bufferInfo, DEFAULT_TIMEOUT_US); Log.d(TAG, "outputBufferIndex: " + outputBufferIndex); if (outputBufferIndex >= 0) { ByteBuffer outputBuffer = putBuffer(outputBufferIndex); // write head info if (mVideoTrackIndex == -1) { Log.d(TAG, "this is first frame, call writeHeadInfo first"); mVideoTrackIndex = writeHeadInfo(outputBuffer, bufferInfo); } if (( & _FLAG_CODEC_CONFIG) == 0) { Log.d(TAG, "write outputBuffer"); ampleData(mVideoTrackIndex, outputBuffer, bufferInfo); } eOutputBuffer(outputBufferIndex, false); break; //
跳出循环 } } } private int writeHeadInfo(ByteBuffer outputBuffer, Info bufferInfo) { byte[] csd = new byte[]; ( + ); on(); (csd); ByteBuffer sps = null; ByteBuffer pps = null; for (int i = - 1; i > 3; i--) { if (csd[i] == 1 && csd[i - 1] == 0 && csd[i - 2] == 0 && csd[i - 3] == 0) { sps = te(i - 3); pps = te( - (i - 3)); (csd, 0, i - 3).position(0); (csd, i - 3, - (i - 3)).position(0); } } MediaFormat outputFormat = putFormat(); if (sps != null && pps != null) { eBuffer("csd-0", sps); eBuffer("csd-1", pps); } int videoTrackIndex = ck(outputFormat); Log.d(TAG, "videoTrackIndex: " + videoTrackIndex); (); return videoTrackIndex; }}四、调⽤⽰例结合上⼀篇的解码器,可以做⼀些解码再编码的例⼦。链接:VideoDecoder mVideoDecoder = new VideoDecoder();putFormat(_FORMAT_NV12); //
设置输出nv12的数据VideoEncoder mVideoEncoder = null;//
某某线程中("/sdcard/4", new Callback() { @Override public void onDecode(byte[] yuv, int width, int height, int frameCount, long presentationTimeUs) { Log.d(TAG, "frameCount: " + frameCount + ", presentationTimeUs: " + presentationTimeUs); if (mVideoEncoder == null) { mVideoEncoder = new VideoEncoder(); ("/sdcard/test_4", width, height); } // yuv数据操作,例如保存或者再去编码等 (yuv, presentationTimeUs); } @Override public void onFinish() { Log.d(TAG, "onFinish"); if (mVideoEncoder != null) e(); } @Override public void onStop() { Log.d(TAG, "onStop"); if (mVideoEncoder != null) e(); }});
发布者:admin,转转请注明出处:http://www.yc00.com/web/1687821615a48110.html
评论列表(0条)