Video Encode/Decode Pipeline

This article walks through a complete video encode/decode pipeline: reading data from a camera (in YUV, H264, or MJPEG format), decoding and displaying it, then re-encoding it and saving it to MP4. The steps covered are demuxing, decoding, pixel-format conversion, display, encoding, and muxing. The full source code is attached at the end; the development environment is Qt Creator 5.7.

Pipeline

1 Read the camera video stream
2 Demux
3 Initialize the decoder
4 Initialize format conversion (YUV to RGB)
5 Initialize format conversion (RGB to YUV)
6 Initialize the encoder
7 Initialize the muxer
8 Decode -> YUV to RGB -> display -> RGB to YUV -> encode -> mux
9 Clean up

Program flow chart

1 Read the camera video stream

//allocate the demuxing context
ifmt_ctx = avformat_alloc_context();
//find the input format (demuxer)
AVInputFormat *ifmt = av_find_input_format("video4linux2");
//open the input video stream
avformat_open_input(&ifmt_ctx, inputFilename, ifmt, &options);

2 Demux

avformat_find_stream_info(ifmt_ctx, NULL);
//get the video stream index from the input format context
videoStreamIndex = -1;
videoStreamIndex = av_find_best_stream(ifmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &deCodec, 0);
//get the input video stream
in_stream = ifmt_ctx->streams[videoStreamIndex];

3 Initialize the decoder

//get the decoder context of the video stream
deCodecCtx = in_stream->codec;
avcodec_open2(deCodecCtx, deCodec, NULL);
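Note that in_stream->codec is deprecated in newer FFmpeg releases (3.1 and later). A minimal sketch of the modern equivalent, reusing the deCodec and deCodecCtx names from above, allocates a dedicated context and copies the stream parameters into it:

//allocate a fresh decoder context instead of borrowing in_stream->codec
deCodecCtx = avcodec_alloc_context3(deCodec);
//copy the stream parameters (resolution, pixel format, extradata, ...) into it
avcodec_parameters_to_context(deCodecCtx, in_stream->codecpar);
avcodec_open2(deCodecCtx, deCodec, NULL);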
4 Initialize format conversion (YUV to RGB)

//define the pixel formats
AVPixelFormat srcFormat = AV_PIX_FMT_YUV420P;
AVPixelFormat dstFormat = AV_PIX_FMT_RGB32;
swsContextYuvtoRgb = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, dstFormat, flags, NULL, NULL, NULL);

5 Initialize format conversion (RGB to YUV)

//define the pixel formats
AVPixelFormat srcFormat = AV_PIX_FMT_RGB32;
AVPixelFormat dstFormat = AV_PIX_FMT_YUV420P;
//format-conversion context (RGB to YUV)
swsContextRgbtoYuv = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, dstFormat, flags, NULL, NULL, NULL);

6 Initialize the encoder

//find the encoder
AVCodec *encodec = avcodec_find_encoder(AV_CODEC_ID_H264);
//allocate the encoder context
enCodecCtx = avcodec_alloc_context3(encodec);
//open the encoder
avcodec_open2(enCodecCtx, encodec, &param);

7 Initialize the muxer

//allocate the output format context
avformat_alloc_output_context2(&ofmt_ctx, 0, 0, outputFilename);
//add a video stream to the output format context
out_stream = avformat_new_stream(ofmt_ctx, NULL);
//set the muxer parameters
out_stream->id = 0;
out_stream->codecpar->codec_tag = 0;
avcodec_parameters_from_context(out_stream->codecpar, enCodecCtx);
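One caveat for MP4 output: the MP4 muxer expects the H264 extradata (SPS/PPS) in the container header, so the encoder should be asked for global headers before avcodec_open2 is called. A minimal sketch of the usual pattern, assuming the enCodecCtx and ofmt_ctx names above (in this program the output context is allocated after the encoder is opened, so the flag would have to be set unconditionally or the order rearranged):

//MP4 stores SPS/PPS in the container header rather than in the bitstream
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
    enCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}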
8 Decode -> YUV to RGB -> display -> RGB to YUV -> encode -> mux

while (true) {
    if (av_read_frame(ifmt_ctx, avDePacket) >= 0) {
        //check whether the current packet is video or audio
        int index = avDePacket->stream_index;
        in_stream = ifmt_ctx->streams[index];
        if (index == videoStreamIndex) {
            avcodec_decode_video2(deCodecCtx, avDeFrameYuv, &frameFinish, avDePacket);
            if (frameFinish) {
                //format conversion: YUV to RGB
                sws_scale(swsContextYuvtoRgb, (const uint8_t *const *)avDeFrameYuv->data, avDeFrameYuv->linesize, 0, videoHeight, avDeFrameRgb->data, avDeFrameRgb->linesize);
                //display
                QImage image((uchar *)buffer, videoWidth, videoHeight, QImage::Format_RGB32);
                //format conversion: RGB to YUV (inputdata/inputlinesize wrap the RGB buffer, see the full source)
                int h = sws_scale(swsContextRgbtoYuv, (const uint8_t *const *)inputdata, inputlinesize, 0, videoHeight, avEnFrameYuv->data, avEnFrameYuv->linesize);
                //hand the frame to FFmpeg's queue for encoding
                int ret = avcodec_send_frame(enCodecCtx, avEnFrameYuv);
                ret = avcodec_receive_packet(enCodecCtx, avEnPacket);
                av_interleaved_write_frame(ofmt_ctx, avEnPacket);
            }
        }
        av_packet_unref(avDePacket);
    }
}
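The loop above mixes the old decode API (avcodec_decode_video2) with the new encode API (avcodec_send_frame / avcodec_receive_packet). On current FFmpeg, decoding uses the same send/receive pattern; a minimal sketch with the variable names from above:

//feed the demuxed packet to the decoder
if (avcodec_send_packet(deCodecCtx, avDePacket) == 0) {
    //one packet may produce zero or more decoded frames
    while (avcodec_receive_frame(deCodecCtx, avDeFrameYuv) == 0) {
        //convert, display and re-encode the frame as above
    }
}
av_packet_unref(avDePacket);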
9 Clean up

//write the trailer (video index)
av_write_trailer(ofmt_ctx);
//close the output IO
avio_close(ofmt_ctx->pb);
//free the format contexts
avformat_free_context(ofmt_ctx);
avformat_free_context(ifmt_ctx);
//close the encoder and decoder
avcodec_close(enCodecCtx);
avcodec_close(deCodecCtx);
//free the encoder and decoder contexts
avcodec_free_context(&enCodecCtx);
avcodec_free_context(&deCodecCtx);
//free the format-conversion contexts
sws_freeContext(swsContextRgbtoYuv);
sws_freeContext(swsContextYuvtoRgb);

Full source code follows.

Encode/decode thread header file

The three macros //#define FFMPEG_MJPEG, //#define FFMPEG_H264 and #define FFMPEG_YUV select the format the camera outputs; not all of the FFmpeg headers included here are strictly required.

#ifndef FFMPEG_H
#define FFMPEG_H

//#define FFMPEG_MJPEG
//#define FFMPEG_H264
#define FFMPEG_YUV

#include <QThread>
#include <QWidget>
#include <QImage>
#include <QTime>
#include <QDebug>

//FFmpeg headers
extern "C" {
#include "libavutil/opt.h"
#include "libavutil/time.h"
#include "libavutil/frame.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/ffversion.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavformat/avformat.h"
#include "libavfilter/avfilter.h"
#ifndef gcc45
#include "libavutil/hwcontext.h"
#endif
}

namespace Ui {
class ffmpeg;
}

class ffmpeg : public QThread
{
    Q_OBJECT
public:
    explicit ffmpeg(QWidget *parent = nullptr);
    ~ffmpeg();

protected:
    void run();

signals:
    //emitted when a decoded image is ready
    void receiveImage(const QImage &image);

private:
    int lastMsec;
    int videoStreamIndex;            //video stream index
    int videoWidth;                  //video width
    int videoHeight;                 //video height
    int videoFps;                    //video frame rate
    int frameFinish;                 //one frame finished
    uint64_t framCount;              //frame counter
    uint8_t *buffer;                 //buffer for the decoded RGB image

    AVOutputFormat *ofmt = NULL;     //output format
    AVPacket *avDePacket;            //decode packet
    AVPacket *avEnPacket;            //encode packet
    AVFrame *avDeFrameYuv;           //decoded YUV frame
    AVFrame *avDeFrameRgb;           //decoded RGB frame
    AVFrame *avEnFrameYuv;           //YUV frame for encoding
    AVFrame *avEnFrameRgb;           //RGB frame for encoding
    AVFormatContext *ifmt_ctx;       //input (demuxing) format context
    AVFormatContext *ofmt_ctx;       //output (muxing) format context
    AVStream *in_stream;             //input video stream
    AVStream *out_stream;            //output video stream
    AVCodecContext *deCodecCtx;      //decoder context
    AVCodecContext *enCodecCtx;      //encoder context
    SwsContext *swsContextYuvtoRgb;  //format-conversion context (YUV to RGB)
    SwsContext *swsContextRgbtoYuv;  //format-conversion context (RGB to YUV)

    int oldWidth;                    //previous video width
    int oldHeight;                   //previous video height

    const char *outputFilename = "4";
    const char *inputFilename = "/dev/video0";

private:
    Ui::ffmpeg *ui;
    int initDecodeVideo();
    int initEncodeVideo();
    int playVideo();
};

#endif // FFMPEG_H
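A side note on the per-macro source formats hardcoded in initDecodeVideo in the source file below (YUYV422 for FFMPEG_YUV, YUV420P otherwise): instead of guessing at compile time, one could use whatever pixel format the decoder actually reports. A minimal sketch, reusing the deCodecCtx name:

//prefer the pixel format the decoder reports over a compile-time guess
AVPixelFormat srcFormat = deCodecCtx->pix_fmt;
if (srcFormat == AV_PIX_FMT_NONE) {
    srcFormat = AV_PIX_FMT_YUV420P;    //fall back to the macro-selected default
}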
Encode/decode thread source file

#include "ffmpeg.h"

#define TIMEMS qPrintable(QTime::currentTime().toString("HH:mm:ss zzz"))

ffmpeg::ffmpeg(QWidget *parent) : QThread(parent)
{
    framCount = 0;
    frameFinish = 0;
    initDecodeVideo();
    initEncodeVideo();
}

ffmpeg::~ffmpeg()
{
}

/* Purpose: initialize the demuxing context, the decoder context,
 * and the format-conversion context (YUV to RGB)
 * 1 demux
 * 2 decode
 * 3 format conversion
 * Parameters: none
 * Return: 0 on success, -1 on failure
 */
int ffmpeg::initDecodeVideo()
{
    //register all available file formats and decoders in the library
    av_register_all();
    //register all devices, mainly for local camera support
    avdevice_register_all();
    //initialize network stream support; required before using network streams
    avformat_network_init();
    //initialize all codecs
    avcodec_register_all();
    qDebug() << TIMEMS << "init ffmpeg lib ok" << " version:" << FFMPEG_VERSION;

    AVDictionary *options = NULL;
    AVCodec *deCodec = NULL;    //decoder
    av_dict_set_int(&options, "rtbufsize", 18432000, 0);
#ifdef FFMPEG_MJPEG
    av_dict_set(&options, "framerate", "25", 0);
    av_dict_set(&options, "video_size", "1920x1080", 0);
    //av_dict_set(&options, "video_size", "1280x720", 0);
    av_dict_set(&options, "input_format", "mjpeg", 0);
#endif
    //allocate the demuxing context
    ifmt_ctx = avformat_alloc_context();
    //find the demuxer
    AVInputFormat *ifmt = av_find_input_format("video4linux2");
    if (nullptr != ifmt) {
        qDebug("input device name video4linux2!");
    } else {
        qDebug("null pointer");
    }

    //open the input video stream for demuxing
    int result = avformat_open_input(&ifmt_ctx, inputFilename, ifmt, &options);
    if (result < 0) {
        qDebug() << TIMEMS << "open input error" << inputFilename;
        return -1;
    }
    //free the option dictionary
    if (options != NULL) {
        av_dict_free(&options);
    }

    //read the stream information
    result = avformat_find_stream_info(ifmt_ctx, NULL);
    if (result < 0) {
        qDebug() << TIMEMS << "find stream info error";
        return -1;
    }

    videoStreamIndex = -1;
    videoStreamIndex = av_find_best_stream(ifmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &deCodec, 0);
    if (videoStreamIndex < 0) {
        qDebug() << TIMEMS << "find video stream index error";
        return -1;
    }

    //get the input video stream from the input format context
    in_stream = ifmt_ctx->streams[videoStreamIndex];
    if (!in_stream) {
        printf("Failed to get input stream\n");
        return -1;
    }

    //get the decoder context of the video stream
    deCodecCtx = in_stream->codec;

    //get the resolution
    videoWidth = in_stream->codec->width;
    videoHeight = in_stream->codec->height;
    //bail out if the width/height could not be determined
    if (videoWidth == 0 || videoHeight == 0) {
        qDebug() << TIMEMS << "find width height error";
        return -1;
    }

    //get the stream frame rate; guard against a zero denominator, which does occur
    int num = in_stream->codec->framerate.num;
    int den = in_stream->codec->framerate.den;
    if (num != 0 && den != 0) {
        videoFps = num / den;
    }

    QString videoInfo = QString("video stream -> index: %1 format: %2 duration: %3 s fps: %4 resolution: %5*%6")
            .arg(videoStreamIndex).arg(ifmt_ctx->iformat->name)
            .arg((ifmt_ctx->duration) / 1000000).arg(videoFps).arg(videoWidth).arg(videoHeight);
    qDebug() << TIMEMS << videoInfo;

    //open the video decoder
    result = avcodec_open2(deCodecCtx, deCodec, NULL);
    if (result < 0) {
        qDebug() << TIMEMS << "open video codec error";
        return -1;
    }

    avDePacket = av_packet_alloc();
    avDeFrameYuv = av_frame_alloc();
    avDeFrameRgb = av_frame_alloc();

    //compare with the previous width/height; reallocate when they change
    if (oldWidth != videoWidth || oldHeight != videoHeight) {
        int byte = avpicture_get_size(AV_PIX_FMT_RGB32, videoWidth, videoHeight);
        buffer = (uint8_t *)av_malloc(byte * sizeof(uint8_t));
        oldWidth = videoWidth;
        oldHeight = videoHeight;
    }

    //define the pixel formats
    AVPixelFormat srcFormat = AV_PIX_FMT_YUV420P;
    AVPixelFormat dstFormat = AV_PIX_FMT_RGB32;
    //either of the following works
    //avpicture_fill((AVPicture *)avDeFrameRgb, buffer, dstFormat, videoWidth, videoHeight);
    av_image_fill_arrays(avDeFrameRgb->data, avDeFrameRgb->linesize, buffer, dstFormat, videoWidth, videoHeight, 1);

    //SWS_FAST_BILINEAR is the fastest option but may lose some image data; change it if needed
    int flags = SWS_FAST_BILINEAR;
#ifdef FFMPEG_MJPEG
    srcFormat = AV_PIX_FMT_YUV420P;
#endif
#ifdef FFMPEG_YUV
    srcFormat = AV_PIX_FMT_YUYV422;
#endif
#ifdef FFMPEG_H264
    srcFormat = AV_PIX_FMT_YUV420P;
#endif
    swsContextYuvtoRgb = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, dstFormat, flags, NULL, NULL, NULL);
    qDebug() << TIMEMS << "init ffmpegVideo ok";
    return 0;
}

/* Purpose: initialize the encoder, the format-conversion context (RGB to YUV), and the muxer
 * 1 format conversion
 * 2 encode
 * 3 mux
 * Parameters: none
 * Return: 0 on success, -1 on failure
 */
int ffmpeg::initEncodeVideo()
{
    //find the encoder
    AVCodec *encodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!encodec) {
        qDebug() << "Failed to find encoder.";
        return -1;
    }
    //allocate the encoder context
    enCodecCtx = avcodec_alloc_context3(encodec);
    if (!enCodecCtx) {
        qDebug() << "Failed to alloc context3.";
        return -1;
    }
    videoFps = 10;
    //set the encoder parameters
    enCodecCtx->bit_rate = 400000;
    enCodecCtx->width = videoWidth;
    enCodecCtx->height = videoHeight;
    enCodecCtx->time_base = {1, videoFps};
    enCodecCtx->framerate = {videoFps, 1};
    enCodecCtx->gop_size = 50;
    enCodecCtx->keyint_min = 20;
    enCodecCtx->max_b_frames = 0;
    enCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    enCodecCtx->codec_id = AV_CODEC_ID_H264;
    enCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;    //store codec info in the container header (required for MP4)
    enCodecCtx->thread_count = 8;
    //quantizer range: larger values mean lower quality but faster encoding
    enCodecCtx->qmin = 20;
    enCodecCtx->qmax = 30;
    enCodecCtx->me_range = 16;
    enCodecCtx->max_qdiff = 4;
    enCodecCtx->qcompress = 0.6;
    enCodecCtx->max_b_frames = 0;
    enCodecCtx->b_frame_strategy = true;

    AVDictionary *param = 0;
    av_dict_set(&param, "preset", "superfast", 0);
    av_dict_set(&param, "tune", "zerolatency", 0);
    //open the encoder
    int ret = avcodec_open2(enCodecCtx, encodec, &param);
    if (ret < 0) {
        qDebug() << "Failed to open enCodecCtx.";
        return -1;
    }
    qDebug() << "Open enCodecCtx succeed.";

    //allocate the output format context
    avformat_alloc_output_context2(&ofmt_ctx, 0, 0, outputFilename);
    //add a video stream to the output format context
    out_stream = avformat_new_stream(ofmt_ctx, NULL);
    //set the muxer parameters
    out_stream->id = 0;
    out_stream->codecpar->codec_tag = 0;
    avcodec_parameters_from_context(out_stream->codecpar, enCodecCtx);
    qDebug() << "====================================================";
    av_dump_format(ofmt_ctx, 0, outputFilename, 1);
    qDebug() << "====================================================";

    //define the pixel formats
    AVPixelFormat srcFormat = AV_PIX_FMT_RGB32;
    AVPixelFormat dstFormat = AV_PIX_FMT_YUV420P;
    //SWS_FAST_BILINEAR is the fastest option but may lose some image data; change it if needed
    int flags = SWS_FAST_BILINEAR;
    //format-conversion context (RGB to YUV)
    swsContextRgbtoYuv = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, dstFormat, flags, NULL, NULL, NULL);

    //allocate the AVFrame and its pixel buffer
    avEnFrameYuv = av_frame_alloc();
    avEnFrameYuv->format = dstFormat;
    avEnFrameYuv->width = videoWidth;
    avEnFrameYuv->height = videoHeight;
    ret = av_frame_get_buffer(avEnFrameYuv, 32);
    if (ret < 0) {
        qDebug() << "Failed to av_frame_get_buffer.";
        return -1;
    }

    //open the output IO and write the MP4 header
    avio_open(&ofmt_ctx->pb, outputFilename, AVIO_FLAG_WRITE);
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        qDebug() << "Failed to avformat_write_header.";
        return -1;
    }
    avEnPacket = av_packet_alloc();
    av_init_packet(avEnPacket);
    qDebug() << TIMEMS;
    return 0;
}

/*
1 demux ---> 2 decode ---> 3 format conversion (YUV to RGB) ---> 4 format conversion (RGB to YUV) ---> 5 encode ---> 6 mux
*/
int ffmpeg::playVideo()
{
    while (true) {
        if (av_read_frame(ifmt_ctx, avDePacket) >= 0) {
            //check whether the current packet is video or audio
            int index = avDePacket->stream_index;
            in_stream = ifmt_ctx->streams[index];
            if (index == videoStreamIndex) {
                avcodec_decode_video2(deCodecCtx, avDeFrameYuv, &frameFinish, avDePacket);
                if (frameFinish) {
                    //convert the data into an image (YUV to RGB)
                    sws_scale(swsContextYuvtoRgb, (const uint8_t *const *)avDeFrameYuv->data, avDeFrameYuv->linesize, 0, videoHeight, avDeFrameRgb->data, avDeFrameRgb->linesize);
                    //either of the following works
                    //QImage image(avDeFrameRgb->data[0], videoWidth, videoHeight, QImage::Format_RGB32);
                    QImage image((uchar *)buffer, videoWidth, videoHeight, QImage::Format_RGB32);
                    if (!image.isNull()) {
                        emit receiveImage(image);
                    }

                    //wrap the RGB buffer for the RGB-to-YUV conversion
                    uint8_t *inputdata[AV_NUM_DATA_POINTERS] = { 0 };
                    int inputlinesize[AV_NUM_DATA_POINTERS] = { 0 };
                    inputdata[0] = buffer;
                    inputlinesize[0] = videoWidth * 4;
                    int h = sws_scale(swsContextRgbtoYuv, (const uint8_t *const *)inputdata, inputlinesize, 0, videoHeight, avEnFrameYuv->data, avEnFrameYuv->linesize);

                    int base = 90000 / videoFps;
                    //hand the frame to FFmpeg's queue for encoding
                    avEnFrameYuv->pts = framCount;
                    framCount = framCount + base;
                    qDebug() << "framCount = " << framCount;
                    if (framCount > 6000 * 200) {
                        qDebug() << "encoding finished";
                        qDebug() << TIMEMS;
                        break;
                    }
                    int ret = avcodec_send_frame(enCodecCtx, avEnFrameYuv);
                    if (ret != 0) {
                        continue;
                    }
                    ret = avcodec_receive_packet(enCodecCtx, avEnPacket);
                    if (ret != 0) {
                        continue;
                    }
                    qDebug() << "avEnPacket = " << avEnPacket->size;
                    av_interleaved_write_frame(ofmt_ctx, avEnPacket);
                }
            }
            av_packet_unref(avDePacket);
        }
    }

    //write the trailer (video index)
    av_write_trailer(ofmt_ctx);
    //close the output IO
    avio_close(ofmt_ctx->pb);
    //free the format contexts
    avformat_free_context(ofmt_ctx);
    avformat_free_context(ifmt_ctx);
    //close the encoder and decoder
    avcodec_close(enCodecCtx);
    avcodec_close(deCodecCtx);
    //free the encoder and decoder contexts
    avcodec_free_context(&enCodecCtx);
    avcodec_free_context(&deCodecCtx);
    //free the format-conversion contexts
    sws_freeContext(swsContextRgbtoYuv);
    sws_freeContext(swsContextYuvtoRgb);
    qDebug() << TIMEMS << "stop ffmpeg thread";
    return 0;
}

void ffmpeg::run()
{
    playVideo();
}
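A note on the timestamp handling in playVideo above: pts is advanced by 90000/videoFps per frame, which assumes a 1/90000 time base, while the encoder was opened with time_base = {1, videoFps}. A more conventional sketch (my suggestion, not the author's code) increments pts by one frame in the encoder time base and rescales each packet into the muxer's stream time base:

//pts counts frames in the encoder time base {1, videoFps}
avEnFrameYuv->pts = framCount++;
if (avcodec_send_frame(enCodecCtx, avEnFrameYuv) == 0
        && avcodec_receive_packet(enCodecCtx, avEnPacket) == 0) {
    //map timestamps into the stream time base chosen by the MP4 muxer
    av_packet_rescale_ts(avEnPacket, enCodecCtx->time_base, out_stream->time_base);
    av_interleaved_write_frame(ofmt_ctx, avEnPacket);
}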
Main program:

#include "mainwindow.h"
#include "ui_mainwindow.h"
#include "ffmpeg.h"

MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    ffmpeg *ffmpegThread = new ffmpeg(this);
    ffmpegThread->start();
    connect(ffmpegThread, SIGNAL(receiveImage(QImage)), this, SLOT(updateImage(QImage)));
}

MainWindow::~MainWindow()
{
    delete ui;
}

void MainWindow::updateImage(const QImage &image)
{
    ui->label->resize(image.width(), image.height());
    ui->label->setPixmap(QPixmap::fromImage(image));
}
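To build the project, the qmake file needs to link the FFmpeg libraries. A minimal illustrative .pro sketch; the include and library paths are assumptions and depend on where FFmpeg is installed:

# adjust INCLUDEPATH/LIBS to your FFmpeg installation
QT += core gui widgets
CONFIG += c++11
INCLUDEPATH += /usr/local/include
LIBS += -L/usr/local/lib -lavformat -lavcodec -lavdevice -lavfilter -lavutil -lswscale
SOURCES += main.cpp mainwindow.cpp ffmpeg.cpp
HEADERS += mainwindow.h ffmpeg.h
FORMS += mainwindow.ui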