// PayloadAPP/Src/Video/cffmpeg_decode.cpp
#include "cffmpeg_decode.h"
// Cffmpeg_decode::Cffmpeg_decode() {
Cffmpeg_decode::Cffmpeg_decode(QObject *parent) : QObject(parent) {
inputFormatCtx = avformat_alloc_context();
inputPacket = av_packet_alloc();
yuvFrame = av_frame_alloc();
rgbFrame = av_frame_alloc();
avformat_network_init();
m_rtsp_transport = "tcp";
}
Cffmpeg_decode::~Cffmpeg_decode() {
if (inputPacket)
av_packet_free(&inputPacket);
if (yuvFrame)
av_frame_free(&yuvFrame);
if (rgbFrame)
av_frame_free(&rgbFrame);
if (encoderCtx)
avcodec_free_context(&encoderCtx); // frees and closes the encoder context
if (decoderCtx)
avcodec_free_context(&decoderCtx); // frees and closes the decoder context
if (inputFormatCtx)
avformat_close_input(&inputFormatCtx);
}
void Cffmpeg_decode::setStreamUrl(QString url) { _url = url; }
void Cffmpeg_decode::setPlayVideo(bool bPlay) {
mutex.lock();
bPlayVideoFlag = bPlay;
if (bPlayVideoFlag) {
// Initialize previous_pts_time to an invalid value (used when pulling the stream)
previous_pts_time = -1.0; // means there is no previous frame timestamp yet
first_frame_pts_time = 0.0; // PTS time of the first frame
first_frame_system_time = 0.0; // system time when the first frame was decoded
}
mutex.unlock();
}
/**
 * @brief Enable or disable push streaming
 * @param bPushStream: start/stop pushing the stream
 */
void Cffmpeg_decode::setPushStream(bool bPushStream) {
mutex.lock();
bPushStreamFlag = bPushStream;
if (bPushStreamFlag) { // push-stream initialization
// emit sendInitPushStream_Signal(inputFormatCtx);
} else { // stop pushing the stream
emit sendStopPushStream_Signal();
bOpenPushStreamFlag = false;
firstDts = AV_NOPTS_VALUE; // reset the first-frame DTS
}
mutex.unlock();
}
bool Cffmpeg_decode::open_input_file() {
if (_url.isEmpty())
return false;
if (!m_bSetRtspTransport) {
m_rtsp_transport = _url.left(4) == "rtmp" ? "tcp" : "udp";
}
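// Note: "rtsp_transport" below is an option of FFmpeg's RTSP demuxer; for
// non-RTSP inputs (e.g. rtmp) avformat_open_input() simply does not consume it.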
//======================== decoding ============================
AVDictionary *avdic = NULL;
// If TCP was not requested, fall back to UDP transport
if (m_rtsp_transport == "udp")
av_dict_set(&avdic, "rtsp_transport", "udp", 0);
else
av_dict_set(&avdic, "rtsp_transport", "tcp", 0);
// Receive buffer size; can be increased for 1080p streams
av_dict_set(&avdic, "buffer_size", "4096000", 0);
// Timeout before dropping the connection, in microseconds // listen_timeout
// av_dict_set(&avdic, "listen_timeout", "200000", 0);
av_dict_set(&avdic, "stimeout", "3000000", 0); // 3 second timeout
av_dict_set(&avdic, "max_delay", "300000", 0); // maximum delay 300 ms
av_dict_set(&avdic, "tune", "zerolatency", 0); // real-time encoding
av_dict_set(&avdic, "preset", "faster", 0); // ultrafast
av_dict_set(&avdic, "threads", "auto", 0); // use automatic thread count
inputFormatCtx->flags |= AVFMT_FLAG_NONBLOCK;
// Open the input stream
if (avformat_open_input(&inputFormatCtx, _url.toUtf8().data(), NULL, &avdic) <
0) {
printf("Cannot open input file.\n");
av_dict_free(&avdic);
return false;
}
av_dict_free(&avdic); // release any options the demuxer did not consume
// Retrieve stream information
if (avformat_find_stream_info(inputFormatCtx, NULL) < 0) {
printf("Cannot find any stream in file.\n");
return false;
}
// Find the first video stream in the input
for (uint i = 0; i < inputFormatCtx->nb_streams; i++) {
if (inputFormatCtx->streams[i]->codecpar->codec_type ==
AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
break;
}
}
if (videoStreamIndex == -1) {
printf("Cannot find video stream in file.\n");
return false;
}
// Get the decoder parameters of the video stream
videoCodecPara = inputFormatCtx->streams[videoStreamIndex]->codecpar;
decoder = avcodec_find_decoder(videoCodecPara->codec_id);
if (!decoder) {
printf("Cannot find valid decode codec.\n");
return false;
}
// Allocate the decoder context
decoderCtx = avcodec_alloc_context3(decoder);
if (!decoderCtx) {
printf("Cannot find valid decode codec context.\n");
return false;
}
// Initialize the decoder context from the stream parameters
if (avcodec_parameters_to_context(decoderCtx, videoCodecPara) < 0) {
printf("Cannot initialize parameters.\n");
return false;
}
// Speed up decoding
decoderCtx->lowres = decoder->max_lowres;
decoderCtx->flags2 |= AV_CODEC_FLAG2_FAST;
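// Note: lowres asks the decoder to output at a reduced resolution; for codecs
// whose max_lowres is 0 (H.264 in recent FFmpeg builds) this is effectively a
// no-op. AV_CODEC_FLAG2_FAST allows non-spec-compliant speed-up tricks.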
// Open the decoder
if (avcodec_open2(decoderCtx, decoder, NULL) < 0) {
printf("Cannot open codec.\n");
return false;
}
// Initialize the image converter (decoded pixel format -> RGB32, same size)
swsCtx = sws_getContext(decoderCtx->width, //
decoderCtx->height, //
decoderCtx->pix_fmt, //
decoderCtx->width, //
decoderCtx->height, //
AV_PIX_FMT_RGB32, //
SWS_BICUBIC, NULL, NULL, NULL);
// Colour-space adjustment (optional)
// sws_setColorspaceDetails(swsCtx, sws_getCoefficients(SWS_CS_DEFAULT), 0,
// sws_getCoefficients(SWS_CS_DEFAULT), 0, 0, 0, 0);
numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, //
decoderCtx->width, //
decoderCtx->height, //
1);
out_buffer = (unsigned char *)av_malloc(numBytes * sizeof(unsigned char));
int res = av_image_fill_arrays(rgbFrame->data, //
rgbFrame->linesize, //
out_buffer, //
AV_PIX_FMT_RGB32, //
decoderCtx->width, //
decoderCtx->height, 1);
if (res < 0) {
qDebug() << "Fill arrays failed.\n";
return false;
}
// Initialize saving of the raw (elementary) stream
m_saveVideoFlag = openSave();
return true;
}
// Runs continuously in the worker thread
void Cffmpeg_decode::run() {
if (!open_input_file()) {
qDebug() << "Please open video file first.";
emit sendConnectFail(1);
IsstopPlay = true;
return;
}
// Initialize previous_pts_time to an invalid value (used when pulling the stream)
previous_pts_time = -1.0; // means there is no previous frame timestamp yet
first_frame_pts_time = 0.0; // PTS time of the first frame
first_frame_system_time = 0.0; // system time when the first frame was decoded
// Used for push streaming
firstDts = AV_NOPTS_VALUE; // initialize the first-frame DTS
startTime = av_gettime();
// Read packets from the input
while (av_read_frame(inputFormatCtx, inputPacket) >= 0) {
if (IsstopPlay) {
qDebug() << "video play stop";
break;
}
// When push streaming starts, record the starting DTS from the first packet
if (firstDts == AV_NOPTS_VALUE && bPushStreamFlag) {
firstDts = inputPacket->dts;
startTime = av_gettime(); // system time at which the first packet arrived
}
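// firstDts and startTime presumably give the push-stream side a common
// baseline for pacing outgoing packets against wall-clock time.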
if (inputPacket->stream_index == videoStreamIndex) {
// Push stream
if (bPushStreamFlag) {
if (!bOpenPushStreamFlag) { // push-stream initialization
emit sendInitPushStream_Signal(inputFormatCtx);
bOpenPushStreamFlag = true;
// QThread::msleep(10);
}
AVPacket *outputPacket = av_packet_clone(inputPacket);
emit sendStreamData_Signal(outputPacket, frm_cnt, startTime, firstDts);
}
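// Note: av_packet_clone() creates a new reference to the packet data;
// ownership is assumed to pass to the slot connected to
// sendStreamData_Signal, which is expected to free it with av_packet_free().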
// Save the raw stream
if (m_saveVideoFlag) {
// m_formatContextSave was created with only one video stream, while the
// packet's stream index in the input is not necessarily 0, which can cause
// errors such as [Invalid packet stream index: 1].
// So remap stream_index to the video stream index of m_formatContextSave;
// since there is only a single stream, it is simply set to 0.
inputPacket->stream_index = 0;
av_write_frame(m_formatContextSave,
inputPacket); // write the packet to the output media file
}
if (bPlayVideoFlag) {
// Decode the packet
if (avcodec_send_packet(decoderCtx, inputPacket) >= 0) {
int ret = 0;
// Drain every frame the decoder has ready for this packet
while (ret >= 0) {
ret = avcodec_receive_frame(decoderCtx, yuvFrame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break; // decoder needs more input or has been fully flushed
} else if (ret < 0) {
fprintf(stderr, "Error during decoding\n");
break;
}
if (inputPacket->dts <= 0)
continue;
// Get the current frame's PTS (presentation timestamp) converted to a common
// time base of {1, AV_TIME_BASE}, i.e. microseconds
double pts_time = av_rescale_q(
inputPacket->pts,
inputFormatCtx->streams[videoStreamIndex]->time_base,
AVRational{1, AV_TIME_BASE});
// For the first frame, record the system time and the PTS time
if (previous_pts_time == -1.0) {
first_frame_pts_time = pts_time;
first_frame_system_time = av_gettime(); // system time (microseconds)
previous_pts_time = pts_time;
}
// Convert to an RGB image and display it
sws_scale(swsCtx, //
yuvFrame->data, //
yuvFrame->linesize, //
0, //
decoderCtx->height, //
rgbFrame->data, //
rgbFrame->linesize);
QImage img(out_buffer, //
decoderCtx->width, //
decoderCtx->height, //
QImage::Format_RGB32);
emit sendQImage(img);
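// Note: this QImage constructor does not copy out_buffer; the emitted image
// still refers to the buffer that the next sws_scale() call will overwrite.
// A receiver on another thread should take its own deep copy (QImage::copy()).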
// QThread::msleep(1);
// PTS time elapsed since the first frame
double elapsed_pts_time = pts_time - first_frame_pts_time;
// System time elapsed since the first frame
double elapsed_system_time = av_gettime() - first_frame_system_time;
// Time to wait (in microseconds)
double wait_time = elapsed_pts_time - elapsed_system_time;
// qDebug() << "pull stream sleep time:"
// << QString::number(wait_time / 1000.0);
if (wait_time > 0) {
av_usleep(wait_time); // sleep so playback stays in sync with the PTS
}
// Update previous_pts_time to the current frame's PTS
previous_pts_time = pts_time;
}
}
}
av_packet_unref(inputPacket);
}
frm_cnt++;
if (IsstopPlay) {
if (m_saveVideoFlag) {
saveDone();
}
break;
}
// Causes corrupted frames
// if (av_read_frame(inputFormatCtx, inputPacket) < 0) {
// break; // reached end of file
// }
}
if (m_saveVideoFlag) {
saveDone();
}
// QCoreApplication::processEvents();
qDebug() << "All video play done";
}
// Quit
void Cffmpeg_decode::stop() {
// Only signal the decoding loop to exit; the FFmpeg resources are released
// in the destructor once run() has returned, so nothing is freed here while
// the loop may still be using it.
IsstopPlay = true;
}
/**
 * @brief Set the directory used for saving the pulled video stream
 * @param fileDirPath: directory in which to save the pulled video
 */
void Cffmpeg_decode::setSaveFileDirPath(QString fileDirPath) {
saveFileDirPath = fileDirPath;
}
/**
 * @brief Open the output stream for saving the video locally
 * @return true on success, false otherwise
 */
bool Cffmpeg_decode::openSave() {
QDir dir;
if (!dir.exists(saveFileDirPath)) {
dir.mkdir(saveFileDirPath);
}
QString strName =
QString("/%1.h264")
.arg(QDateTime::currentDateTime().toString("yyyy-MM-dd HH-mm-ss"));
strName = saveFileDirPath + strName;
// const AVOutputFormat *ofmt = av_guess_format("mp4", NULL, NULL);
int ret = avformat_alloc_output_context2(
&m_formatContextSave, nullptr, nullptr, strName.toStdString().data());
if (ret < 0) {
// free();
qWarning() << "DecodeVideo Error";
return false;
}
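// Note: with a ".h264" file name, avformat_alloc_output_context2() should
// select FFmpeg's raw H.264 muxer, so the saved file is an elementary stream
// rather than a container such as MP4.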
// m_videoStreamOut->codecpar->codec_tag = 0;
// if (m_formatContextSave->oformat->flags & AVFMT_GLOBALHEADER) {
// m_formatContextSave->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
// // m_videoStreamOut->codecpar->extradata = (uint8_t *)av_malloc(1024);
// // m_videoStreamOut->codecpar->extradata_size = 0;
// }
// Create and initialize an AVIOContext for accessing the resource indicated by the URL.
ret = avio_open(&m_formatContextSave->pb, strName.toStdString().data(),
AVIO_FLAG_WRITE);
if (ret < 0) {
// free();
qWarning() << "Open file Error";
return false;
}
// Add a new stream to the media file
m_videoStreamOut = avformat_new_stream(m_formatContextSave, nullptr);
if (!m_videoStreamOut) {
qWarning() << "Create New Stream Error";
return false;
}
// Fill codecpar by copying parameters (here they are taken from the decoder context)
ret = avcodec_parameters_from_context(m_videoStreamOut->codecpar, decoderCtx);
if (ret < 0) {
// free();
qWarning() << "avcodec_parameters_from_context Failed";
return false;
}
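// Since the packets are written out unmodified (stream copy), taking the
// parameters from the decoder context should match the data actually written.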
// Write the file header
ret = avformat_write_header(m_formatContextSave, nullptr);
if (ret < 0) {
// free();
qWarning() << "avformat_write_header Error";
return false;
}
m_writeHeader = true;
qDebug() << "保存视频文件初始化成功!";
return true;
}
/**
 * @brief Finish saving the video and release the related resources
 */
void Cffmpeg_decode::saveDone() {
if (m_formatContextSave && m_writeHeader) {
av_write_trailer(m_formatContextSave); // write the file trailer
m_writeHeader = false;
}
// Close the file
if (m_formatContextSave && !(m_formatContextSave->flags & AVFMT_NOFILE)) {
avio_close(m_formatContextSave->pb);
// av_freep(m_videoStreamOut);
if (m_formatContextSave) {
avformat_free_context(m_formatContextSave);
m_formatContextSave = nullptr;
}
}
m_saveVideoFlag = false;
}
void Cffmpeg_decode::setFlowType(QString param) {
m_rtsp_transport = param;
m_bSetRtspTransport = true;
}
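/*
 * Minimal usage sketch (an assumption, not part of this file: the object is
 * driven from a separate QThread, and "thread", "worker" and "label" are
 * hypothetical names created by the caller):
 *
 *   auto *worker = new Cffmpeg_decode;
 *   auto *thread = new QThread;
 *   worker->moveToThread(thread);
 *   worker->setStreamUrl("rtsp://example.com/stream"); // placeholder URL
 *   worker->setPlayVideo(true);
 *   QObject::connect(thread, &QThread::started, worker, &Cffmpeg_decode::run);
 *   QObject::connect(worker, &Cffmpeg_decode::sendQImage, label,
 *                    [label](const QImage &img) {
 *                      label->setPixmap(QPixmap::fromImage(img));
 *                    });
 *   thread->start();
 *   // ...later, to stop:
 *   worker->stop();
 *   thread->quit();
 *   thread->wait();
 */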