// PayloadAPP/Src/VideoGL/decodestream.cpp
#include "decodestream.h"
DecodeStream::DecodeStream(QObject *parent) : QObject{parent} {
    /********* Enumerate the hardware decoder types available on this host *********/
    QStringList typeNames;
    AVHWDeviceType hwType = AV_HWDEVICE_TYPE_NONE;
    for (;;) {
        hwType = av_hwdevice_iterate_types(hwType);
        if (hwType == AV_HWDEVICE_TYPE_NONE) {
            break;  // end of the iteration
        }
        m_HWDeviceTypes.append(hwType);
        if (const char *name = av_hwdevice_get_type_name(hwType)) {
            typeNames.append(QString(name));
        }
    }
    qDebug() << "支持的硬件解码器:" << typeNames;
}
/**
 * @brief Store the shared packet queue and demuxer context, then create the
 *        decoder for the selected video stream.
 * @param queueManager  shared packet queue (not owned)
 * @param formatContext demuxer context (not owned)
 * @param videoIndex    index of the video stream
 * @return true when the decoder was initialized successfully
 */
bool DecodeStream::init(AVPacketQueueManager *queueManager,
                        AVFormatContext *formatContext, int videoIndex) {
    m_videoIndex = videoIndex;
    m_formatContext = formatContext;
    m_queueManager = queueManager;
    return initDecoder(formatContext, videoIndex);
}
// 视频解码线程任务
void DecodeStream::startDecode() {
qDebug() << "deocdeStreamThreadID:" << QThread::currentThreadId();
m_start = initObject();
if (!m_start) {
free();
return;
}
while (m_start) {
try {
AVPacket *inputPacket = m_queueManager->dequeueDecodePacket();
if (inputPacket) {
AVFrame *frame = decodePacket(inputPacket);
emit repaintSignal(frame);
av_packet_unref(inputPacket);
av_packet_free(&inputPacket);
inputPacket = nullptr;
} else {
// QThread::usleep(1000);
// av_usleep(1000);
}
} catch (...) {
}
av_usleep(1000);
}
free();
qDebug() << "Decoding Thread End!";
emit sendErrorMessageSignal("视频解码结束!",
NotificationType::NOTIFICATION_SUCCESS);
}
/**
 * @brief Request the decode loop to stop; startDecode() finishes its current
 *        iteration, releases resources and exits.
 */
void DecodeStream::close() {
    m_start = false;
    qDebug() << "decode Stream close!" << m_start;
}
/**
 * @brief Enable or disable hardware-accelerated decoding.
 * @param flag true = use the hardware decoder, false = software decoding
 */
void DecodeStream::setHWDecoder(bool flag) { m_HWDecoder = flag; }
/**
 * @brief Allocate the reusable frame buffers used by the decode loop.
 * @return true on success; on failure all partially allocated state is freed.
 *
 * Consistency fix: the first allocation-failure warning was unconditional
 * while the second was guarded by PRINT_LOG; both are now guarded alike.
 */
bool DecodeStream::initObject() {
    // Frame that receives decoder output (or a GPU surface when HW decoding).
    m_frame = av_frame_alloc();
    if (!m_frame) {
#if PRINT_LOG
        qWarning() << "av_frame_alloc() Error";
#endif
        free();
        return false;
    }
    // Frame that receives the mapped data of a hardware surface (see dataCopy).
    m_frameHW = av_frame_alloc();
    if (!m_frameHW) {
#if PRINT_LOG
        qWarning() << "av_frame_alloc() Error";
#endif
        free();
        return false;
    }
    return true;
}
/**
 * @brief Create and open the decoder context for the given video stream.
 * @param inputFormatContext demuxer context (not owned)
 * @param videoIndex index of the video stream inside the context
 * @return true when the decoder is ready for avcodec_send_packet()
 *
 * Defect fixed: the return value of initHWDecoder() was ignored. When device
 * creation fails, initHWDecoder() frees m_codecContext, and the old code then
 * called avcodec_open2() on a null context. We now fall back to software
 * decoding when the context survives, and bail out when it does not.
 */
bool DecodeStream::initDecoder(AVFormatContext *inputFormatContext,
                               int videoIndex) {
    if (!inputFormatContext) return false;
    // Stream parameters: AVCodecParameters replaces the deprecated
    // AVStream::codec in current FFmpeg versions.
    AVStream *videoStream = inputFormatContext->streams[videoIndex];
    AVCodecParameters *videoCodecPara = videoStream->codecpar;
    // Newer FFmpeg returns const AVCodec* from avcodec_find_decoder().
    const AVCodec *codec = avcodec_find_decoder(videoCodecPara->codec_id);
    if (!codec) {
        printf("Cannot find valid decode codec.\n");
        return false;
    }
    // Allocate AVCodecContext with default fields.
    m_codecContext = avcodec_alloc_context3(codec);
    if (!m_codecContext) {
#if PRINT_LOG
        qWarning() << "创建视频解码器上下文失败!";
#endif
        free();
        return false;
    }
    // Copy the stream's codec parameters into the decoder context.
    int ret = avcodec_parameters_to_context(m_codecContext, videoCodecPara);
    if (ret < 0) {
        showError(ret);
        free();
        return false;
    }
    // max_lowres is 0 for most codecs; when supported this trades resolution
    // for speed. NOTE(review): confirm reduced-resolution output is intended.
    m_codecContext->lowres = codec->max_lowres;
    m_codecContext->flags2 |= AV_CODEC_FLAG2_FAST;  // allow non-spec speedups
    if (m_HWDecoder && !initHWDecoder(codec)) {
        // HW setup failed. initHWDecoder() frees m_codecContext when device
        // creation fails; without a context we cannot continue at all.
        if (!m_codecContext) return false;
        // Otherwise continue with plain software decoding.
    }
    // Open the decoder (must happen after the optional HW device attach).
    ret = avcodec_open2(m_codecContext, codec, nullptr);
    if (ret < 0) {
        showError(ret);
        free();
        return false;
    }
    return true;
}
/**
 * @brief Feed one packet to the decoder and fetch the resulting frame.
 * @param inputPacket compressed packet from the demuxer (not consumed here)
 * @return decoded frame — m_frame for software decode, m_frameHW for mapped
 *         hardware surfaces — owned by this object and valid until the next
 *         call; nullptr when no frame is available.
 *
 * Defect fixed: the old receive loop tested AVERROR(EAGAIN)/AVERROR_EOF and
 * `ret < 0` *inside* a `while (ret >= 0)` loop; since AVERROR values are
 * negative, both branches were dead code and receive errors were never
 * distinguished. They are handled explicitly now.
 */
AVFrame *DecodeStream::decodePacket(AVPacket *inputPacket) {
    if (!isValidAVPacket(inputPacket)) return nullptr;
    // Push the compressed packet into the decoder.
    int ret = avcodec_send_packet(m_codecContext, inputPacket);
    if (ret < 0) {
        qDebug() << "avcodec_send_packet error:" << ret << "\n";
        if (ret == AVERROR(EAGAIN)) {
            // Decoder input is full; back off briefly before the next packet.
            qDebug() << "AVERROR(EAGAIN)";
            av_usleep(10000);
        }
        if (ret == AVERROR_EOF) {
            qDebug() << " AVERROR_EOF";
            avcodec_flush_buffers(m_codecContext);
            av_usleep(10000);
        }
        showError(ret);
        av_frame_unref(m_frame);
        return nullptr;
    }
    ret = avcodec_receive_frame(m_codecContext, m_frame);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
        // No frame available yet / decoder fully drained — not an error.
        av_frame_unref(m_frame);
        return nullptr;
    }
    if (ret < 0) {
        showError(ret);
        qWarning() << "Error during decoding\n";
        av_frame_unref(m_frame);
        return nullptr;
    }
    if (!isValidAVFrame(m_frame)) {
        av_frame_unref(m_frame);
        return nullptr;
    }
    m_frameTemp = m_frame;
    // data[0] is null for hardware surfaces: map the GPU data into m_frameHW.
    if (!m_frame->data[0]) {
        m_frameTemp = m_frameHW;
        if (!dataCopy()) {
            av_frame_unref(m_frame);
            return nullptr;
        }
    }
    return m_frameTemp;
}
/**
 * @brief Release all FFmpeg resources owned by this object.
 *
 * The FFmpeg free helpers accept a pointer-to-null and reset the pointer, so
 * this is safe to call repeatedly and from any failure path.
 */
void DecodeStream::free() {
    avcodec_free_context(&m_codecContext);  // no-op when already null
    av_buffer_unref(&hw_device_ctx);
    av_frame_free(&m_frame);
    av_frame_free(&m_frameHW);
}
/*********** Callback used by FFmpeg to pick the GPU surface format ***********/
// Pixel format chosen in initHWDecoder(); read by the get_format callback.
static enum AVPixelFormat g_pixelFormat;
/**
 * @brief get_format callback: pick the hardware pixel format negotiated in
 *        initHWDecoder() from the list the decoder offers.
 * @param s   codec context (unused)
 * @param fmt AV_PIX_FMT_NONE-terminated list of supported formats
 * @return the matching hardware format, or AV_PIX_FMT_NONE when unavailable
 *
 * Idiom fix: the list terminator was compared against the literal -1; use the
 * named constant AV_PIX_FMT_NONE instead.
 */
AVPixelFormat get_hw_format(AVCodecContext *s, const enum AVPixelFormat *fmt) {
    Q_UNUSED(s)
    for (const enum AVPixelFormat *p = fmt; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == g_pixelFormat) {
            return *p;
        }
    }
    qDebug() << "无法获取硬件表面格式.";
    return AV_PIX_FMT_NONE;
}
/**
 * @brief Initialize the hardware decoder; must run before avcodec_open2().
 * @param codec the decoder selected for the stream
 * @return true when a hardware device context was created and attached
 *
 * Defect fixed: the inner range-for reused the name `i`, shadowing the outer
 * config index; the loop variables are now named distinctly.
 */
bool DecodeStream::initHWDecoder(const AVCodec *codec) {
    if (!codec) return false;
    for (int cfgIndex = 0;; cfgIndex++) {
        const AVCodecHWConfig *config = avcodec_get_hw_config(codec, cfgIndex);
        if (!config) {
            // Ran out of configs without finding a usable device type.
            qDebug() << "打开硬件解码器失败!";
            return false;
        }
        if (!(config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)) {
            continue;  // this config does not use a device context
        }
        for (auto supportedType : m_HWDeviceTypes) {
            if (config->device_type != AVHWDeviceType(supportedType)) {
                continue;
            }
            g_pixelFormat = config->pix_fmt;
            // Open the device of this type and create its AVHWDeviceContext.
            int ret = av_hwdevice_ctx_create(&hw_device_ctx,
                                             config->device_type,
                                             nullptr, nullptr, 0);
            if (ret < 0) {
                showError(ret);
                // NOTE: free() also releases m_codecContext — the caller must
                // check the context before using it after a failure here.
                free();
                return false;
            }
            qDebug() << "打开硬件解码器:"
                     << av_hwdevice_get_type_name(config->device_type);
            m_codecContext->hw_device_ctx = av_buffer_ref(hw_device_ctx);
            m_codecContext->get_format = get_hw_format;
            return true;
        }
    }
    return false;  // unreachable; kept for compilers that require a return
}
/**
 * @brief After hardware decoding, map the GPU frame so the CPU can read it.
 * @return true when m_frameHW references the mapped pixel data
 *
 * Cleanup: removed the leftover `#if 1 / #else / #endif` scaffolding around
 * the av_hwframe_map() path.
 */
bool DecodeStream::dataCopy() {
    // Only frames in the negotiated hardware format can be mapped.
    if (m_frame->format != g_pixelFormat) {
        av_frame_unref(m_frame);
        return false;
    }
    // Map the GPU surface into m_frameHW without a full system-memory copy.
    int ret = av_hwframe_map(m_frameHW, m_frame, 0);
    if (ret < 0) {
        showError(ret);
        av_frame_unref(m_frame);
        return false;
    }
    // The map call does not fill the geometry fields; copy them over.
    m_frameHW->width = m_frame->width;
    m_frameHW->height = m_frame->height;
    return true;
}
/**
 * @brief Sanity-check a decoded frame before handing it to the renderer.
 * @param frame candidate frame (may be null)
 * @return true when the frame carries plausible image data
 */
bool DecodeStream::isValidAVFrame(AVFrame *frame) {
    if (frame == nullptr) {
        return false;
    }
    // Require a negotiated pixel format and actual pixel data.
    const bool hasPixels =
        frame->format != AV_PIX_FMT_NONE && frame->data[0] != nullptr;
    if (!hasPixels) {
        return false;
    }
    // Geometry must be positive and the first plane needs a valid stride.
    if (frame->width <= 0 || frame->height <= 0 || frame->linesize[0] <= 0) {
        return false;
    }
    // A frame without a presentation timestamp cannot be scheduled.
    return frame->pts != AV_NOPTS_VALUE;
}
/**
 * @brief Sanity-check a demuxed packet before sending it to the decoder.
 * @param pkt candidate packet (may be null)
 * @return true when the packet looks decodable
 *
 * Cleanup: removed an empty `if (pkt->flags & AV_PKT_FLAG_KEY)` branch that
 * had no effect.
 */
bool DecodeStream::isValidAVPacket(AVPacket *pkt) {
    if (pkt == nullptr) {
        qDebug() << "Invalid AVPacket: packet pointer is null.";
        return false;
    }
    // Data pointer and size must describe a non-empty payload.
    if (pkt->data == nullptr || pkt->size <= 0) {
        qDebug() << "Invalid AVPacket: data is null or size is non-positive.\n";
        return false;
    }
    // NOTE(review): some containers legitimately omit pts or dts; rejecting
    // such packets drops them before the decoder sees them — confirm this
    // strictness is intended.
    if (pkt->pts == AV_NOPTS_VALUE || pkt->dts == AV_NOPTS_VALUE) {
        qDebug() << "Invalid AVPacket: pts or dts is AV_NOPTS_VALUE.\n";
        return false;
    }
    // Stream index must be non-negative for multi-stream inputs.
    if (pkt->stream_index < 0) {
        qDebug() << "Invalid AVPacket: stream_index is invalid.\n";
        return false;
    }
    return true;
}