// PayloadAPP/Src/VideoGL/videowidget.cpp

#include "videowidget.h"
#if USE_WINDOW
VideoWidget::VideoWidget(QOpenGLWindow::UpdateBehavior updateBehavior,
QWindow *parent)
: QOpenGLWindow(updateBehavior, parent) {
// Initialize the view size. The shader converts YUV to RGB, so before the first
// frame arrives the widget would render as a green background; starting with a
// zero-sized view avoids showing it.
m_pos = QPointF(0, 0);
m_zoomSize = QSize(0, 0);
}
#else
VideoWidget::VideoWidget(QWidget *parent, Qt::WindowFlags f)
: QOpenGLWidget(parent, f) {
// Initialize the view size. The shader converts YUV to RGB, so before the first
// frame arrives the widget would render as a green background; starting with a
// zero-sized view avoids showing it.
m_pos = QPointF(0, 0);
m_zoomSize = QSize(0, 0);
}
#endif
VideoWidget::~VideoWidget() {
if (!isValid())
return; // isValid() is true only if the widget and its OpenGL resources (such as the context) were initialized successfully.
decodeStreamer->disconnect(this);
stopPlay();
stopPushStream();
if (readStreamer) {
readStreamer->deleteLater();
}
if (decodeStreamer) {
decodeStreamer->deleteLater();
}
if (saveStreamer) {
saveStreamer->deleteLater();
}
if (streamPusher) {
streamPusher->deleteLater();
}
// Prepare to release this widget's OpenGL resources by making its context
// current (makeCurrent() also binds the widget's framebuffer object).
this->makeCurrent();
freeTexYUV420P();
freeTexNV12();
// Delete the buffer objects while the context is still current; these GL calls
// require a current context.
glDeleteBuffers(1, &VBO);
glDeleteBuffers(1, &EBO);
glDeleteVertexArrays(1, &VAO);
this->doneCurrent(); // Release the context.
}
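// Typical call sequence from the owning UI (illustrative sketch; the widget
// name and URL below are examples, not taken from this file):
//   videoWidget->setVedioSaveFileDirPath(saveDir);
//   if (!videoWidget->play("rtsp://192.168.1.10:8554/main")) { /* report the error */ }
//   ...
//   videoWidget->stopPlay();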
bool VideoWidget::play(const QString &url) {
// if (url.isEmpty()) return;
if (!m_pullFlag) {
m_pullFlag = pullStream(url);
if (!m_pullFlag) {
return false;
}
}
// Decode thread
if (!decodeStreamer) {
decodeStreamer = new DecodeStream;
connect(decodeStreamer, &DecodeStream::startDecodeSignal,
decodeStreamer, &DecodeStream::startDecode,
Qt::UniqueConnection);
}
connect(decodeStreamer, &DecodeStream::repaintSignal, this,
&VideoWidget::repaint,
Qt::QueuedConnection); // Qt::BlockingQueuedConnection
decodeStreamer->moveToThread(&decodeStreamThread);
decodeStreamThread.start();
bool ss = readStreamer->setStreamDecoder(decodeStreamer);
if (!ss) {
qDebug() << "解码器初始化失败!\n";
emit sendErrorMessageSignal("解码器初始化失败!",
NotificationType::NOTIFICATION_ERROR);
stopPlay();
return false;
}
m_playFlag = true;
return true;
}
void VideoWidget::stopPlay() {
m_playFlag = false;
if (decodeStreamer) {
decodeStreamer->disconnect(this);
// QThread::usleep(2000);
decodeStreamer->close();
}
decodeStreamThread.quit();
decodeStreamThread.wait();
if (!m_pushFlag) {
if (readStreamer) {
readStreamer->close();
}
readStreamThread.quit();
readStreamThread.wait();
m_pullFlag = false;
if (saveStreamer) {
saveStreamer->close();
}
saveStreamThread.quit();
saveStreamThread.wait();
}
}
// Push the local stream to a remote endpoint
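// Note: pushing piggybacks on the pull pipeline. If nothing is being pulled yet,
// the stream is first pulled from m_pullURL (set beforehand via setPullURL()).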
bool VideoWidget::pushStream(const QString &url) {
if (url.isEmpty()) {
return false;
} else {
// Pull the stream first
if (!m_pullFlag) {
m_pullFlag = this->pullStream(m_pullURL);
if (!m_pullFlag) {
emit sendErrorMessageSignal("获取视频流失败!", 2);
return false;
}
}
// Push-stream thread
if (!streamPusher) {
streamPusher = new PushStream;
connect(streamPusher, &PushStream::startPushStreamSignal,
streamPusher, &PushStream::pushStream,
Qt::UniqueConnection);
connect(streamPusher, &PushStream::sendErrorMessageSignal, this,
&VideoWidget::receiveErrorMessage, Qt::UniqueConnection);
}
streamPusher->setRemoteIP(url);
streamPusher->moveToThread(&pushStreamThread);
pushStreamThread.start();
bool ss = readStreamer->setStreamPusher(streamPusher);
if (!ss) {
emit sendErrorMessageSignal("推流初始化失败!", 2);
stopPushStream();
return false;
}
m_pushURL = url;
m_pushFlag = true;
return true;
}
}
void VideoWidget::stopPushStream() {
m_pushFlag = false;
if (streamPusher) streamPusher->close();
pushStreamThread.quit();
pushStreamThread.wait();
if (!m_playFlag) {
if (readStreamer) readStreamer->close();
readStreamThread.quit();
readStreamThread.wait();
m_pullFlag = false;
if (saveStreamer) saveStreamer->close();
saveStreamThread.quit();
saveStreamThread.wait();
}
}
void VideoWidget::setPullURL(const QString &url) {
m_pullURL = url;
}
void VideoWidget::setPushURL(const QString &url) {
m_pushURL = url;
}
void VideoWidget::setVedioSaveFileDirPath(const QString &dirPath) {
m_videoSaveDirPath = dirPath;
}
void VideoWidget::repaint(AVFrame *frame) {
try {
QMutexLocker locker(&m_mutex);
// Skip drawing if the frame is missing, has no size, is in an unknown pixel
// format, or the GL surface is not valid; unreference the frame so its buffers
// are released in every early-return path.
if (!frame) return;
if (frame->width <= 0 || frame->height <= 0 || !isValid() ||
frame->format == AV_PIX_FMT_NONE) {
av_frame_unref(frame);
return;
}
m_format = frame->format;
switch (m_format) {
case AV_PIX_FMT_YUVJ420P:
case AV_PIX_FMT_YUV420P: // FFmpeg software decoding outputs YUV420P
{
repaintTexYUV420P(frame);
break;
}
case AV_PIX_FMT_NV12: // FFmpeg hardware decoding outputs NV12 rather than planar YUV420P, so it needs separate handling
{
repaintTexNV12(frame);
break;
}
default: {
// av_frame_unref(frame);
// return;
break;
}
}
av_frame_unref(frame); // Unreference all buffers referenced by the frame and reset its fields.
this->update();
} catch (...) {
return;
}
}
/**
 * @brief Upload the Y/U/V planes of a YUV420P frame to the textures
 * @param frame decoded AVFrame in YUV420P (or YUVJ420P) layout
 */
void VideoWidget::repaintTexYUV420P(AVFrame *frame) {
// When switching to a different video, the textures must be recreated if the
// resolution changed; uploading into textures of the wrong size would crash.
if (frame->width != m_size.width() || frame->height != m_size.height()) {
freeTexYUV420P();
}
initTexYUV420P(frame->width, frame->height);
if (m_texY && m_texU && m_texV) {
m_options.setImageHeight(frame->height);
m_options.setRowLength(frame->linesize[0]);
m_texY->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8,
static_cast<const void *>(frame->data[0]),
&m_options); // 设置图像数据 Y
m_options.setRowLength(frame->linesize[1]);
m_texU->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8,
static_cast<const void *>(frame->data[1]),
&m_options); // 设置图像数据 U
m_options.setRowLength(frame->linesize[2]);
m_texV->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8,
static_cast<const void *>(frame->data[2]),
&m_options); // 设置图像数据 V
}
}
/**
 * @brief Create the Y/U/V textures for a YUV420P frame of the given size
 * @param width  frame width in pixels
 * @param height frame height in pixels
 */
void VideoWidget::initTexYUV420P(int width, int height) {
if (!m_texY) // Create the Y texture once
{
// Create a 2D texture
m_texY = new QOpenGLTexture(QOpenGLTexture::Target2D);
// Set the texture size
m_texY->setSize(width, height);
// Set the minification/magnification filters
m_texY->setMinMagFilters(QOpenGLTexture::LinearMipMapLinear,
QOpenGLTexture::Linear);
// Set the pixel format (single 8-bit channel)
m_texY->setFormat(QOpenGLTexture::R8_UNorm);
// Allocate storage
m_texY->allocateStorage();
// Remember the frame resolution
m_size.setWidth(width);
m_size.setHeight(height);
resizeGL(this->width(), this->height());
}
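// YUV420P uses 4:2:0 chroma subsampling, so the U and V planes are half the
// width and half the height of the Y plane; their textures are sized accordingly.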
if (!m_texU) {
m_texU = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_texU->setSize(width / 2, height / 2);
m_texU->setMinMagFilters(QOpenGLTexture::LinearMipMapLinear,
QOpenGLTexture::Linear);
m_texU->setFormat(QOpenGLTexture::R8_UNorm);
m_texU->allocateStorage();
}
if (!m_texV) // Create the V texture
{
m_texV = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_texV->setSize(width / 2, height / 2);
m_texV->setMinMagFilters(QOpenGLTexture::LinearMipMapLinear,
QOpenGLTexture::Linear);
m_texV->setFormat(QOpenGLTexture::R8_UNorm);
m_texV->allocateStorage();
}
}
/**
 * @brief Destroy the YUV420P textures
 */
void VideoWidget::freeTexYUV420P() {
// Destroy the textures
if (m_texY) {
m_texY->destroy();
delete m_texY;
m_texY = nullptr;
}
if (m_texU) {
m_texU->destroy();
delete m_texU;
m_texU = nullptr;
}
if (m_texV) {
m_texV->destroy();
delete m_texV;
m_texV = nullptr;
}
}
/**
 * @brief Upload the Y plane and interleaved UV plane of an NV12 frame to the textures
 * @param frame decoded AVFrame in NV12 layout
 */
void VideoWidget::repaintTexNV12(AVFrame *frame) {
// When switching to a different video, the textures must be recreated if the
// resolution changed; uploading into textures of the wrong size would crash.
if (frame->width != m_size.width() || frame->height != m_size.height()) {
freeTexNV12();
}
initTexNV12(frame);
if (m_texY && m_texUV) {
m_options.setImageHeight(frame->height);
m_options.setRowLength(frame->linesize[0]);
m_texY->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8,
static_cast<const void *>(frame->data[0]),
&m_options); // 设置图像数据 Y
m_options.setImageHeight(frame->height / 2);
m_options.setRowLength(frame->linesize[1] / 2);
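// linesize[1] is a byte stride, but setRowLength() expects a count of texels;
// each texel of the RG8 chroma texture is one interleaved U/V pair (two bytes),
// so the stride is halved here.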
m_texUV->setData(QOpenGLTexture::RG, QOpenGLTexture::UInt8,
static_cast<const void *>(frame->data[1]),
&m_options); // 设置图像数据 UV
}
}
/**
 * @brief Create the Y and UV textures for an NV12 frame
 * @param frame decoded AVFrame whose dimensions size the textures
 */
void VideoWidget::initTexNV12(AVFrame *frame) {
if (!m_texY) // Create the Y texture once
{
// Create a 2D texture
m_texY = new QOpenGLTexture(QOpenGLTexture::Target2D);
// Set the texture size
m_texY->setSize(frame->width, frame->height);
// Set the minification/magnification filters
m_texY->setMinMagFilters(QOpenGLTexture::LinearMipMapLinear,
QOpenGLTexture::Linear);
// Set the pixel format (single 8-bit channel)
m_texY->setFormat(QOpenGLTexture::R8_UNorm);
// Allocate storage
m_texY->allocateStorage();
// Remember the frame resolution
m_size.setWidth(frame->width);
m_size.setHeight(frame->height);
resizeGL(this->width(), this->height());
}
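// NV12 stores chroma as a single half-resolution plane of interleaved U/V pairs,
// so one two-channel (RG8) texture at width/2 x height/2 covers it.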
if (!m_texUV) {
m_texUV = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_texUV->setSize(frame->width / 2, frame->height / 2);
m_texUV->setMinMagFilters(QOpenGLTexture::LinearMipMapLinear,
QOpenGLTexture::Linear);
m_texUV->setFormat(QOpenGLTexture::RG8_UNorm);
m_texUV->allocateStorage();
}
}
/**
 * @brief Destroy the NV12 textures
 */
void VideoWidget::freeTexNV12() {
// Destroy the textures
if (m_texY) {
m_texY->destroy();
delete m_texY;
m_texY = nullptr;
}
if (m_texUV) {
m_texUV->destroy();
delete m_texUV;
m_texUV = nullptr;
}
}
// Quad vertex data for the VAO/VBO. Positions are XYZ in normalized device
// coordinates ([-1, 1]), so the quad fills the viewport directly.
static GLfloat vertices[] = {
// First three columns: position; last two: texture coordinates
1.0f, 1.0f, 0.0f, 1.0f, 1.0f, // top-right
1.0f, -1.0f, 0.0f, 1.0f, 0.0f, // bottom-right
-1.0f, -1.0f, 0.0f, 0.0f, 0.0f, // bottom-left
-1.0f, 1.0f, 0.0f, 0.0f, 1.0f // top-left
};
// Two triangles, (0,1,3) and (1,2,3), that together cover the quad
static GLuint indices[] = {0, 1, 3, 1, 2, 3};
void VideoWidget::initializeGL() {
// Initialize the OpenGL function pointers
initializeOpenGLFunctions();
// Load and link the shader program
m_program = new QOpenGLShaderProgram(this);
m_program->addShaderFromSourceFile(QOpenGLShader::Vertex,
":/gl/vertex.vsh");
m_program->addShaderFromSourceFile(QOpenGLShader::Fragment,
":/gl/fragment.fsh");
m_program->link();
// Bind the program and assign texture units to the YUV sampler uniforms
m_program->bind();
m_program->setUniformValue("tex_y", 0);
m_program->setUniformValue("tex_u", 1);
m_program->setUniformValue("tex_v", 2);
m_program->setUniformValue("tex_uv", 3);
GLuint posAttr = GLuint(m_program->attributeLocation("aPos"));
GLuint texCord = GLuint(m_program->attributeLocation("aTexCord"));
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glGenBuffers(1, &EBO); // 创建一个EBO
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices,
GL_STATIC_DRAW); // Upload the vertex index array into the EBO
// Describe the vertex position attribute (3 floats per vertex)
glVertexAttribPointer(posAttr, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat),
nullptr);
// Enable the generic vertex attribute array
glEnableVertexAttribArray(posAttr);
// Describe the texture-coordinate attribute (2 floats, offset by 3 floats)
glVertexAttribPointer(
texCord, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat),
reinterpret_cast<const GLvoid *>(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(texCord);
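// The VAO records the attribute layout and the element-buffer binding, so
// paintGL() only needs to rebind the VAO before glDrawElements(). Only the
// GL_ARRAY_BUFFER binding may be cleared below; unbinding the EBO here would
// remove it from the VAO state.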
// Unbind the VBO and VAO
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
}
void VideoWidget::resizeGL(int w, int h) {
if (m_size.width() <= 0 || m_size.height() <= 0) return; // No frame size known yet; also guards the divisions below.
QScreen *screen = this->window()->windowHandle()
? this->window()->windowHandle()->screen()
: QGuiApplication::primaryScreen();
double scaleFactor = screen->devicePixelRatio();
w = static_cast<int>(w * scaleFactor);
h = static_cast<int>(h * scaleFactor);
// Compute the largest rectangle with the frame's aspect ratio that fits the
// widget, so the video scales proportionally (letterbox/pillarbox).
if ((double(w) / h) < (double(m_size.width()) / m_size.height())) {
m_zoomSize.setWidth(w);
m_zoomSize.setHeight(((double(w) / m_size.width()) * m_size.height()));
} else {
m_zoomSize.setHeight(h);
m_zoomSize.setWidth((double(h) / m_size.height()) * m_size.width());
}
m_pos.setX(double(w - m_zoomSize.width()) / 2);
m_pos.setY(double(h - m_zoomSize.height()) / 2);
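// Worked example (illustrative numbers): a 1920x1080 frame in a 1000x1000
// device-pixel viewport gives 1000/1000 < 1920/1080, so m_zoomSize becomes
// 1000x562 and m_pos becomes (0, 219), centering the quad vertically.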
this->update(QRect(0, 0, w, h));
}
void VideoWidget::paintGL() {
glClear(GL_COLOR_BUFFER_BIT);
glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height());
m_program->bind();
m_program->setUniformValue("format", m_format);
// Bind the textures that match the current pixel format
switch (m_format) {
case AV_PIX_FMT_YUVJ420P:
case AV_PIX_FMT_YUV420P: {
if (m_texY && m_texU && m_texV) {
m_texY->bind(0);
m_texU->bind(1);
m_texV->bind(2);
}
break;
}
case AV_PIX_FMT_NV12: {
if (m_texY && m_texUV) {
m_texY->bind(0);
m_texUV->bind(3);
}
break;
}
default:
break;
}
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
glBindVertexArray(0);
// Release the textures
switch (m_format) {
case AV_PIX_FMT_YUVJ420P:
case AV_PIX_FMT_YUV420P: {
if (m_texY && m_texU && m_texV) {
m_texY->release();
m_texU->release();
m_texV->release();
}
break;
}
case AV_PIX_FMT_NV12: {
if (m_texY && m_texUV) {
m_texY->release();
m_texUV->release();
}
break;
}
default:
break;
}
m_program->release();
}
bool VideoWidget::pullStream(const QString &url) {
if (url.isEmpty()) return false;
if (!readStreamer) {
readStreamer = new ReadStream;
connect(readStreamer, &ReadStream::startPullStreamSignal, readStreamer,
&ReadStream::startPullStream, Qt::UniqueConnection);
connect(readStreamer, &ReadStream::sendErrorMessageSignal, this,
&VideoWidget::receiveErrorMessage, Qt::UniqueConnection);
// connect(this, &VideoWidget::startPullSignal, readStreamer,
// &ReadStream::startPullStream, Qt::UniqueConnection);
}
if (readStreamer->openFile(url)) {
// Save (recording) thread
if (!saveStreamer) {
saveStreamer = new SaveStream;
connect(saveStreamer, &SaveStream::startSaveStreamSignal,
saveStreamer, &SaveStream::startSaveStream,
Qt::UniqueConnection);
}
saveStreamer->setSaveFileDirPath(m_videoSaveDirPath);
saveStreamer->moveToThread(&saveStreamThread);
saveStreamThread.start();
readStreamer->setStreamSaver(saveStreamer);
qDebug() << "UIThreadID:" << QThread::currentThreadId();
readStreamer->moveToThread(&readStreamThread);
readStreamThread.start();
emit readStreamer->startPullStreamSignal();
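// Each worker (ReadStream, SaveStream, and later DecodeStream / PushStream) is
// moved to its own QThread; emitting its start signal runs the work in that
// thread through a queued connection instead of blocking the UI thread.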
// emit startPullSignal();
return true;
}
return false;
}
void VideoWidget::receiveErrorMessage(QString message, int type) {
// qDebug() << "receive message:" << message;
emit sendErrorMessageSignal(message, type);
}