diff --git a/Src/HomePage/networkDetection.cpp b/Src/HomePage/networkDetection.cpp
index 3144184..c78add0 100644
--- a/Src/HomePage/networkDetection.cpp
+++ b/Src/HomePage/networkDetection.cpp
@@ -1,7 +1,8 @@
 #include "networkDetection.h"
 #include

-networkDetection::networkDetection(QObject *parent,int id) : QObject(parent),m_id(id){
+networkDetection::networkDetection(QObject *parent, int id)
+    : QObject(parent), m_id(id) {
   flagRun = true;
   CmdListChanged = false;
   m_process = new QProcess(this);
@@ -10,7 +11,7 @@ networkDetection::~networkDetection() { delete m_process; }
 void networkDetection::dowork(int id, QStringList cmdlist) {
   m_cmd = cmdlist;
-  if(id != m_id)
+  if (id != m_id)
     return;
   QString result;
   while (flagRun) {
@@ -18,15 +19,17 @@ void networkDetection::dowork(int id, QStringList cmdlist) {
     //        if(id == 1)
     //        {
     //            QDateTime time = QDateTime::currentDateTime();
-    //            QString str = time .toString("start yyyy-MM-dd hh:mm:ss\n"); // format the time .toString("yyyy-MM-dd hh:mm:ss"); // format the time
-    //            qDebug()<< str;
+    //            QString str = time .toString("start yyyy-MM-dd
+    //            hh:mm:ss\n"); // format the time .toString("yyyy-MM-dd
+    //            hh:mm:ss"); // format the time  qDebug()<< str;
     //        }
-    m_process->waitForFinished();  // wait for the command to finish
+    m_process->waitForFinished(); // wait for the command to finish
     //        if(id == 1)
     //        {
     //            QDateTime time = QDateTime::currentDateTime();
-    //            QString str = time .toString("end yyyy-MM-dd hh:mm:ss\n"); // format the time .toString("yyyy-MM-dd hh:mm:ss"); // format the time
-    //            qDebug()<< str;
+    //            QString str = time .toString("end yyyy-MM-dd
+    //            hh:mm:ss\n"); // format the time .toString("yyyy-MM-dd
+    //            hh:mm:ss"); // format the time  qDebug()<< str;
     //        }
     result = QString::fromLocal8Bit(m_process->readAll()); // read the command output
     //        qDebug() << result;
@@ -34,20 +37,20 @@ void networkDetection::dowork(int id, QStringList cmdlist) {
     {
       //            qDebug() << "在线";
       QString delay = selectTTL(result);
-      emit resultReady(m_id, "在线",delay);
+      emit resultReady(m_id, "在线", delay);
     } else {
       //            qDebug() << "离线";
-      emit resultReady(m_id, "离线","通信异常");
+      emit resultReady(m_id, "离线", "通信异常");
     }
-    //QThread::sleep(0.001); // sleep to reduce CPU usage
+    QThread::sleep(3); // sleep to reduce CPU usage
   }
 }

 void networkDetection::stopwork(int id) {
-  if(id == m_id){
-     flagRun = false;
-  }
+  if (id == m_id) {
+    flagRun = false;
+  }
 }

 void networkDetection::DyChangeCmdList(QStringList changeCmdList) {
@@ -56,9 +59,9 @@ void networkDetection::DyChangeCmdList(QStringList changeCmdList) {
   CmdListChanged = true;
 }

-//find the latency
+// find the latency
 QString networkDetection::selectTTL(QString str) {
-  str = str.remove(0,str.indexOf("平均 = ")+5);
-  str = str.left(str.indexOf("ms")+2);
-  return "通信正常\n"+str;
+  str = str.remove(0, str.indexOf("平均 = ") + 5);
+  str = str.left(str.indexOf("ms") + 2);
+  return "通信正常\n" + str;
 }
diff --git a/Src/ModelCamera/modelcameradlg.cpp b/Src/ModelCamera/modelcameradlg.cpp
index f9ab911..c4ae139 100644
--- a/Src/ModelCamera/modelcameradlg.cpp
+++ b/Src/ModelCamera/modelcameradlg.cpp
@@ -166,6 +166,8 @@ void ModelCameraDlg::on_pushButton_5_clicked() {
   QString str = ui->pushButton_5->text();
   if (str == "播放视频") {
     QString ip = ui->videoIPLineEdit->text();
+    ui->cameraVideoWidget->setPushStreamIP(
+        QStringLiteral("rtmp://182.92.130.23/app/stream999"));
     ui->cameraVideoWidget->play(ip);
     ui->pushButton_5->setText("暂停播放");
   } else {
diff --git a/Src/Video/Video.pri b/Src/Video/Video.pri
index 2b4a055..50e6992 100644
--- a/Src/Video/Video.pri
+++ b/Src/Video/Video.pri
@@ -1,9 +1,12 @@
 SOURCES += \
     $$PWD/cffmpeg_decode.cpp \
+    $$PWD/ffmpegpushstream.cpp \
     $$PWD/ffmpegvideodlg.cpp

 HEADERS += \
     $$PWD/cffmpeg_decode.h \
+    $$PWD/ffmpeginclude.h \
+    $$PWD/ffmpegpushstream.h \
     $$PWD/ffmpegvideodlg.h \

 FORMS += \
diff --git a/Src/Video/cffmpeg_decode.cpp b/Src/Video/cffmpeg_decode.cpp
index 5eebefe..54c657d 100644
--- a/Src/Video/cffmpeg_decode.cpp
+++ b/Src/Video/cffmpeg_decode.cpp
@@ -38,7 +38,7 @@ bool Cffmpeg_decode::open_input_file() {
   // set the buffer size; for 1080p the value can be increased
   av_dict_set(&avdic, "buffer_size", "2048000", 0);
   // open over UDP; to use TCP instead, replace udp with tcp
-  av_dict_set(&avdic, "rtsp_transport", "udp", 0);
+  av_dict_set(&avdic, "rtsp_transport", "tcp", 0);
   // connection timeout, in microseconds // listen_timeout
   // av_dict_set(&avdic, "listen_timeout", "200000", 0);
   av_dict_set(&avdic, "stimeout", "200000", 0);
@@ -122,6 +122,13 @@ bool Cffmpeg_decode::open_input_file() {
     qDebug() << "Fill arrays failed.\n";
     return 0;
   }
+
+  // initialise push streaming
+  if (bPushStreamFlag) {
+    emit sendInitPushStream_Signal(inputFormatCtx);
+  }
+
+  // raw-stream saving
   if (!openSave()) {
     return false;
   };
@@ -136,7 +143,13 @@ void Cffmpeg_decode::run() {
     IsstopPlay = true;
     return;
   }
-
+  //  if (bRecordTime) {
+  //    startTime = av_gettime();
+  //    qDebug() << "*******StartTime:" << QString::number(startTime);
+  //    bRecordTime = false;
+  //  }
+  firstDts = AV_NOPTS_VALUE; // reset the DTS of the first packet
+  startTime = av_gettime();
  // read packets
  while (av_read_frame(inputFormatCtx, inputPacket) >= 0) {
    if (IsstopPlay) {
@@ -144,8 +157,21 @@ void Cffmpeg_decode::run() {
       break;
     }

+    // on the first packet, record the starting DTS
+    if (firstDts == AV_NOPTS_VALUE) {
+      firstDts = inputPacket->dts;
+      startTime = av_gettime(); // system time when the first packet arrives
+    }
+
     if (inputPacket->stream_index == videoStreamIndex) {
-      // save the raw stream
+      // push the stream
+      if (bPushStreamFlag) {
+        // av_packet_clone(inputPacket);
+        AVPacket *outputPacket = av_packet_clone(inputPacket);
+        emit sendStreamData_Signal(outputPacket, frm_cnt, startTime, firstDts);
+      }
+      // qDebug() << "****** pulled frame" << QString::number(frm_cnt++);
+      // save the raw stream
       if (m_formatContextSave) {
         // m_formatContextSave only creates one video stream, but the stream index of
         // a read packet is not necessarily 0, which can cause the error [Invalid packet stream index: 1]
@@ -154,6 +180,7 @@ void Cffmpeg_decode::run() {
         av_write_frame(m_formatContextSave, inputPacket); // write the packet to the output media file
       }

+      // decode the packet
       if (avcodec_send_packet(decoderCtx, inputPacket) >= 0) {
         int ret;
@@ -178,11 +205,11 @@
                      QImage::Format_RGB32);
           emit sendQImage(img);
           QThread::msleep(28);
-        }
         }
         av_packet_unref(inputPacket);
       }
+      frm_cnt++;
       if (IsstopPlay) {
         if (m_formatContextSave) {
           saveDone();
@@ -222,6 +249,10 @@ void Cffmpeg_decode::stop() {
     avcodec_close(decoderCtx);
   if (!inputFormatCtx)
     avformat_close_input(&inputFormatCtx);
+
+  // stop push streaming
+  bPushStreamFlag = false;
+  emit sendStopPushStream_Signal();
 }

 void Cffmpeg_decode::setSaveFileDirPath(QString fileDirPath) {
@@ -269,7 +300,7 @@ bool Cffmpeg_decode::openSave() {
   ret = avcodec_parameters_from_context(m_videoStreamOut->codecpar, decoderCtx);
   if (ret < 0) {
     // free();
-    qWarning() << "DecodeVideo Error";
+    qWarning() << "avcodec_parameters_from_context Failed";
     return false;
   }
diff --git a/Src/Video/cffmpeg_decode.h b/Src/Video/cffmpeg_decode.h
index 7639787..1b8d922 100644
--- a/Src/Video/cffmpeg_decode.h
+++ b/Src/Video/cffmpeg_decode.h
@@ -1,6 +1,7 @@
 #ifndef CFFMPEG_DECODE_H
 #define CFFMPEG_DECODE_H

+#include "ffmpeginclude.h"
 #include
 #include
 #include
@@ -10,23 +11,6 @@
 #include
 #include
 #include

-extern "C" {
-#include "libavcodec/avcodec.h"
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-#include
-
-#include
-}
-
 // class Cffmpeg_decode: public QThread
 class Cffmpeg_decode : public QObject {
   Q_OBJECT
@@ -37,7 +21,6 @@ public:
   bool open_input_file();
-  bool IsstopPlay = false;
   void stop();
   /**
    * @brief Set the directory path for saving the pulled stream
    */
@@ -51,6 +34,14 @@ public slots:
 signals:
   void sendQImage(QImage);
   void sendConnectFail(int);
+  void sendInitPushStream_Signal(AVFormatContext *inputFormatCtx);
+  void sendStreamData_Signal(AVPacket *pkt, int frm_cnt, int64_t startTime,
+                             int64_t firstDts);
+  void sendStopPushStream_Signal();
+
+public:
+  bool IsstopPlay = false;
+  bool bPushStreamFlag = false;

 private:
   AVFormatContext *inputFormatCtx = NULL; //
@@ -72,11 +63,14 @@ private:
   unsigned char *out_buffer = nullptr;

   int videoStreamIndex = -1;
-  int videoStreamIndexOut = -1;
   int numBytes = -1;
+  int frm_cnt = 0;
   QString _url;
   QString saveFileDirPath;
+  bool bRecordTime = true;
+  int64_t startTime;
+  int64_t firstDts;
   /******** used for saving the raw stream ******************/
   AVFormatContext *m_formatContextSave = nullptr; // muxing context
   QString m_strCodecName;                         // codec name
diff --git a/Src/Video/ffmpeginclude.h b/Src/Video/ffmpeginclude.h
new file mode 100644
index 0000000..259730c
--- /dev/null
+++ b/Src/Video/ffmpeginclude.h
@@ -0,0 +1,20 @@
+#ifndef FFMPEGINCLUDE_H
+#define FFMPEGINCLUDE_H
+
+extern "C" {
+#include "libavcodec/avcodec.h"
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+}
+
+#endif // FFMPEGINCLUDE_H
diff --git a/Src/Video/ffmpegpushstream.cpp b/Src/Video/ffmpegpushstream.cpp
new file mode 100644
index 0000000..34b8f78
--- /dev/null
+++ b/Src/Video/ffmpegpushstream.cpp
@@ -0,0 +1,162 @@
+#include "ffmpegpushstream.h"
+
+FFmpegPushStream::FFmpegPushStream(QObject *parent) : QObject{parent} {}
+
+/**
+ * @brief Set the publish address
+ * @param url remote publish URL
+ */
+void FFmpegPushStream::setRemoteIP(QString url) { pushStreamIP = url; }
+
+int FFmpegPushStream::openNetworkStream(AVFormatContext *inputFormatCtx) {
+  if (pushStreamIP.isEmpty())
+    return -1;
+
+  int ret;
+  // initialise the network output stream
+  // const char *output_url = "rtsp://182.92.130.23/app/stream999";
+  ret = avformat_alloc_output_context2(&outputFormatCtx, NULL, "flv",
+                                       pushStreamIP.toUtf8().constData());
+  if (ret < 0) {
+    qDebug() << "Could not create output context.";
+    return -1;
+  }
+  // copy the stream information
+  for (unsigned int i = 0; i < inputFormatCtx->nb_streams; ++i) {
+    if (inputFormatCtx->streams[i]->codecpar->codec_type ==
+        AVMEDIA_TYPE_VIDEO) {
+      AVStream *inputStream = inputFormatCtx->streams[i];
+      AVStream *outputStream = avformat_new_stream(outputFormatCtx, nullptr);
+      if (!outputStream) {
+        qDebug() << "Failed allocating output stream.\n";
+        return -1;
+      }
+      // copy the codec parameters
+      ret = avcodec_parameters_copy(outputStream->codecpar,
+                                    inputStream->codecpar);
+      if (ret < 0) {
+        // free();
+        qWarning() << "avcodec_parameters_copy Failed";
+        return -1;
+      }
+      // outputStream->codecpar = inputStream->codecpar;
+      outputStream->codecpar->codec_tag = 0;
+      // outputStream->time_base.num = 1;
+      break;
+    }
+  }
+
+  // open the output
+  if (!(outputFormatCtx->flags & AVFMT_NOFILE)) {
+    if (avio_open(&outputFormatCtx->pb, pushStreamIP.toUtf8().constData(),
+                  AVIO_FLAG_WRITE) < 0) {
+      qDebug() << "Could not open output file.\n";
+      return -1;
+    }
+  }
+
+  // write the stream header
+  if (avformat_write_header(outputFormatCtx, NULL) < 0) {
+    qDebug() << "Error occurred when opening output file.\n";
+    return -1;
+  }
+  mInitStatus = true;
+  this->inputFormatCtx = inputFormatCtx;
+  startTime = av_gettime_relative();
+  return 1;
+}
+
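+// Pacing sketch (assuming, for illustration, a 1/90000 input time base): a
+// packet whose dts is 90000 ticks after firstDts rescales to a stream time of
+// 1,000,000 us; if only 400,000 us of wall-clock time have elapsed since
+// startTime, pushStream() sleeps the remaining 600,000 us before rescaling the
+// timestamps and writing the packet, so the server receives data in real time.
+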
+/**
+ * @brief Push one packet to the streaming server
+ * @param pkt: undecoded (compressed) packet
+ * @param frm_cnt: frame counter
+ * @param startTime: system time when pushing started
+ * @param firstDts: dts of the first packet
+ * @return
+ */
+int FFmpegPushStream::pushStream(AVPacket *pkt, int frm_cnt, int64_t startTime,
+                                 int64_t firstDts) {
+  if (!mInitStatus)
+    return -1;
+
+  if (pkt->dts <= 0)
+    return -1;
+  // qDebug() << "****** push" << QString::number(frm_cnt++);
+  int inputStreamIndex = pkt->stream_index;
+  int outputStreamIndex = 0;
+  // Packets without a pts, e.g. a raw undecoded H.264 stream, need their pts recalculated.
+  if (pkt->pts == AV_NOPTS_VALUE) {
+    AVRational time_base = inputFormatCtx->streams[inputStreamIndex]->time_base;
+    // Duration between 2 frames (us)
+    int64_t calc_duration =
+        (double)AV_TIME_BASE /
+        av_q2d(inputFormatCtx->streams[inputStreamIndex]->r_frame_rate);
+    // Reset Parameters
+    pkt->pts = (double)(frm_cnt * calc_duration) /
+               (double)(av_q2d(time_base) * AV_TIME_BASE);
+    pkt->dts = pkt->pts;
+    pkt->duration =
+        (double)calc_duration / (double)(av_q2d(time_base) * AV_TIME_BASE);
+  }
+
+  // pace the pushing of video frames
+  firstDts = av_rescale_q(firstDts,
+                          inputFormatCtx->streams[inputStreamIndex]->time_base,
+                          AVRational{1, AV_TIME_BASE});
+  auto pts_time = av_rescale_q(
+      pkt->dts, inputFormatCtx->streams[inputStreamIndex]->time_base,
+      AVRational{1, AV_TIME_BASE});
+  int64_t streamTime = pts_time - firstDts; // the frame's time within the stream
+  auto now_time = av_gettime() - startTime; // elapsed wall-clock time
+  int64_t delay = streamTime - now_time;
+  if (delay > 0) {
+    // qDebug() << "****************sleep time:" << QString::number(delay);
+    av_usleep(delay);
+  }
+
+  // after the delay, rescale the timestamps to the output stream's time base
+  AVRational istream_base =
+      inputFormatCtx->streams[inputStreamIndex]->time_base;
+  AVRational ostream_base =
+      outputFormatCtx->streams[outputStreamIndex]->time_base;
+  pkt->pts =
+      av_rescale_q_rnd(pkt->pts, istream_base, ostream_base,
+                       (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
+  pkt->dts =
+      av_rescale_q_rnd(pkt->dts, istream_base, ostream_base,
+                       (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
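+  // Rounding can produce negative timestamps; clamp them to zero so the muxer
+  // does not reject the packet.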
+  pkt->pts = pkt->pts < 0 ? 0 : pkt->pts;
+  pkt->dts = pkt->dts < 0 ? 0 : pkt->dts;
+  pkt->duration =
+      (int64_t)av_rescale_q(pkt->duration, istream_base, ostream_base);
+  pkt->pos = -1;
+  if (pkt->pts < pkt->dts) {
+    return 1;
+  }
+
+  // send the packet to the streaming server
+  pkt->stream_index = 0;
+  int ret = av_interleaved_write_frame(outputFormatCtx, pkt);
+  if (ret < 0) {
+    return ret;
+  }
+  // the packet was written successfully, so it can be released now
+  av_packet_unref(pkt);
+  return 0;
+}
+
+/**
+ * @brief Stop pushing the stream
+ */
+void FFmpegPushStream::stopPush() {
+  av_write_trailer(outputFormatCtx);
+  if (inputFormatCtx != nullptr) {
+    avformat_close_input(&inputFormatCtx);
+  }
+  // close the output
+  if (outputFormatCtx && !(outputFormatCtx->flags & AVFMT_NOFILE)) {
+    avio_close(outputFormatCtx->pb);
+  }
+  if (outputFormatCtx) {
+    avformat_free_context(outputFormatCtx);
+  }
+}
diff --git a/Src/Video/ffmpegpushstream.h b/Src/Video/ffmpegpushstream.h
new file mode 100644
index 0000000..94270c4
--- /dev/null
+++ b/Src/Video/ffmpegpushstream.h
@@ -0,0 +1,33 @@
+#ifndef FFMPEGPUSHSTREAM_H
+#define FFMPEGPUSHSTREAM_H
+
+#include "ffmpeginclude.h"
+#include
+#include
+
+class FFmpegPushStream : public QObject {
+  Q_OBJECT
+public:
+  explicit FFmpegPushStream(QObject *parent = nullptr);
+  /**
+   * @brief Set the publish address
+   * @param url remote publish URL
+   */
+  void setRemoteIP(QString url);
+signals:
+
+private:
+  AVFormatContext *inputFormatCtx = nullptr; //
+  AVFormatContext *outputFormatCtx = NULL;   //
+  int videoIndex = -1;
+  QString pushStreamIP; // publish URL
+  bool mInitStatus = false;
+  int64_t startTime;
+public slots:
+  int pushStream(AVPacket *pkt, int frm_cnt, int64_t startTime,
+                 int64_t firstDts);
+  void stopPush();
+  int openNetworkStream(AVFormatContext *inputFormatCtx);
+};
+
+#endif // FFMPEGPUSHSTREAM_H
diff --git a/Src/Video/ffmpegvideodlg.cpp b/Src/Video/ffmpegvideodlg.cpp
index 5ce11c3..8be5b8e 100644
--- a/Src/Video/ffmpegvideodlg.cpp
+++ b/Src/Video/ffmpegvideodlg.cpp
@@ -5,23 +5,22 @@
 ffmpegvideoDlg::ffmpegvideoDlg(QWidget *parent)
     : QWidget(parent), ui(new Ui::ffmpegvideoDlg) {
   ui->setupUi(this);
-  //  iniWindow();
+  ffmpegPushStream = nullptr;
+  pushStreamThread = nullptr;
 }

 ffmpegvideoDlg::~ffmpegvideoDlg() {
   stop();
   delete ui;
-}
-
-// not used yet
-void ffmpegvideoDlg::iniWindow() {
-  QString VideoFilePath = QDir::currentPath() + "./Video";
-  QDir VideoDir(VideoFilePath);
-  if (!VideoDir.exists()) {
-    VideoDir.mkdir(VideoFilePath);
-    qDebug() << "文件夹创建成功";
+  if (ffmpegPushStream != nullptr)
+    ffmpegPushStream->deleteLater();
+  if (pushStreamThread != nullptr) {
+    pushStreamThread->quit();
+    pushStreamThread->wait();
+    pushStreamThread->deleteLater();
   }
 }
+
 void ffmpegvideoDlg::setVedioSaveFileDirPath(QString saveDirPath) {
   videoSaveDirPath = saveDirPath;
 }
@@ -32,6 +31,8 @@ void ffmpegvideoDlg::play(QString url) {
     ffmpeg = new Cffmpeg_decode;
     ffmpeg->setSaveFileDirPath(videoSaveDirPath);
     ffmpeg->IsstopPlay = false;
+    if (!pushStreamIP.isEmpty()) // push the stream
+      startPushStream();
     ffmpeg->moveToThread(&workerThread);

     connect(&workerThread, &QThread::finished, ffmpeg,
@@ -46,10 +47,11 @@ void ffmpegvideoDlg::play(QString url) {
             SLOT(showMessagBox(int))); // error message prompt

     workerThread.start();
-    emit this->setUrlSign(url);// set the URL
-    emit this->operate(); // signal the worker thread to start
+    emit this->setUrlSign(url); // set the URL
+    emit this->operate();       // signal the worker thread to start
   }
 }
+
 void ffmpegvideoDlg::stop() {
   if (m_PlayStatus) {
     ffmpeg->stop();
@@ -108,3 +110,27 @@ void ffmpegvideoDlg::showMessagBox(int type) {
 bool ffmpegvideoDlg::Isplay(bool IsstopPlay) {
   return ffmpeg->IsstopPlay = IsstopPlay;
 }
+
+void ffmpegvideoDlg::startPushStream() {
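+  // Lazily create the pusher and its worker thread on first use; because the
+  // decoder and the pusher live in different threads, the default (auto)
+  // connections below queue packets from the decode thread to the push thread.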
+  if (ffmpegPushStream == nullptr) {
+    ffmpegPushStream = new FFmpegPushStream;
+    ffmpegPushStream->setRemoteIP(pushStreamIP); // set the publish address
+  }
+
+  if (pushStreamThread == nullptr) {
+    pushStreamThread = new QThread;
+    pushStreamThread->start();
+    ffmpegPushStream->moveToThread(pushStreamThread);
+  }
+  connect(ffmpeg, &Cffmpeg_decode::sendInitPushStream_Signal, ffmpegPushStream,
+          &FFmpegPushStream::openNetworkStream);
+  connect(ffmpeg, &Cffmpeg_decode::sendStreamData_Signal, ffmpegPushStream,
+          &FFmpegPushStream::pushStream);
+  connect(ffmpeg, &Cffmpeg_decode::sendStopPushStream_Signal, ffmpegPushStream,
+          &FFmpegPushStream::stopPush);
+  ffmpeg->bPushStreamFlag = true;
+}
+
+void ffmpegvideoDlg::setPushStreamIP(QString pushStreamURL) {
+  pushStreamIP = pushStreamURL;
+}
diff --git a/Src/Video/ffmpegvideodlg.h b/Src/Video/ffmpegvideodlg.h
index f49fff7..9dbfc74 100644
--- a/Src/Video/ffmpegvideodlg.h
+++ b/Src/Video/ffmpegvideodlg.h
@@ -2,6 +2,8 @@
 #define FFMPEGVIDEODLG_H

 #include "cffmpeg_decode.h"
+#include "ffmpeginclude.h"
+#include "ffmpegpushstream.h"
 #include
 #include
 #include
@@ -10,23 +12,6 @@
 #include
 #include

-extern "C" {
-#include
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-#include
-
-#include
-}
-
 namespace Ui {
 class ffmpegvideoDlg;
 }
@@ -38,18 +23,19 @@ public:
   explicit ffmpegvideoDlg(QWidget *parent = nullptr);
   ~ffmpegvideoDlg();

-private:
-  Ui::ffmpegvideoDlg *ui;
-
-public:
-  void iniWindow();
   void setVedioSaveFileDirPath(QString saveDirPath);
   void play(QString);
   void stop();
   bool Isplay(bool IsstopPlay);
-
-  bool m_PlayStatus = false;
+  void startPushStream();
+  void setPushStreamIP(QString pushStreamURL);

+private:
+  Ui::ffmpegvideoDlg *ui;
+
+public:
+  bool m_PlayStatus = false;
   double m_setVideoAreaWidth;
   double m_setVideoAreaHeight;
   double m_ax;
@@ -71,6 +57,9 @@ private:
   QThread workerThread;
   QImage img;
   QString videoSaveDirPath;
+  QThread *pushStreamThread;
+  FFmpegPushStream *ffmpegPushStream;
+  QString pushStreamIP;
 };

 #endif // FFMPEGVIDEODLG_H