#include "ffmpegpushstream.h"

FFmpegPushStream::FFmpegPushStream(QObject *parent) : QObject{parent} {}
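
// Rough usage sketch (assumed caller-side code, not part of this class; the
// URL and the inputFmtCtx variable are placeholders):
//
//   FFmpegPushStream pusher;
//   pusher.setRemoteIP("rtmp://example.com/live/stream");   // hypothetical URL
//   if (pusher.openNetworkStream(inputFmtCtx) < 0)          // inputFmtCtx from the demuxer
//     return;
//   int64_t startTime = av_gettime();
//   // for each demuxed AVPacket *pkt:
//   //   pusher.pushStream(pkt, frameCount++, startTime, firstDts);
//   pusher.stopPush();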

/**
 * @brief Set the push (publish) destination.
 * @param url Remote push URL (e.g. an RTMP/FLV endpoint).
 */
void FFmpegPushStream::setRemoteIP(QString url) { pushStreamIP = url; }

/**
 * @brief Open the network output stream for the given input context.
 * @param inputFormatCtx Demuxer context of the source being pushed.
 * @return 1 on success, -1 on failure.
 */
int FFmpegPushStream::openNetworkStream(AVFormatContext *inputFormatCtx) {
  mInitStatus = false;
  if (pushStreamIP.isEmpty())
    return -1;

  if (!inputFormatCtx)
    return -1;

  int ret;
  // Initialize the network output stream (FLV muxer over the configured URL).
  // const char *output_url = "rtsp://182.92.130.23/app/stream999";
  ret = avformat_alloc_output_context2(&outputFormatCtx, NULL, "flv",
                                       pushStreamIP.toUtf8().constData());
  if (ret < 0) {
    qDebug() << "Could not create output context.";
    return -1;
  }
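
  // Note: the "flv" muxer is used because RTMP ingest endpoints expect an FLV
  // bitstream; for an RTSP push URL the muxer name passed above would be
  // "rtsp" instead (stated for context, not exercised by this class).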

  // Copy stream parameters: take the first video stream from the input and
  // create a matching stream on the FLV output.
  for (unsigned int i = 0; i < inputFormatCtx->nb_streams; ++i) {
    // AVStream *stream = inputFormatCtx->streams[i];
    if (inputFormatCtx->streams[i]->codecpar->codec_type ==
        AVMEDIA_TYPE_VIDEO) {
      AVStream *inputStream = inputFormatCtx->streams[i];
      AVStream *outputStream = avformat_new_stream(outputFormatCtx, nullptr);
      if (!outputStream) {
        qDebug() << "Failed allocating output stream.";
        avformat_free_context(outputFormatCtx);
        outputFormatCtx = nullptr;
        return -1;
      }
      // Copy the codec parameters from the input stream.
      ret = avcodec_parameters_copy(outputStream->codecpar,
                                    inputStream->codecpar);
      if (ret < 0) {
        qWarning() << "avcodec_parameters_copy failed";
        avformat_free_context(outputFormatCtx);
        outputFormatCtx = nullptr;
        return -1;
      }
      // outputStream->codecpar = inputStream->codecpar;
      outputStream->codecpar->codec_tag = 0;
      // outputStream->time_base.num = 1;
      break;
    }
  }

  // Open the output I/O context (network URL) unless the muxer does its own I/O.
  if (!(outputFormatCtx->oformat->flags & AVFMT_NOFILE)) {
    if (avio_open(&outputFormatCtx->pb, pushStreamIP.toUtf8().constData(),
                  AVIO_FLAG_WRITE) < 0) {
      qDebug() << "Could not open output URL.";
      avformat_free_context(outputFormatCtx);
      outputFormatCtx = nullptr;
      return -1;
    }
  }

  // Write the container header to the output.
  ret = avformat_write_header(outputFormatCtx, NULL);
  if (ret < 0) {
    qDebug() << "Error occurred while writing the header to the output.";
    if (!(outputFormatCtx->oformat->flags & AVFMT_NOFILE))
      avio_closep(&outputFormatCtx->pb);
    avformat_free_context(outputFormatCtx);
    outputFormatCtx = nullptr;
    return -1;
  }
  mInitStatus = true;
  this->inputFormatCtx = inputFormatCtx;
  // startTime = av_gettime_relative();
  return 1;
}
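
// For reference, a minimal sketch of how the inputFormatCtx handed to
// openNetworkStream() is typically produced (assumed caller-side code; the
// variable name and the RTSP URL are placeholders, not part of this class):
//
//   AVFormatContext *inputFmtCtx = nullptr;
//   if (avformat_open_input(&inputFmtCtx, "rtsp://camera.local/stream",
//                           nullptr, nullptr) < 0)
//     return;                                    // open failed
//   if (avformat_find_stream_info(inputFmtCtx, nullptr) < 0)
//     return;                                    // no usable stream info
//   // ... pass inputFmtCtx to FFmpegPushStream::openNetworkStream() ...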

/**
 * @brief Try to re-establish the push connection after a broken-pipe or
 *        connection-reset error.
 * @param ret Error code returned by the failed write.
 * @return >= 0 once the stream is reopened, -1 if all retries fail.
 */
int FFmpegPushStream::reconnect(int ret) {
  if (ret == AVERROR(EPIPE) || ret == AVERROR(ECONNRESET)) {
    for (int nRetryCount = 0; nRetryCount < MAXCONNECT; ++nRetryCount) {
      // Close the current output I/O and reopen it before rewriting the header.
      if (outputFormatCtx && !(outputFormatCtx->oformat->flags & AVFMT_NOFILE)) {
        avio_closep(&outputFormatCtx->pb);
        if (avio_open(&outputFormatCtx->pb, pushStreamIP.toUtf8().constData(),
                      AVIO_FLAG_WRITE) < 0) {
          qDebug() << "Failed to reopen output" << QString::number(nRetryCount + 1);
          av_usleep(5 * 1000000);
          continue;
        }
      }
      // Try to restart the stream by writing a fresh header.
      ret = avformat_write_header(outputFormatCtx, nullptr);
      if (ret < 0) {
        qDebug() << "Failed to reconnect" << QString::number(nRetryCount + 1);
        av_usleep(5 * 1000000);
      } else {
        mInitStatus = true;
        return ret;
      }
    }
    mInitStatus = false;
    return -1;
  }
  return ret; // not a connection error: nothing to do here
}

/**
 * @brief Push one packet to the streaming server.
 * @param pkt: encoded (not yet decoded) packet
 * @param frm_cnt: frame counter
 * @param startTime: wall-clock time when pushing started
 * @param firstDts: dts of the first frame
 * @return 1 if the packet was consumed, a negative value on error.
 */
int FFmpegPushStream::pushStream(AVPacket *pkt, int frm_cnt, int64_t startTime,
                                 int64_t firstDts) {

  // qDebug() << "receive packet";
  if (!mInitStatus) {
    av_packet_unref(pkt);
    return -1;
  }

  if (pkt->dts <= 0) {
    av_packet_unref(pkt);
    return -1;
  }
  // qDebug() << "****** pushing frame" << QString::number(frm_cnt++);
  int inputStreamIndex = pkt->stream_index;
  int outputStreamIndex = 0;
  // Packets without a pts (e.g. a raw H.264 elementary stream) need their pts
  // regenerated from the frame counter and the nominal frame rate.
  if (pkt->pts == AV_NOPTS_VALUE) {
    AVRational time_base = inputFormatCtx->streams[inputStreamIndex]->time_base;
    // Duration between two frames (in AV_TIME_BASE units, i.e. microseconds)
    int64_t calc_duration =
        (double)AV_TIME_BASE /
        av_q2d(inputFormatCtx->streams[inputStreamIndex]->r_frame_rate);
    // Rebuild pts/dts/duration in the input stream's time base.
    pkt->pts = (double)(frm_cnt * calc_duration) /
               (double)(av_q2d(time_base) * AV_TIME_BASE);
    pkt->dts = pkt->pts;
    pkt->duration =
        (double)calc_duration / (double)(av_q2d(time_base) * AV_TIME_BASE);
  }
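
  // Worked example (assumed values, for illustration only): with a 25 fps
  // stream and a 1/90000 time base,
  //   calc_duration = 1000000 / 25              = 40000 us per frame
  //   pts step      = 40000 / ((1/90000) * 1e6) = 3600 ticks per frame
  // so frame N gets pts = dts = N * 3600 and duration = 3600.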

  // Pace the push speed: sleep so the packet is sent roughly at its
  // presentation time relative to the start of the push.
  firstDts = av_rescale_q(firstDts,
                          inputFormatCtx->streams[inputStreamIndex]->time_base,
                          AVRational{1, AV_TIME_BASE});
  auto pts_time = av_rescale_q(
      pkt->dts, inputFormatCtx->streams[inputStreamIndex]->time_base,
      AVRational{1, AV_TIME_BASE});
  int64_t streamTime = pts_time - firstDts; // stream time of this packet (us)
  auto now_time = av_gettime() - startTime; // elapsed wall-clock time (us)
  int64_t delay = streamTime - now_time;
  delay = delay < MAXDELAY ? delay : MAXDELAY;
  if (delay >= 0) {
    // qDebug() << "****************PushStream sleep time:"
    //          << QString::number(delay / 1000);
    av_usleep(delay);
    // sleepMsec(delay);
  } else {
    // Running behind: send immediately without sleeping.
    // av_packet_unref(pkt);
    // return 1;
  }
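
  // Illustration (assumed numbers): if this packet's stream time is 2.000 s
  // after the first dts but only 1.950 s of wall-clock time have elapsed, the
  // call sleeps for the 50 ms difference (capped at MAXDELAY) before writing.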

  // After the delay, rescale the timestamps from the input stream's time base
  // to the output stream's time base.
  AVRational istream_base =
      inputFormatCtx->streams[inputStreamIndex]->time_base;
  AVRational ostream_base =
      outputFormatCtx->streams[outputStreamIndex]->time_base;
  pkt->pts =
      av_rescale_q_rnd(pkt->pts, istream_base, ostream_base,
                       (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
  pkt->dts =
      av_rescale_q_rnd(pkt->dts, istream_base, ostream_base,
                       (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
  pkt->pts = pkt->pts < 0 ? 0 : pkt->pts;
  pkt->dts = pkt->dts < 0 ? 0 : pkt->dts;
  pkt->duration =
      (int64_t)av_rescale_q(pkt->duration, istream_base, ostream_base);
  pkt->pos = -1;
  if (pkt->pts < pkt->dts) {
    // A dts later than the pts would be rejected by the muxer; drop the packet.
    av_packet_unref(pkt);
    return 1;
  }
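
  // Example (assumed values): a pts of 3600 in a 1/90000 input time base maps
  // to av_rescale_q_rnd(3600, {1,90000}, {1,1000}, ...) = 3600 * 1000 / 90000
  // = 40 in the output time base (typically 1/1000, i.e. milliseconds, for FLV).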

  // Write the packet to the push server.
  pkt->stream_index = 0;
  int ret = av_interleaved_write_frame(outputFormatCtx, pkt);
  if (ret < 0) {
    if (ret == -10053) {
      // 10053 is the Windows socket error WSAECONNABORTED (connection aborted).
      qDebug() << "Network connection is unstable";
    }
    // if (ret == AVERROR(EPIPE) || ret == AVERROR(ECONNRESET)) {
    //   qDebug() << "Network connection is unstable";
    // }
    // reconnect(ret);
    av_packet_unref(pkt);
    return ret;
  }
  // The packet was written successfully; av_packet_free() also unreferences it,
  // so no separate av_packet_unref() is needed here.
  av_packet_free(&pkt);
  return 1;
}
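
// A small helper sketch for readable FFmpeg error logging (not used elsewhere
// in this file; shown only to illustrate how raw codes like the ones above can
// be decoded):
//
//   static QString ffmpegErrorString(int errnum) {
//     char buf[AV_ERROR_MAX_STRING_SIZE] = {0};
//     av_strerror(errnum, buf, sizeof(buf));
//     return QString::fromUtf8(buf);
//   }
//
//   // e.g. qDebug() << "write_frame failed:" << ffmpegErrorString(ret);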

/**
 * @brief Stop pushing: flush the trailer and release the output context.
 */
void FFmpegPushStream::stopPush() {
  if (mInitStatus)
    av_write_trailer(outputFormatCtx);
  // Close the output I/O context.
  if (outputFormatCtx && !(outputFormatCtx->oformat->flags & AVFMT_NOFILE)) {
    avio_closep(&outputFormatCtx->pb);
  }
  if (outputFormatCtx) {
    avformat_free_context(outputFormatCtx);
    outputFormatCtx = nullptr;
  }
  mInitStatus = false;
}