PayloadAPP/Src/Video/ffmpegpushstream.cpp

#include "ffmpegpushstream.h"
FFmpegPushStream::FFmpegPushStream(QObject *parent) : QObject{parent} {}
/**
 * @brief Set the address of the remote streaming server.
 * @param url Push URL of the remote server.
 */
void FFmpegPushStream::setRemoteIP(QString url) { pushStreamIP = url; }
/**
 * @brief Open the network output stream: create an FLV output context for the
 *        configured push URL, copy the video stream parameters from the input
 *        context and write the stream header.
 * @param inputFormatCtx Demuxer context whose video stream will be forwarded.
 * @return 1 on success, -1 on failure.
 */
int FFmpegPushStream::openNetworkStream(AVFormatContext *inputFormatCtx) {
  if (pushStreamIP.isEmpty())
    return -1;
  if (!inputFormatCtx)
    return -1;

  int ret;
  // Initialize the network output stream (FLV muxer).
  // const char *output_url = "rtsp://182.92.130.23/app/stream999";
  ret = avformat_alloc_output_context2(&outputFormatCtx, NULL, "flv",
                                       pushStreamIP.toUtf8().constData());
  if (ret < 0) {
    qDebug() << "Could not create output context.";
    return -1;
  }

  // Copy the stream information of the first video stream.
  for (unsigned int i = 0; i < inputFormatCtx->nb_streams; ++i) {
    if (inputFormatCtx->streams[i]->codecpar->codec_type ==
        AVMEDIA_TYPE_VIDEO) {
      AVStream *inputStream = inputFormatCtx->streams[i];
      AVStream *outputStream = avformat_new_stream(outputFormatCtx, nullptr);
      if (!outputStream) {
        qDebug() << "Failed allocating output stream.";
        return -1;
      }
      // Copy the codec parameters.
      ret = avcodec_parameters_copy(outputStream->codecpar,
                                    inputStream->codecpar);
      if (ret < 0) {
        // free();
        qWarning() << "avcodec_parameters_copy failed";
        return -1;
      }
      // outputStream->codecpar = inputStream->codecpar;
      outputStream->codecpar->codec_tag = 0;
      // outputStream->time_base.num = 1;
      break;
    }
  }

  // Open the output I/O context (the network connection).
  if (!(outputFormatCtx->oformat->flags & AVFMT_NOFILE)) {
    if (avio_open(&outputFormatCtx->pb, pushStreamIP.toUtf8().constData(),
                  AVIO_FLAG_WRITE) < 0) {
      qDebug() << "Could not open output URL.";
      return -1;
    }
  }

  // Write the stream header.
  ret = avformat_write_header(outputFormatCtx, NULL);
  if (ret < 0) {
    qDebug() << "Error occurred when writing the header to the output.";
    return -1;
  }

  mInitStatus = true;
  this->inputFormatCtx = inputFormatCtx;
  startTime = av_gettime_relative();
  return 1;
}
/**
 * @brief Push one packet to the streaming server, pacing output to the source
 *        frame rate and rescaling timestamps into the output time base.
 * @param pkt       Packet read from the input context; it is freed here after a
 *                  successful write.
 * @param frm_cnt   Frame counter, used to synthesize a pts for packets that lack one.
 * @param startTime Wall-clock time (av_gettime()) at which pushing started, in microseconds.
 * @param firstDts  dts of the first pushed packet, in the input stream's time base.
 * @return 1 on success (or when the packet is skipped), -1 if the pusher is not
 *         initialized or the packet has no valid dts, otherwise the error code
 *         returned by av_interleaved_write_frame().
 */
int FFmpegPushStream::pushStream(AVPacket *pkt, int frm_cnt, int64_t startTime,
                                 int64_t firstDts) {
  if (!mInitStatus)
    return -1;
  if (pkt->dts <= 0)
    return -1;
  // qDebug() << "****** pushing frame" << QString::number(frm_cnt++);
  int inputStreamIndex = pkt->stream_index;
  int outputStreamIndex = 0;

  // Video data without a pts (e.g. a raw, undecoded H.264 stream) needs its
  // timestamps recomputed from the frame rate.
  if (pkt->pts == AV_NOPTS_VALUE) {
    AVRational time_base = inputFormatCtx->streams[inputStreamIndex]->time_base;
    // Duration between 2 frames (us)
    int64_t calc_duration =
        (double)AV_TIME_BASE /
        av_q2d(inputFormatCtx->streams[inputStreamIndex]->r_frame_rate);
    // Reset parameters
    pkt->pts = (double)(frm_cnt * calc_duration) /
               (double)(av_q2d(time_base) * AV_TIME_BASE);
    pkt->dts = pkt->pts;
    pkt->duration =
        (double)calc_duration / (double)(av_q2d(time_base) * AV_TIME_BASE);
  }

  // Pace the push speed: if the frame's time relative to the first frame is
  // ahead of the elapsed wall-clock time, sleep for the difference.
  firstDts = av_rescale_q(firstDts,
                          inputFormatCtx->streams[inputStreamIndex]->time_base,
                          AVRational{1, AV_TIME_BASE});
  auto pts_time = av_rescale_q(
      pkt->dts, inputFormatCtx->streams[inputStreamIndex]->time_base,
      AVRational{1, AV_TIME_BASE});
  int64_t streamTime = pts_time - firstDts; // Frame time relative to the first frame
  auto now_time = av_gettime() - startTime; // Elapsed wall-clock time
  int64_t delay = streamTime - now_time;
  if (delay > 0) {
    // qDebug() << "****************sleep time:" << QString::number(delay /
    // 1000);
    av_usleep(delay);
  }

  // After the delay, rescale the timestamps into the output stream's time base.
  AVRational istream_base =
      inputFormatCtx->streams[inputStreamIndex]->time_base;
  AVRational ostream_base =
      outputFormatCtx->streams[outputStreamIndex]->time_base;
  pkt->pts =
      av_rescale_q_rnd(pkt->pts, istream_base, ostream_base,
                       (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
  pkt->dts =
      av_rescale_q_rnd(pkt->dts, istream_base, ostream_base,
                       (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
  pkt->pts = pkt->pts < 0 ? 0 : pkt->pts;
  pkt->dts = pkt->dts < 0 ? 0 : pkt->dts;
  pkt->duration =
      (int64_t)av_rescale_q(pkt->duration, istream_base, ostream_base);
  pkt->pos = -1;
  if (pkt->pts < pkt->dts) {
    return 1;
  }

  // Push the packet to the streaming server.
  pkt->stream_index = 0;
  int ret = av_interleaved_write_frame(outputFormatCtx, pkt);
  if (ret < 0) {
    return ret;
  }

  // The packet was written successfully; release it (av_packet_free() also
  // unreferences it, so a separate av_packet_unref() is not needed).
  av_packet_free(&pkt);
  return 1;
}
/**
 * @brief Stop pushing: write the trailer and release the output context.
 */
void FFmpegPushStream::stopPush() {
  if (!outputFormatCtx)
    return;
  if (mInitStatus)
    av_write_trailer(outputFormatCtx);
  // Close the output
  if (!(outputFormatCtx->oformat->flags & AVFMT_NOFILE)) {
    avio_close(outputFormatCtx->pb);
  }
  avformat_free_context(outputFormatCtx);
  outputFormatCtx = nullptr;
  mInitStatus = false;
}
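
/*
 * Usage sketch (illustrative, not part of the original class): a minimal read
 * loop that forwards the video packets of an already-opened demuxer context to
 * the push URL. The names inputFmtCtx, videoStreamIndex and rtmpUrl are
 * assumptions supplied by the caller.
 *
 *   FFmpegPushStream pusher;
 *   pusher.setRemoteIP(rtmpUrl); // e.g. an RTMP endpoint, matching the FLV muxer
 *   if (pusher.openNetworkStream(inputFmtCtx) < 0)
 *     return;
 *
 *   int64_t startTime = av_gettime();
 *   int64_t firstDts = AV_NOPTS_VALUE;
 *   int frameCount = 0;
 *   while (true) {
 *     AVPacket *pkt = av_packet_alloc();
 *     if (av_read_frame(inputFmtCtx, pkt) < 0) {
 *       av_packet_free(&pkt);
 *       break;
 *     }
 *     if (pkt->stream_index != videoStreamIndex) { // only video is forwarded
 *       av_packet_free(&pkt);
 *       continue;
 *     }
 *     if (firstDts == AV_NOPTS_VALUE)
 *       firstDts = pkt->dts;
 *     if (pusher.pushStream(pkt, frameCount++, startTime, firstDts) < 0)
 *       av_packet_free(&pkt); // pushStream() frees pkt only after a successful write
 *   }
 *   pusher.stopPush();
 */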