#include "ffmpegpushstream.h"

FFmpegPushStream::FFmpegPushStream(QObject *parent) : QObject{parent} {}

/**
 * @brief Set the push-stream destination.
 * @param url Remote push-stream URL.
 */
void FFmpegPushStream::setRemoteIP(QString url) { pushStreamIP = url; }

/**
 * @brief Open the network output stream and copy the video stream
 *        parameters from the input context into it.
 * @param inputFormatCtx Demuxer context the packets will come from.
 * @return 1 on success, -1 on failure.
 */
int FFmpegPushStream::openNetworkStream(AVFormatContext *inputFormatCtx) {
    if (pushStreamIP.isEmpty()) return -1;

    int ret;
    // Allocate the output context for the FLV muxer (e.g. an RTMP push URL).
    ret = avformat_alloc_output_context2(&outputFormatCtx, NULL, "flv",
                                         pushStreamIP.toUtf8().constData());
    if (ret < 0) {
        qDebug() << "Could not create output context.";
        return -1;
    }

    // Copy the video stream information to the output.
    for (unsigned int i = 0; i < inputFormatCtx->nb_streams; ++i) {
        if (inputFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            AVStream *inputStream = inputFormatCtx->streams[i];
            AVStream *outputStream = avformat_new_stream(outputFormatCtx, nullptr);
            if (!outputStream) {
                qDebug() << "Failed allocating output stream.";
                return -1;
            }
            // Copy the codec parameters.
            ret = avcodec_parameters_copy(outputStream->codecpar, inputStream->codecpar);
            if (ret < 0) {
                qWarning() << "avcodec_parameters_copy failed";
                return -1;
            }
            // The tag copied from the input container may be incompatible
            // with FLV; clear it so the muxer can choose its own.
            outputStream->codecpar->codec_tag = 0;
            break;
        }
    }

    // Open the output URL.
    if (!(outputFormatCtx->flags & AVFMT_NOFILE)) {
        if (avio_open(&outputFormatCtx->pb, pushStreamIP.toUtf8().constData(),
                      AVIO_FLAG_WRITE) < 0) {
            qDebug() << "Could not open output file.";
            return -1;
        }
    }

    // Write the stream header.
    if (avformat_write_header(outputFormatCtx, NULL) < 0) {
        qDebug() << "Error occurred when opening output file.";
        return -1;
    }

    mInitStatus = true;
    this->inputFormatCtx = inputFormatCtx;
    return 1;
}

/**
 * @brief Rescale a packet's timestamps, pace it to real time, and write
 *        it to the push-stream server.
 * @param pkt     Packet read from the input context.
 * @param frm_cnt Frame counter, used to synthesize a PTS when missing.
 * @return 0 on success, 1 if the packet was skipped (pts < dts),
 *         negative on error.
 */
int FFmpegPushStream::pushStream(AVPacket *pkt, int frm_cnt) {
    if (!mInitStatus) return -1;
    if (pkt->dts <= 0) return -1;

    int inputStreamIndex = pkt->stream_index;
    int outputStreamIndex = 0;

    // Packets without a PTS (e.g. a raw, undecoded H.264 elementary
    // stream) need their timestamps computed from the frame rate.
    if (pkt->pts == AV_NOPTS_VALUE) {
        AVRational time_base = inputFormatCtx->streams[inputStreamIndex]->time_base;
        // Duration between two frames (microseconds).
        int64_t calc_duration =
            (double)AV_TIME_BASE /
            av_q2d(inputFormatCtx->streams[inputStreamIndex]->r_frame_rate);
        // Derive PTS/DTS/duration from the frame counter.
        pkt->pts = (double)(frm_cnt * calc_duration) /
                   (double)(av_q2d(time_base) * AV_TIME_BASE);
        pkt->dts = pkt->pts;
        pkt->duration =
            (double)calc_duration / (double)(av_q2d(time_base) * AV_TIME_BASE);
    }

    // Rescale the timestamps from the input to the output time base.
    AVRational istream_base = inputFormatCtx->streams[inputStreamIndex]->time_base;
    AVRational ostream_base = outputFormatCtx->streams[outputStreamIndex]->time_base;
    pkt->pts = av_rescale_q_rnd(pkt->pts, istream_base, ostream_base,
                                (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
    pkt->dts = av_rescale_q_rnd(pkt->dts, istream_base, ostream_base,
                                (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
    pkt->pts = pkt->pts < 0 ? 0 : pkt->pts;
    pkt->dts = pkt->dts < 0 ? 0 : pkt->dts;
    pkt->duration = av_rescale_q(pkt->duration, istream_base, ostream_base);
    pkt->pos = -1;
    if (pkt->pts < pkt->dts) {
        return 1;
    }

    // Pace the push speed: sleep until the packet's timestamp is due.
    // pts_time and now_time are in milliseconds.
    auto pts_time = av_rescale_q(pkt->dts, ostream_base, AVRational{1, 1000});
    auto now_time = av_gettime() / 1000;
    if (pts_time > now_time) {
        qDebug() << "****************sleep time:" << QString::number(pts_time - now_time);
        // av_usleep() expects microseconds, so convert from milliseconds.
        av_usleep(static_cast<unsigned int>((pts_time - now_time) * 1000));
    }

    // Write the packet to the push-stream server.
    pkt->stream_index = 0;
    int ret = av_interleaved_write_frame(outputFormatCtx, pkt);
    if (ret < 0) {
        return ret;
    }
    // av_interleaved_write_frame() takes ownership of the packet's
    // reference, so this unref is a harmless no-op kept for clarity.
    av_packet_unref(pkt);
    return 0;
}
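
/*
 * Usage sketch (illustrative only, not part of the original file): a minimal
 * read-and-push loop driving this class. The function name runPushLoop, the
 * input URL, and the demuxing loop are assumptions, not the author's code;
 * only the FFmpegPushStream API used below comes from this file. A matching
 * shutdown routine that calls av_write_trailer()/avio_closep() on the output
 * context is assumed to exist elsewhere. Guarded with #if 0 so it does not
 * affect the build.
 */
#if 0
extern "C" {
#include <libavformat/avformat.h>
}

static int runPushLoop(const char *inputUrl, const QString &pushUrl) {
    AVFormatContext *inCtx = nullptr;
    if (avformat_open_input(&inCtx, inputUrl, nullptr, nullptr) < 0) return -1;
    if (avformat_find_stream_info(inCtx, nullptr) < 0) {
        avformat_close_input(&inCtx);
        return -1;
    }
    // openNetworkStream() copies only the video stream, so push video packets only.
    int videoIdx = av_find_best_stream(inCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);

    FFmpegPushStream pusher;
    pusher.setRemoteIP(pushUrl);
    if (pusher.openNetworkStream(inCtx) != 1) {
        avformat_close_input(&inCtx);
        return -1;
    }

    AVPacket *pkt = av_packet_alloc();
    int frameCount = 0;
    while (av_read_frame(inCtx, pkt) >= 0) {
        if (pkt->stream_index == videoIdx)
            pusher.pushStream(pkt, frameCount++);
        av_packet_unref(pkt); // no-op if pushStream() already released it
    }
    av_packet_free(&pkt);
    avformat_close_input(&inCtx);
    return 0;
}
#endif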