You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
342 lines
7.0 KiB
C++
342 lines
7.0 KiB
C++
#include <Windows.h>
|
|
#include "decoder_hw_d.h"
|
|
|
|
/**
 * AVCodecContext::get_format callback installed by decoder_hw_init().
 *
 * Unconditionally selects the DXVA2 hardware surface format and records the
 * choice in the InputStream stashed in the context's opaque pointer.  The
 * list of formats offered by the decoder (pix_fmts) is deliberately ignored:
 * this callback is only installed after dxva2_init() has succeeded, so
 * AV_PIX_FMT_DXVA2_VLD is assumed to be usable.
 */
AVPixelFormat GetHwFormat(AVCodecContext *s, const AVPixelFormat *pix_fmts)
{
    (void)pix_fmts; // DXVA2 is forced regardless of what the decoder offers

    InputStream* const stream = (InputStream*)s->opaque;
    stream->active_hwaccel_id = HWACCEL_DXVA2;
    stream->hwaccel_pix_fmt   = AV_PIX_FMT_DXVA2_VLD;
    return stream->hwaccel_pix_fmt;
}
|
|
|
|
bool Decoder_Hardware_D::ffmpeg_ref_release()
|
|
{
|
|
if (codecctx->opaque)
|
|
{
|
|
delete codecctx->opaque;
|
|
codecctx->opaque = NULL;
|
|
}
|
|
if (codecctx)
|
|
{
|
|
avcodec_close(codecctx);
|
|
av_free(codecctx);
|
|
codecctx = NULL;
|
|
}
|
|
if (mFrameYUV)
|
|
{
|
|
av_frame_free(&mFrameYUV);
|
|
mFrameYUV = NULL;
|
|
}
|
|
if (mSrcFrame)
|
|
{
|
|
av_frame_free(&mSrcFrame);
|
|
mSrcFrame = NULL;
|
|
}
|
|
if (mDstFrame)
|
|
{
|
|
av_frame_free(&mDstFrame);
|
|
mDstFrame = NULL;
|
|
}
|
|
if (img_convert_ctx)
|
|
{
|
|
sws_freeContext(img_convert_ctx);
|
|
img_convert_ctx = NULL;
|
|
}
|
|
if (scxt)
|
|
{
|
|
sws_freeContext(scxt);
|
|
scxt = NULL;
|
|
}
|
|
// av_free_packet(&pkt);
|
|
// av_packet_unref(&pkt);
|
|
if (s)
|
|
{
|
|
av_parser_close(s);
|
|
s= NULL;
|
|
}
|
|
return true;
|
|
}
|
|
|
|
void Decoder_Hardware_D::pic_ref_mem_release()
|
|
{
|
|
if (mOutBuf)
|
|
{
|
|
// delete[] mOutBuf;
|
|
av_free(mOutBuf);
|
|
mOutBuf = NULL;
|
|
}
|
|
if (mBufYUV)
|
|
{
|
|
av_free(mBufYUV);
|
|
mBufYUV = NULL;
|
|
}
|
|
return;
|
|
}
|
|
|
|
bool Decoder_Hardware_D::decoder_hw_init(int srcWth,int srcHth,int dstWth,int dstHth,int oPixFmt,HWND hwnd)
|
|
{
|
|
av_register_all();//注册解码器
|
|
// av_log_set_level(AV_LOG_DEBUG);
|
|
|
|
codec = avcodec_find_decoder(AV_CODEC_ID_H264);//根据流信息找到解码器
|
|
if (!codec) {
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
|
|
|
|
codecctx = avcodec_alloc_context3(codec);
|
|
if (!codecctx) {
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
|
|
codecctx->coded_width = srcWth;
|
|
codecctx->coded_height = srcHth;
|
|
codecctx->pix_fmt = AV_PIX_FMT_YUV420P;
|
|
|
|
AVCodecContext *temp_codecctx = codecctx;
|
|
memcpy(temp_codecctx, codecctx, sizeof(codecctx));
|
|
bAccel = true;
|
|
if (bAccel)
|
|
{
|
|
codecctx->thread_count = 1; // Multithreading is apparently not compatible with hardware decoding
|
|
InputStream *ist = new InputStream();
|
|
ist->hwaccel_id = HWACCEL_AUTO;
|
|
ist->active_hwaccel_id = HWACCEL_AUTO;
|
|
ist->hwaccel_device = "dxva2";
|
|
ist->dec = codec;
|
|
ist->dec_ctx = codecctx;
|
|
|
|
codecctx->opaque = ist;
|
|
if (dxva2_init(codecctx,hwnd) == 0)
|
|
{
|
|
codecctx->get_buffer2 = ist->hwaccel_get_buffer;
|
|
codecctx->get_format = GetHwFormat;
|
|
codecctx->thread_safe_callbacks = 1;
|
|
}
|
|
else
|
|
{
|
|
bAccel = false;
|
|
}
|
|
}
|
|
|
|
if (dstWth == 0 || dstHth == 0)
|
|
{
|
|
dstWth = srcWth;
|
|
dstHth = srcHth;
|
|
}
|
|
|
|
mSrcWth = srcWth;
|
|
mSrcHth = srcHth;
|
|
mDstWth = dstWth;
|
|
mDstHth = dstHth;
|
|
|
|
// //对于YUV内存有一半的余量
|
|
// mOutBuf = new uint8_t[mDstWth*mDstHth*3];
|
|
|
|
mPixFmt = oPixFmt;
|
|
|
|
mSrcFrame = av_frame_alloc();
|
|
if (!mSrcFrame) {
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
mDstFrame = av_frame_alloc();
|
|
if (!mDstFrame) {
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
|
|
if (!bAccel)
|
|
{
|
|
avcodec_close(codecctx);
|
|
codecctx = temp_codecctx;
|
|
|
|
m_D3DVidRender.InitD3D_YUV(hwnd, mSrcWth, mSrcHth);
|
|
|
|
mFrameYUV = av_frame_alloc();
|
|
if (!mFrameYUV) {
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
mBufYUV = (uint8_t *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, mSrcWth, mSrcHth, 1));
|
|
av_image_fill_arrays(mFrameYUV->data, ((AVPicture *)mFrameYUV)->linesize, mBufYUV,
|
|
AV_PIX_FMT_YUV420P, mSrcWth, mSrcHth, 1);
|
|
img_convert_ctx = sws_getContext(mSrcWth, mSrcHth, codecctx->pix_fmt, mSrcWth, mSrcHth, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
|
|
if (!img_convert_ctx)
|
|
{
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
|
|
|
|
switch (oPixFmt)
|
|
{
|
|
case 1:
|
|
{
|
|
opix_fmt = AV_PIX_FMT_RGB24;
|
|
}
|
|
break;
|
|
case 2:
|
|
{
|
|
opix_fmt = AV_PIX_FMT_BGR24;
|
|
}
|
|
break;
|
|
default:
|
|
{
|
|
opix_fmt = AV_PIX_FMT_YUV420P;
|
|
}
|
|
break;
|
|
}
|
|
|
|
ipix_fmt = AV_PIX_FMT_YUV420P;
|
|
scxt = sws_getContext(mSrcWth,mSrcHth,ipix_fmt,mDstWth,mDstHth,opix_fmt,SWS_BILINEAR,NULL,NULL,NULL);
|
|
if (!scxt)
|
|
{
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
|
|
}
|
|
else
|
|
{
|
|
switch (oPixFmt)
|
|
{
|
|
case 1:
|
|
{
|
|
opix_fmt = AV_PIX_FMT_RGB24;
|
|
}
|
|
break;
|
|
case 2:
|
|
{
|
|
opix_fmt = AV_PIX_FMT_BGR24;
|
|
}
|
|
break;
|
|
default:
|
|
{
|
|
opix_fmt = AV_PIX_FMT_YUV420P;
|
|
}
|
|
break;
|
|
}
|
|
|
|
ipix_fmt = AV_PIX_FMT_NV12;
|
|
scxt = sws_getContext(mSrcWth,mSrcHth,ipix_fmt,mDstWth,mDstHth,opix_fmt,SWS_BICUBIC/*SWS_BILINEAR*/,NULL,NULL,NULL);
|
|
if (!scxt)
|
|
{
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
}
|
|
|
|
mOutBuf = (uint8_t *)av_malloc(av_image_get_buffer_size(opix_fmt, mDstWth, mDstHth, 1));
|
|
int ret = av_image_fill_arrays(mDstFrame->data, ((AVPicture *)mDstFrame)->linesize, mOutBuf,
|
|
opix_fmt, mDstWth, mDstHth, 1);
|
|
|
|
/* open it */
|
|
if (avcodec_open2(codecctx, codec, NULL) < 0) {
|
|
ffmpeg_ref_release();
|
|
return false;
|
|
}
|
|
|
|
s = av_parser_init(AV_CODEC_ID_H264);
|
|
|
|
frameCount = 0;
|
|
sTime = GetTickCount();
|
|
return true;
|
|
|
|
}
|
|
void Decoder_Hardware_D::decoder_hw_release()
|
|
{
|
|
ffmpeg_ref_release();
|
|
pic_ref_mem_release();
|
|
|
|
eTime = GetTickCount();
|
|
DWORD dTime = eTime - sTime;
|
|
|
|
FILE* cfile;
|
|
cfile = fopen( "D://run_info.txt", "ab+" );
|
|
// char data[255];
|
|
// sprintf(data,"all frame count:%ld\nstart time:%ld\nend time:%ld\ndiff time:%ld\n",frameCount,sTime,eTime,dTime);
|
|
// fwrite(data,1,sizeof(data),cfile);
|
|
fprintf(cfile,"all frame count:%ld start time:%ld end time:%ld diff time:%ld\r\r\n",frameCount,sTime,eTime,dTime);
|
|
|
|
fclose(cfile);
|
|
|
|
|
|
}
|
|
|
|
/**
 * Feed a chunk of raw H.264 byte stream into the decoder.
 *
 * The input is split into complete packets with av_parser_parse2(); each
 * packet is decoded, every decoded picture is rendered (DXVA2 path when
 * bAccel is set, otherwise the software D3D path) and, when
 * decodingCallback is set, converted and delivered via
 * decoder_sws_scale_callback().
 *
 * @param in_buf  pointer to encoded H.264 data
 * @param in_len  number of bytes available at in_buf
 */
void Decoder_Hardware_D::decoder_hw_decoding(unsigned char* in_buf,int in_len)
{
    int len,size;
    unsigned char* Parser_buf = NULL;
    // Consume the whole buffer: the parser reports how many bytes it ate
    // (len) and produces a complete packet in Parser_buf when size > 0.
    while (in_len)
    {
        len = av_parser_parse2(s,codecctx,&Parser_buf,&size,in_buf,in_len,0,0,0);
        in_buf += len;
        in_len -= len;
        if (size)
        {
            int got_picture = 0;
            av_init_packet(&pkt);// no need to initialise the packet anywhere else
            pkt.data = /*inbuf_H264*/Parser_buf;  // parser-owned buffer, not freed here
            pkt.size = /*in_len*/size;
            int bytes_used = avcodec_decode_video2(codecctx, mSrcFrame, &got_picture, &pkt);
            if (got_picture)
            {
                if (bAccel)
                {
                    // fetch the data and render at the same time
                    dxva2_draw_call(codecctx, mSrcFrame);

                    frameCount ++;
                    // memcpy(data,pFrameBGR->data[0],codecctx->width*codecctx->height*3);

                    if (decodingCallback)
                    {
                        // NOTE(review): presumably copies the DXVA2 surface
                        // back to system memory (NV12) before conversion —
                        // confirm against dxva2_retrieve_data_call().
                        dxva2_retrieve_data_call(codecctx, mSrcFrame);
                        decoder_sws_scale_callback();
                    }
                }
                else
                {
                    // Software path: convert the decoded frame to YUV420P
                    // into mFrameYUV/mBufYUV and render it through D3D.
                    sws_scale(img_convert_ctx, (const uint8_t* const*)mSrcFrame->data, mSrcFrame->linesize,
                        0, mSrcHth, mFrameYUV->data, mFrameYUV->linesize);

                    m_D3DVidRender.Render_YUV(mBufYUV, mSrcFrame->width, mSrcFrame->height);

                    frameCount ++;

                    if (decodingCallback)
                    {
                        decoder_sws_scale_callback();
                    }
                }
            }

            // pkt.data points into the parser's buffer, so unref only
            // resets the packet fields — it does not free Parser_buf.
            av_packet_unref(&pkt);
        }
    }
}
|
|
|
|
void Decoder_Hardware_D::decoder_sws_scale_callback()
|
|
{
|
|
if (opix_fmt == AV_PIX_FMT_RGB24 || opix_fmt == AV_PIX_FMT_BGR24)
|
|
{
|
|
//翻转yuv
|
|
mSrcFrame->data[0] += mSrcFrame->linesize[0]*(mSrcHth -1);
|
|
mSrcFrame->linesize[0] *= -1;
|
|
mSrcFrame->data[1] += mSrcFrame->linesize[1]*(mSrcHth/2 -1);
|
|
mSrcFrame->linesize[1] *= -1;
|
|
mSrcFrame->data[2] += mSrcFrame->linesize[2]*(mSrcHth/2 -1);
|
|
mSrcFrame->linesize[2] *= -1;
|
|
}
|
|
|
|
sws_scale(scxt,(const uint8_t* const*)mSrcFrame->data,mSrcFrame->linesize,
|
|
0,mSrcHth,mDstFrame->data,mDstFrame->linesize);
|
|
|
|
decodingCallback(mOutBuf/*mDstFrame->data[0]*/,mDstWth,mDstHth);
|
|
|
|
}
|