#include "decoder_sw.h"

void Decoder_Software::ffmpeg_ref_release()
{
    if (codecctx) {
        avcodec_close(codecctx);
        av_free(codecctx);
        codecctx = NULL;
    }
    if (mSrcFrame) {
        av_frame_free(&mSrcFrame);
        mSrcFrame = NULL;
    }
    if (mDstFrame) {
        av_frame_free(&mDstFrame);
        mDstFrame = NULL;
    }
    if (scxt) {
        sws_freeContext(scxt);
        scxt = NULL;
    }
    // av_free_packet(&pkt);
    if (s) {
        av_parser_close(s);
        s = NULL;
    }
    return;
}

void Decoder_Software::pic_ref_mem_release()
{
    if (mOutBuf) {
        // delete[] mOutBuf;
        av_free(mOutBuf);
        mOutBuf = NULL;
    }
    return;
}

// oPixFmt: 0-YUV 1-RGB 2-BGR
bool Decoder_Software::decoder_sw_init(int srcWth, int srcHth, int dstWth, int dstHth, int oPixFmt)
{
    // av_register_all();
    avcodec_register_all();

    /* find the H.264 video decoder */
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!codec) {
        ffmpeg_ref_release();
        return false;
    }

    codecctx = avcodec_alloc_context3(codec);
    if (!codecctx) {
        ffmpeg_ref_release();
        return false;
    }

    if (codec->capabilities & CODEC_CAP_TRUNCATED)
        codecctx->flags |= CODEC_FLAG_TRUNCATED; /* we do not send complete frames */
    // codecctx->flags2 |= CODEC_FLAG2_FAST;
    // codecctx->width = img_width;
    // codecctx->height = img_height;
    codecctx->pix_fmt = AV_PIX_FMT_YUV420P;

    /* open it */
    if (avcodec_open2(codecctx, codec, NULL) < 0) {
        ffmpeg_ref_release();
        return false;
    }

    if (dstWth == 0 || dstHth == 0) {
        dstWth = srcWth;
        dstHth = srcHth;
    }
    mSrcWth = srcWth;
    mSrcHth = srcHth;
    mDstWth = dstWth;
    mDstHth = dstHth;

    // // leave half again as much headroom for the YUV buffer
    // mOutBuf = new uint8_t[mDstWth*mDstHth*3];
    mPixFmt = oPixFmt;
    switch (oPixFmt) {
    case 1:
        opix_fmt = AV_PIX_FMT_RGB24;
        break;
    case 2:
        opix_fmt = AV_PIX_FMT_BGR24;
        break;
    default:
        opix_fmt = AV_PIX_FMT_YUV420P;
        break;
    }

    scxt = sws_getContext(mSrcWth, mSrcHth, codecctx->pix_fmt,
                          mDstWth, mDstHth, opix_fmt,
                          SWS_BILINEAR, NULL, NULL, NULL);
    if (!scxt) {
        ffmpeg_ref_release();
        return false;
    }

    mSrcFrame = av_frame_alloc();
    if (!mSrcFrame) {
        ffmpeg_ref_release();
        return false;
    }
    mDstFrame = av_frame_alloc();
    if (!mDstFrame) {
        ffmpeg_ref_release();
        return false;
    }

    mOutBuf = (uint8_t *)av_malloc(av_image_get_buffer_size(opix_fmt, mDstWth, mDstHth, 1));
    av_image_fill_arrays(mDstFrame->data, mDstFrame->linesize, mOutBuf,
                         opix_fmt, mDstWth, mDstHth, 1);

    s = av_parser_init(AV_CODEC_ID_H264);
    return true;
}

void Decoder_Software::decoder_sw_release()
{
    ffmpeg_ref_release();
    pic_ref_mem_release();
}

// Implementation 1
bool YV12_to_RGB24(unsigned char* pYV12, unsigned char* pRGB24, int iWidth, int iHeight)
{
    if (!pYV12 || !pRGB24)
        return false;

    const long nYLen = long(iHeight * iWidth);
    const int nHfWidth = (iWidth >> 1);
    if (nYLen < 1 || nHfWidth < 1)
        return false;

    // YV12 layout: Y plane, then V plane, then U plane
    unsigned char* yData = pYV12;
    unsigned char* vData = &yData[nYLen];
    unsigned char* uData = &vData[nYLen >> 2];
    if (!uData || !vData)
        return false;

    int rgb[3];
    int i, j, m, n, x, y;
    m = -iWidth;
    n = -nHfWidth;
    for (y = 0; y < iHeight; y++) {
        m += iWidth;
        if (!(y % 2))
            n += nHfWidth;
        for (x = 0; x < iWidth; x++) {
            i = m + x;
            j = n + (x >> 1);
            rgb[2] = int(yData[i] + 1.370705 * (vData[j] - 128));                               // R component
            rgb[1] = int(yData[i] - 0.698001 * (uData[j] - 128) - 0.703125 * (vData[j] - 128)); // G component
            rgb[0] = int(yData[i] + 1.732446 * (uData[j] - 128));                               // B component
            j = nYLen - iWidth - m + x; // write the output bottom-up
            i = (j << 1) + j;           // i = j * 3
            for (j = 0; j < 3; j++) {
                if (rgb[j] >= 0 && rgb[j] <= 255) {
                    pRGB24[i + j] = rgb[j];
                } else {
                    pRGB24[i + j] = (rgb[j] < 0) ? 0 : 255;
                }
            }
        }
    }
    return true;
}
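// Implementation 2 (sketch): the same YV12 -> RGB24 conversion done with libswscale
// instead of the per-pixel math above. This helper is illustrative only -- its name and
// the assumption of tightly packed planes (linesize == width) are not part of the original
// code, it relies on the FFmpeg headers already pulled in via decoder_sw.h, and unlike
// YV12_to_RGB24 it writes the image top-down in R,G,B order.
static bool yv12_to_rgb24_sws(const unsigned char* pYV12, unsigned char* pRGB24,
                              int iWidth, int iHeight)
{
    if (!pYV12 || !pRGB24 || iWidth <= 0 || iHeight <= 0)
        return false;

    // YV12 memory order is Y, V, U; AV_PIX_FMT_YUV420P expects data[] as Y, U, V.
    const uint8_t* srcData[4] = {
        pYV12,                                             // Y
        pYV12 + iWidth * iHeight + (iWidth * iHeight) / 4, // U (third plane in YV12)
        pYV12 + iWidth * iHeight,                          // V (second plane in YV12)
        NULL
    };
    int srcLinesize[4] = { iWidth, iWidth / 2, iWidth / 2, 0 };
    uint8_t* dstData[4] = { pRGB24, NULL, NULL, NULL };
    int dstLinesize[4] = { iWidth * 3, 0, 0, 0 };

    SwsContext* ctx = sws_getContext(iWidth, iHeight, AV_PIX_FMT_YUV420P,
                                     iWidth, iHeight, AV_PIX_FMT_RGB24,
                                     SWS_BILINEAR, NULL, NULL, NULL);
    if (!ctx)
        return false;

    sws_scale(ctx, srcData, srcLinesize, 0, iHeight, dstData, dstLinesize);
    sws_freeContext(ctx);
    return true;
}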
void Decoder_Software::decoder_sw_decoding(unsigned char* in_buf, int in_len)
{
    if (in_buf == nullptr || in_len <= 0) {
        return;
    }

    unsigned char* Parser_buf = nullptr;
    int len = 0, size = 0;
    while (in_len) {
        len = av_parser_parse2(s, codecctx, &Parser_buf, &size,
                               in_buf, in_len, 0, 0, 0);
        in_buf += len;
        in_len -= len;
        if (size) {
            av_init_packet(&pkt); // re-initializing here on every iteration is not strictly necessary
            pkt.data = Parser_buf;
            pkt.size = size;

            int got_picture = 0;
            int ret = avcodec_decode_video2(codecctx, mSrcFrame, &got_picture, &pkt);
            if (got_picture > 0 && ret >= 0) {
                // callback
                if (decodingCallback) {
                    if (opix_fmt == AV_PIX_FMT_RGB24 || opix_fmt == AV_PIX_FMT_BGR24) {
                        // flip the YUV planes vertically before conversion
                        mSrcFrame->data[0] += mSrcFrame->linesize[0] * (mSrcHth - 1);
                        mSrcFrame->linesize[0] *= -1;
                        mSrcFrame->data[1] += mSrcFrame->linesize[1] * (mSrcHth / 2 - 1);
                        mSrcFrame->linesize[1] *= -1;
                        mSrcFrame->data[2] += mSrcFrame->linesize[2] * (mSrcHth / 2 - 1);
                        mSrcFrame->linesize[2] *= -1;
                    }
                    // convert YUV to the requested BGR/RGB (or copy YUV) output format;
                    // sws_scale is a C API and cannot throw, so no try/catch is needed
                    sws_scale(scxt, mSrcFrame->data, mSrcFrame->linesize, 0, mSrcHth,
                              mDstFrame->data, mDstFrame->linesize);
                    decodingCallback(mOutBuf, mDstWth, mDstHth);
                }
            }
        }
    }
}
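// Sketch only: on FFmpeg 4.x and later, the av_init_packet()/avcodec_decode_video2() pair
// used in decoder_sw_decoding() is deprecated in favour of the send/receive API. The helper
// below is an illustrative replacement for the decode step, not part of the original class;
// it assumes the same kind of AVCodecContext/AVFrame pair and that the caller still runs
// sws_scale() and the callback on each received frame.
static void decode_parsed_packet(AVCodecContext* ctx, AVFrame* frame,
                                 const uint8_t* data, int size)
{
    AVPacket* pkt = av_packet_alloc(); // replaces the stack AVPacket + av_init_packet()
    if (!pkt)
        return;
    pkt->data = const_cast<uint8_t*>(data); // parser-owned buffer, not copied
    pkt->size = size;

    if (avcodec_send_packet(ctx, pkt) == 0) {
        // One packet can produce zero or more frames; drain them all.
        while (avcodec_receive_frame(ctx, frame) == 0) {
            // frame->data / frame->linesize now hold a decoded YUV420P picture;
            // convert it with sws_scale() and invoke the callback as above.
        }
    }
    av_packet_free(&pkt);
}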