openh264中的AQ模块

openh264中的AQ模式总共有两种,

  AQ_QUALITY_MODE,   // Quality mode

  AQ_BITRATE_MODE    // Bitrate mode

两者的区别在于计算iAverageTextureIndex时采用的权重因子不一样;每个宏块的AQ偏移值的计算在AdaptiveQuantization.cpp的Process函数中完成:

// Computes a per-macroblock delta-QP map for adaptive quantization.
//
// For every 16x16 luma macroblock a motion index (variance of the frame
// difference) and a texture index (variance of the source block) are derived,
// either from precomputed VAA SAD/SSD/sum tables or by invoking the m_pfVar
// kernel on the pixels. Each index is then normalized by its frame-wide
// average and mapped through a saturating rational model to a per-MB delta QP
// stored in pMotionTextureIndexToDeltaQp[]; the mean delta is stored in
// iAverMotionTextureIndexToDeltaQp. All arithmetic is integer fixed-point,
// scaled by AQ_INT_MULTIPLY / AQ_TIME_INT_MULTIPLY / AQ_QSTEP_INT_MULTIPLY.
//
// @param iType      unused here (kept for the common processing interface).
// @param pSrcPixMap current (source) frame; luma plane in pPixel[0].
// @param pRefPixMap reference frame; luma plane in pPixel[0].
// @return RET_SUCCESS (the initial RET_INVALIDPARAM is always overwritten).
EResult CAdaptiveQuantization::Process(int32_t iType, SPixMap *pSrcPixMap, SPixMap *pRefPixMap)
{
  EResult eReturn = RET_INVALIDPARAM;

  int32_t iWidth = pSrcPixMap->sRect.iRectWidth;
  int32_t iHeight = pSrcPixMap->sRect.iRectHeight;
  int32_t iMbWidth = iWidth >> 4;             // frame width in macroblocks
  int32_t iMbHeight = iHeight >> 4;           // frame height in macroblocks
  int32_t iMbTotalNum = iMbWidth * iMbHeight; // total number of MBs

  SMotionTextureUnit *pMotionTexture = NULL;
  SVAACalcResult *pVaaCalcResults = NULL;
  int32_t iMotionTextureIndexToDeltaQp = 0;
  int32_t iAverMotionTextureIndexToDeltaQp = 0;  // double to uint32
  int64_t iAverageMotionIndex = 0;      // double to float
  int64_t iAverageTextureIndex = 0;

  int64_t iQStep = 0;
  int64_t iLumaMotionDeltaQp = 0;
  int64_t iLumaTextureDeltaQp = 0;

  uint8_t *pRefFrameY = NULL, *pCurFrameY = NULL;
  int32_t iRefStride = 0, iCurStride = 0;

  uint8_t *pRefFrameTmp = NULL, *pCurFrameTmp = NULL;
  int32_t i = 0, j = 0;

  pRefFrameY = (uint8_t *)pRefPixMap->pPixel[0];
  pCurFrameY = (uint8_t *)pSrcPixMap->pPixel[0];

  iRefStride = pRefPixMap->iStride[0];
  iCurStride = pSrcPixMap->iStride[0];

  // Pass 1: per-MB motion/texture indices plus their frame-wide sums.
  iAverageMotionIndex = 0;
  iAverageTextureIndex = 0;
  pMotionTexture = m_sAdaptiveQuantParam.pMotionTextureUnit;
  pVaaCalcResults = m_sAdaptiveQuantParam.pCalcResult;

  if (pVaaCalcResults->pRefY == pRefFrameY && pVaaCalcResults->pCurY == pCurFrameY)
  {
    // Fast path: the VAA stage already computed per-8x8 SAD and per-16x16
    // SSD/sum tables for exactly this frame pair, so the two variances can be
    // derived from the tables without touching pixels again. (The original
    // code also stepped pRefFrameTmp/pCurFrameTmp/pRefFrameY/pCurFrameY here;
    // those pointers are never read on this path, so the dead arithmetic has
    // been removed.)
    int32_t iMbIndex = 0;
    int32_t iSumDiff, iSQDiff, uiSum, iSQSum;

    for (j = 0; j < iMbHeight; j++)//for each Mb
    {
      for (i = 0; i < iMbWidth; i++)
      {
        // SAD of the whole MB = sum of its four 8x8 SADs.
        iSumDiff = pVaaCalcResults->pSad8x8[iMbIndex][0];
        iSumDiff += pVaaCalcResults->pSad8x8[iMbIndex][1];
        iSumDiff += pVaaCalcResults->pSad8x8[iMbIndex][2];
        iSumDiff += pVaaCalcResults->pSad8x8[iMbIndex][3];

        iSQDiff = pVaaCalcResults->pSsd16x16[iMbIndex];
        uiSum = pVaaCalcResults->pSum16x16[iMbIndex];
        iSQSum = pVaaCalcResults->pSumOfSquare16x16[iMbIndex];

        // Motion index: variance of the frame difference, E[d^2] - E[d]^2
        // (>> 8 divides by the 256 pixels of a 16x16 block).
        iSumDiff = iSumDiff >> 8;
        pMotionTexture->uiMotionIndex = (iSQDiff >> 8) - (iSumDiff * iSumDiff);

        // Texture index: variance of the source block, E[p^2] - E[p]^2.
        uiSum = uiSum >> 8;
        pMotionTexture->uiTextureIndex = (iSQSum >> 8) - (uiSum * uiSum);

        iAverageMotionIndex += pMotionTexture->uiMotionIndex;
        iAverageTextureIndex += pMotionTexture->uiTextureIndex;
        pMotionTexture++;
        ++iMbIndex;
      }
    }
  }
  else
  {
    // Slow path: compute both variances directly from the two luma planes.
    for (j = 0; j < iMbHeight; j++)
    {
      pRefFrameTmp = pRefFrameY;
      pCurFrameTmp = pCurFrameY;

      for (i = 0; i < iMbWidth; i++)
      {
        m_pfVar(pRefFrameTmp, iRefStride, pCurFrameTmp, iCurStride, pMotionTexture);
        iAverageMotionIndex += pMotionTexture->uiMotionIndex;
        iAverageTextureIndex += pMotionTexture->uiTextureIndex;
        pMotionTexture++;
        pRefFrameTmp += MB_WIDTH_LUMA;
        pCurFrameTmp += MB_WIDTH_LUMA;
      }
      // Advance both planes by one MB row (16 lines).
      pRefFrameY += (iRefStride) << 4;
      pCurFrameY += (iCurStride) << 4;
    }
  }
  // Frame averages, scaled into fixed point.
  iAverageMotionIndex = WELS_DIV_ROUND64(iAverageMotionIndex * AQ_INT_MULTIPLY, iMbTotalNum);//AQ_INT_MULTIPLY = 10000000
  iAverageTextureIndex = WELS_DIV_ROUND64(iAverageTextureIndex * AQ_INT_MULTIPLY, iMbTotalNum);

  // Guard against (near-)zero averages before they are used as divisors
  // below: a flat/static frame gets a neutral average of 1.0 in fixed point.
  if ((iAverageMotionIndex <= AQ_PESN) && (iAverageMotionIndex >= -AQ_PESN))//AQ_PESN = 10
  {
    iAverageMotionIndex = AQ_INT_MULTIPLY;
  }

  if ((iAverageTextureIndex <= AQ_PESN) && (iAverageTextureIndex >= -AQ_PESN))
  {
    iAverageTextureIndex = AQ_INT_MULTIPLY;
  }
  // Weight the averages. The texture weight is the only difference between
  // the two AQ modes: 1.0 for AQ_QUALITY_MODE, 0.875 for AQ_BITRATE_MODE.
  iAverMotionTextureIndexToDeltaQp = 0;
  iAverageMotionIndex = WELS_DIV_ROUND64(AVERAGE_TIME_MOTION * iAverageMotionIndex, AQ_TIME_INT_MULTIPLY);//AVERAGE_TIME_MOTION = 3000,AQ_TIME_INT_MULTIPLY = 10000

  if (m_sAdaptiveQuantParam.iAdaptiveQuantMode == AQ_QUALITY_MODE)
  {
    iAverageTextureIndex = WELS_DIV_ROUND64(AVERAGE_TIME_TEXTURE_QUALITYMODE * iAverageTextureIndex, AQ_TIME_INT_MULTIPLY);//AVERAGE_TIME_TEXTURE_QUALITYMODE = 10000,AQ_TIME_INT_MULTIPLY = 10000
  }
  else
  {
    iAverageTextureIndex = WELS_DIV_ROUND64(AVERAGE_TIME_TEXTURE_BITRATEMODE * iAverageTextureIndex, AQ_TIME_INT_MULTIPLY);//AVERAGE_TIME_TEXTURE_BITRATEMODE = 8750,AQ_TIME_INT_MULTIPLY = 10000
  }
  int64_t iAQ_EPSN = -((int64_t)AQ_PESN * AQ_TIME_INT_MULTIPLY * AQ_QSTEP_INT_MULTIPLY / AQ_INT_MULTIPLY);//iAQ_EPSN = -1
  pMotionTexture = m_sAdaptiveQuantParam.pMotionTextureUnit;
  // Pass 2: map each MB's indices to a delta QP.
  for (j = 0; j < iMbHeight; j++)//for each Mb
  {
    for (i = 0; i < iMbWidth; i++)
    {
      // Texture term: the ratio a of this MB's texture index to the weighted
      // frame average is pushed through (a - 1) / (a + alpha), a saturating
      // curve whose output is bounded to roughly +-6 QP.
      int64_t a = WELS_DIV_ROUND64((int64_t)(pMotionTexture->uiTextureIndex) * AQ_INT_MULTIPLY * AQ_TIME_INT_MULTIPLY,
                                   iAverageTextureIndex);
      iQStep = WELS_DIV_ROUND64((a - AQ_TIME_INT_MULTIPLY) * AQ_QSTEP_INT_MULTIPLY, (a + MODEL_ALPHA));//AQ_QSTEP_INT_MULTIPLY = 100, MODEL_ALPHA = 9910
      iLumaTextureDeltaQp = MODEL_TIME * iQStep;// range +- 6, MODEL_TIME = (58185)

      iMotionTextureIndexToDeltaQp = ((int32_t)(iLumaTextureDeltaQp / (AQ_TIME_INT_MULTIPLY)));

      // Motion term: same saturating curve applied to the motion index.
      a = WELS_DIV_ROUND64(((int64_t)pMotionTexture->uiMotionIndex) * AQ_INT_MULTIPLY * AQ_TIME_INT_MULTIPLY,
                           iAverageMotionIndex);
      iQStep = WELS_DIV_ROUND64((a - AQ_TIME_INT_MULTIPLY) * AQ_QSTEP_INT_MULTIPLY, (a + MODEL_ALPHA));
      iLumaMotionDeltaQp = MODEL_TIME * iQStep;// range +- 6

      // Quality mode only applies the motion term when it lowers QP (delta
      // below -epsilon); bitrate mode always adds it.
      if ((m_sAdaptiveQuantParam.iAdaptiveQuantMode == AQ_QUALITY_MODE && iLumaMotionDeltaQp < iAQ_EPSN)
          || (m_sAdaptiveQuantParam.iAdaptiveQuantMode == AQ_BITRATE_MODE))
      {
        iMotionTextureIndexToDeltaQp += ((int32_t)(iLumaMotionDeltaQp / (AQ_TIME_INT_MULTIPLY)));
      }
      // Per-MB map loses the AQ_QSTEP_INT_MULTIPLY scale; the accumulated
      // average keeps it (the rate control divides it out later).
      m_sAdaptiveQuantParam.pMotionTextureIndexToDeltaQp[j * iMbWidth + i] = (int8_t)(iMotionTextureIndexToDeltaQp /
                                                                                      AQ_QSTEP_INT_MULTIPLY);
      iAverMotionTextureIndexToDeltaQp += iMotionTextureIndexToDeltaQp;
      pMotionTexture++;
    }
  }
  m_sAdaptiveQuantParam.iAverMotionTextureIndexToDeltaQp = iAverMotionTextureIndexToDeltaQp / iMbTotalNum;

  eReturn = RET_SUCCESS;

  return eReturn;
}

AQ模式的应用在下列函数中实现:

在RcCalculatePictureQp函数中,会对图像级的QP减去AQ模块计算出来的QP调整值的均值;

在RcCalculateMbQp函数中,会对宏块级的QP加上AQ模块计算出来的对应宏块的QP调整值;

// Computes the picture-level luma QP for the current frame.
//
// First-P frame uses the initial QP; a BITS_EXCEEDED frame bumps the previous
// QP by DELTA_QP_BGD_THD; otherwise the QP is derived from the linear
// complexity model (QStep proportional to complexity / target bits). The
// result is clamped to [iMinFrameQp, iMaxFrameQp], which are themselves
// biased by the temporal-layer change (iDeltaQpTemporal), then optionally
// lowered by the AQ average delta. Writes pWelsSvcRc->iQStep,
// iLastCalculatedQScale and pEncCtx->iGlobalQp.
//
// @param pEncCtx encoder context (rate-control state, VAA results, params).
void RcCalculatePictureQp(sWelsEncCtx *pEncCtx)
{
  SWelsSvcRc *pWelsSvcRc = &pEncCtx->pWelsSvcRc[pEncCtx->uiDependencyId];
  int32_t iTl = pEncCtx->uiTemporalId;
  SRCTemporal *pTOverRc = &pWelsSvcRc->pTemporalOverRc[iTl];
  int32_t iLumaQp = 0;
  int32_t iDeltaQpTemporal = 0;
  int64_t iFrameComplexity = pEncCtx->pVaa->sComplexityAnalysisParam.iFrameComplexity;

  if (pEncCtx->pSvcParam->iUsageType == SCREEN_CONTENT_REAL_TIME)
  {
    // Screen content uses its own complexity measure.
    SVAAFrameInfoExt *pVaa = static_cast<SVAAFrameInfoExt *>(pEncCtx->pVaa);
    iFrameComplexity = pVaa->sComplexityScreenParam.iFrameComplexity;
  }

  if (0 == pTOverRc->iPFrameNum)
  {
    // No P frame coded yet in this temporal layer: start from the initial QP.
    iLumaQp = pWelsSvcRc->iInitialQp;
  }
  else if (pWelsSvcRc->iCurrentBitsLevel == BITS_EXCEEDED)
  {
    // Budget blown: force the QP up from the last one.
    iLumaQp = pWelsSvcRc->iLastCalculatedQScale + DELTA_QP_BGD_THD;
// limit QP
    int32_t iLastIdxCodecInVGop = pWelsSvcRc->iFrameCodedInVGop - 1;

    if (iLastIdxCodecInVGop < 0)
      iLastIdxCodecInVGop += VGOP_SIZE;
    int32_t iTlLast = pWelsSvcRc->iTlOfFrames[iLastIdxCodecInVGop];
    iDeltaQpTemporal = iTl - iTlLast;

    // Widen the bias by one when crossing into/out of the base layer.
    if (0 == iTlLast && iTl > 0)
      iDeltaQpTemporal += 1;
    else if (0 == iTl && iTlLast > 0)
      iDeltaQpTemporal -= 1;
  }
  else
  {
    // Normal path: QStep from the linear complexity model, with the
    // complexity ratio clipped to a sane range.
    int64_t iCmplxRatio = WELS_DIV_ROUND64(iFrameComplexity * INT_MULTIPLY,
                                           pTOverRc->iFrameCmplxMean);
    iCmplxRatio = WELS_CLIP3(iCmplxRatio, INT_MULTIPLY - FRAME_CMPLX_RATIO_RANGE, INT_MULTIPLY + FRAME_CMPLX_RATIO_RANGE);

    pWelsSvcRc->iQStep = WELS_DIV_ROUND((pTOverRc->iLinearCmplx * iCmplxRatio), (pWelsSvcRc->iTargetBits * INT_MULTIPLY));
    iLumaQp = RcConvertQStep2Qp(pWelsSvcRc->iQStep);
    WelsLog(&(pEncCtx->sLogCtx), WELS_LOG_DEBUG,
            "iCmplxRatio = %d,frameComplexity = %" PRId64 ",iFrameCmplxMean = %" PRId64 ",iQStep = %d,iLumaQp = %d", (int)iCmplxRatio,
            iFrameComplexity, pTOverRc->iFrameCmplxMean, pWelsSvcRc->iQStep, iLumaQp);
// limit QP
    int32_t iLastIdxCodecInVGop = pWelsSvcRc->iFrameCodedInVGop - 1;

    if (iLastIdxCodecInVGop < 0)
      iLastIdxCodecInVGop += VGOP_SIZE;
    int32_t iTlLast = pWelsSvcRc->iTlOfFrames[iLastIdxCodecInVGop];
    // Fix: assign the outer iDeltaQpTemporal instead of declaring a shadowing
    // local, so the temporal-layer bias actually reaches the
    // iMinFrameQp/iMaxFrameQp clamps below (matching the BITS_EXCEEDED branch,
    // where the outer variable is assigned).
    iDeltaQpTemporal = iTl - iTlLast;

    if (0 == iTlLast && iTl > 0)
      iDeltaQpTemporal += 1;
    else if (0 == iTl && iTlLast > 0)
      iDeltaQpTemporal -= 1;
  }
  // Frame QP bounds: last QP +- allowed frame delta, biased by the temporal
  // change and clipped to the layer's absolute [iMinQp, iMaxQp].
  pWelsSvcRc->iMinFrameQp = WELS_CLIP3(pWelsSvcRc->iLastCalculatedQScale - pWelsSvcRc->iFrameDeltaQpLower +
                                       iDeltaQpTemporal, pTOverRc->iMinQp, pTOverRc->iMaxQp);
  pWelsSvcRc->iMaxFrameQp = WELS_CLIP3(pWelsSvcRc->iLastCalculatedQScale + pWelsSvcRc->iFrameDeltaQpUpper +
                                       iDeltaQpTemporal, pTOverRc->iMinQp, pTOverRc->iMaxQp);

  iLumaQp = WELS_CLIP3(iLumaQp, pWelsSvcRc->iMinFrameQp, pWelsSvcRc->iMaxFrameQp);

  if (pEncCtx->pSvcParam->bEnableAdaptiveQuant)//if EnableAdaptiveQuant = true,then adjust Qp for a picture
  {
    // Subtract the AQ average delta (still scaled by INT_MULTIPLY) from the
    // picture QP, then re-clip to the frame bounds.
    iLumaQp = WELS_DIV_ROUND(iLumaQp * INT_MULTIPLY - pEncCtx->pVaa->sAdaptiveQuantParam.iAverMotionTextureIndexToDeltaQp,
                             INT_MULTIPLY);//INT_MULTIPLY = 100
    iLumaQp = WELS_CLIP3(iLumaQp, pWelsSvcRc->iMinFrameQp, pWelsSvcRc->iMaxFrameQp);
  }
  pWelsSvcRc->iQStep = RcConvertQp2QStep(iLumaQp);
  pWelsSvcRc->iLastCalculatedQScale = iLumaQp;
  pEncCtx->iGlobalQp = iLumaQp;
}
// Derives the luma/chroma QP pair for a single macroblock from the
// slice-level QP, optionally biased by the per-MB adaptive-quantization
// delta, and writes both values into pCurMb.
void RcCalculateMbQp(sWelsEncCtx *pEncCtx, SSlice *pSlice, SMB *pCurMb)
{
  SWelsSvcRc *pWelsSvcRc = &pEncCtx->pWelsSvcRc[pEncCtx->uiDependencyId];
  SRCSlicing *pSOverRc = &pSlice->sSlicingOverRc;
  SDqLayer *pCurLayer = pEncCtx->pCurDqLayer;
  const uint8_t kuiChromaQpIndexOffset = pCurLayer->sLayerInfo.pPpsP->uiChromaQpIndexOffset;

  int32_t iLumaQp = pSOverRc->iCalculatedQpSlice;

  if (pEncCtx->pSvcParam->bEnableAdaptiveQuant)
  {
    // Shift the slice QP by this MB's precomputed AQ delta and keep the
    // result inside the current frame's QP bounds.
    const int32_t iAdjustedQp =
        iLumaQp + pEncCtx->pVaa->sAdaptiveQuantParam.pMotionTextureIndexToDeltaQp[pCurMb->iMbXY];
    iLumaQp = (int8_t)WELS_CLIP3(iAdjustedQp, pWelsSvcRc->iMinFrameQp, pWelsSvcRc->iMaxFrameQp);
  }
  // Chroma QP follows luma through the standard mapping table, after adding
  // the PPS chroma QP index offset.
  pCurMb->uiChromaQp = g_kuiChromaQpTable[CLIP3_QP_0_51(iLumaQp + kuiChromaQpIndexOffset)];
  pCurMb->uiLumaQp = iLumaQp;
}

 

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值