 
A Walkthrough of the Android Video Encoding Source Code

In Android, the concrete implementation of the encoder is wrapped inside an OpenMAX component. For the MP4 encoder the corresponding component is OmxComponentMpeg4EncAO, which is shared by the component factories Mpeg4EncOmxComponentFactory and H263EncOmxComponentFactory and implements both H.263 and MPEG-4 encoding. The interface exposed to the recording engine is OmxComponentMpeg4EncAO::ProcessData(), while the actual encoding work is done in Mpeg4Encoder_OMX::Mp4EncodeVideo(). The implementation of that method is listed below:

Code: the video encoding process

OMX_BOOL Mpeg4Encoder_OMX::Mp4EncodeVideo(OMX_U8* aOutBuffer,
    OMX_U32*   aOutputLength,        // output length
    OMX_BOOL*  aBufferOverRun,       // set to OMX_TRUE when the output buffer overruns
    OMX_U8**   aOverBufferPointer,   // points to the overrun buffer, if any
    OMX_U8*    aInBuffer,            // input buffer
    OMX_U32    aInBufSize,
    OMX_TICKS  aInTimeStamp,
    OMX_TICKS* aOutTimeStamp,
    OMX_BOOL*  aSyncFlag)            // OMX_TRUE when the encoded frame is a sync (key) frame
{
    *aSyncFlag = OMX_FALSE;

    if (OMX_FALSE == iModTimeInitialized)
    {
        iNextModTime = aInTimeStamp;
        iModTimeInitialized = OMX_TRUE;
    }

    // Check that the input buffer is large enough for the configured source format
    if (OMX_COLOR_FormatYUV420Planar == iVideoFormat)           // planar YUV420 source data
    {
        if (aInBufSize < (OMX_U32)((iSrcWidth * iSrcHeight * 3) >> 1))
        {
            *aOutputLength = 0;
            return OMX_FALSE;
        }
    }
    else if (OMX_COLOR_Format24bitRGB888 == iVideoFormat)       // RGB888 source data
    {
        if (aInBufSize < (OMX_U32)(iSrcWidth * iSrcHeight * 3))
        {
            *aOutputLength = 0;
            return OMX_FALSE;
        }
    }
    else if (OMX_COLOR_Format12bitRGB444 == iVideoFormat)       // RGB444 source data
    {
        if (aInBufSize < (OMX_U32)(iSrcWidth * iSrcHeight * 2))
        {
            *aOutputLength = 0;
            return OMX_FALSE;
        }
    }
    else if (OMX_COLOR_FormatYUV420SemiPlanar == iVideoFormat)  // semi-planar YUV420 source data
    {
        if (aInBufSize < (OMX_U32)((iSrcWidth * iSrcHeight * 3) >> 1))
        {
            *aOutputLength = 0;
            return OMX_FALSE;
        }
    }
    else if ((OMX_COLOR_FormatYCbYCr == iVideoFormat) ||
             (OMX_COLOR_FormatYCrYCb == iVideoFormat) ||
             (OMX_COLOR_FormatCbYCrY == iVideoFormat) ||
             (OMX_COLOR_FormatCrYCbY == iVideoFormat))           // packed 4:2:2 source data
    {
        if (aInBufSize < (OMX_U32)(iSrcWidth * iSrcHeight * 2))
        {
            *aOutputLength = 0;
            return OMX_FALSE;
        }
    }

    // Start encoding the input buffer
    VideoEncFrameIO vid_in, vid_out;
    Int   Size;
    Bool  status;
    ULong modTime;
    Int   nLayer = 0;

    if ((iNextModTime * 1000) <= aInTimeStamp)
    {
        Size = *aOutputLength;

#if PROFILING_ON
        OMX_U32 Start = OsclTickCount::TickCount();
#endif

        if (iVideoFormat == OMX_COLOR_FormatYUV420Planar)
        {
            if (iYUVIn)
            {   // copy the YUV source data into the aligned intermediate buffer
                CopyToYUVIn(aInBuffer, iSrcWidth, iSrcHeight,
                            ((iSrcWidth + 15) >> 4) << 4, ((iSrcHeight + 15) >> 4) << 4);
                iVideoIn = iYUVIn;
            }
            else
            {
                iVideoIn = aInBuffer;
            }
        }
        else if ((iVideoFormat == OMX_COLOR_Format12bitRGB444) ||
                 (iVideoFormat == OMX_COLOR_Format24bitRGB888) ||
                 (iVideoFormat == OMX_COLOR_FormatYUV420SemiPlanar) ||
                 (iVideoFormat == OMX_COLOR_FormatYCbYCr) ||
                 (iVideoFormat == OMX_COLOR_FormatYCrYCb) ||
                 (iVideoFormat == OMX_COLOR_FormatCbYCrY) ||
                 (iVideoFormat == OMX_COLOR_FormatCrYCbY))
        {   // convert the source data format to planar YUV420
            ccRGBtoYUV->Convert((uint8*)aInBuffer, iYUVIn);
            iVideoIn = iYUVIn;
        }

#if PROFILING_ON
        OMX_U32 Stop = OsclTickCount::TickCount();
        iProfileStats.iColorConversionTime += (Stop - Start);
        ++iProfileStats.iTotalNumFrames;
        OMX_U32 StartTime = OsclTickCount::TickCount();
#endif

        *aOutTimeStamp = aInTimeStamp;

        vid_in.height = ((iSrcHeight + 15) >> 4) << 4;
        vid_in.pitch  = ((iSrcWidth + 15) >> 4) << 4;
        vid_in.timestamp = (ULong)(aInTimeStamp / 1000);
        vid_in.yChan = (UChar*)iVideoIn;
        vid_in.uChan = (UChar*)(iVideoIn + vid_in.height * vid_in.pitch);
        vid_in.vChan = vid_in.uChan + ((vid_in.height * vid_in.pitch) >> 2);

        // encode one frame of YUV data
        status = PVEncodeVideoFrame(&iEncoderControl, &vid_in, &vid_out,
                                    &modTime, (UChar*)aOutBuffer, &Size, &nLayer);

#if PROFILING_ON
        OMX_U32 EndTime = OsclTickCount::TickCount();
        iProfileStats.iTotalEncTime += (EndTime - StartTime);
        if ((PV_TRUE == status) && (Size > 0))
        {
            ++iProfileStats.iNumFramesEncoded;
            iProfileStats.iDuration = vid_out.timestamp;
        }
#endif

        if (status == PV_TRUE)
        {
            iNextModTime = modTime;    // in milliseconds

            if ((nLayer >= 0) && ((OMX_U32)Size > *aOutputLength))
            {
                *aOverBufferPointer = PVGetOverrunBuffer(&iEncoderControl);
                *aBufferOverRun = OMX_TRUE;
            }

            *aOutputLength = Size;

            if (Size > 0)
            {
                // convert milliseconds to microseconds
                *aOutTimeStamp = ((OMX_TICKS)vid_out.timestamp * 1000);
                PVGetHintTrack(&iEncoderControl, &iHintTrack);
                if (0 == iHintTrack.CodeType)
                {
                    *aSyncFlag = OMX_TRUE;
                }
            }
            return OMX_TRUE;
        }
        else
        {
            *aOutputLength = 0;
            return OMX_FALSE;
        }
    }
    else
    {
        *aOutputLength = 0;
        return OMX_TRUE;
    }
}
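
The size checks at the top of Mp4EncodeVideo() follow directly from how many bytes each source format needs per frame. The stand-alone helper below makes that arithmetic explicit; it is only an illustrative sketch, and the function name and the reduced format enum are not part of the OpenCORE sources.

#include <cstdint>
#include <cstdio>

// Illustrative only: a reduced enum standing in for the OMX_COLOR_FORMATTYPE
// values handled by Mp4EncodeVideo(); the real values live in OMX_IVCommon.h.
enum class SrcFormat { YUV420Planar, YUV420SemiPlanar, RGB888, RGB444, PackedYCbCr422 };

// Minimum number of input bytes required for one width x height frame,
// mirroring the checks performed at the start of Mp4EncodeVideo().
static uint32_t MinFrameBytes(SrcFormat fmt, uint32_t width, uint32_t height)
{
    switch (fmt)
    {
        case SrcFormat::YUV420Planar:       // Y plane + U/4 + V/4 = w*h*3/2 bytes
        case SrcFormat::YUV420SemiPlanar:
            return (width * height * 3) >> 1;
        case SrcFormat::RGB888:             // 3 bytes per pixel
            return width * height * 3;
        case SrcFormat::RGB444:             // 12 bits stored in 16, i.e. 2 bytes per pixel
        case SrcFormat::PackedYCbCr422:     // 2 bytes per pixel (YCbYCr and friends)
            return width * height * 2;
    }
    return 0;
}

int main()
{
    // Example: a QVGA (320x240) frame in each of the supported layouts.
    std::printf("YUV420: %u bytes\n", (unsigned)MinFrameBytes(SrcFormat::YUV420Planar, 320, 240));
    std::printf("RGB888: %u bytes\n", (unsigned)MinFrameBytes(SrcFormat::RGB888, 320, 240));
    std::printf("4:2:2 : %u bytes\n", (unsigned)MinFrameBytes(SrcFormat::PackedYCbCr422, 320, 240));
    return 0;
}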

As the Mp4EncodeVideo() listing shows, the encoder first determines the format of the source data. If it is not OMX_COLOR_FormatYUV420Planar, the data is first converted by ColorConvertBase::Convert(); the current frame is then encoded with PVEncodeVideoFrame().
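
For readers who only need the shape of that call sequence, the following sketch condenses it into a simplified per-frame driver. It assumes the OpenCORE header mp4enc_api.h (PVEncodeVideoFrame(), VideoEncControls, VideoEncFrameIO) is on the include path; EncodeOneFrame() and its conversion callback are hypothetical and merely illustrate the order of operations described above, with the callback standing in for ColorConvertBase::Convert().

#include "mp4enc_api.h"   // assumed OpenCORE header: PVEncodeVideoFrame, VideoEncControls, VideoEncFrameIO

// Stand-in for ColorConvertBase::Convert(); pass NULL when the source
// is already planar YUV420 and no conversion is needed.
typedef void (*ConvertToYuvFn)(UChar* src, UChar* dst);

static Bool EncodeOneFrame(VideoEncControls* ctrl,
                           ConvertToYuvFn convert,
                           UChar* inBuf, UChar* yuvScratch,
                           UChar* outBuf, Int* outSize,
                           Int width, Int height, ULong timestampMs)
{
    UChar* yuv = inBuf;
    if (convert)                          // step 1: convert non-YUV420Planar input
    {
        convert(inBuf, yuvScratch);
        yuv = yuvScratch;
    }

    VideoEncFrameIO in, out;              // step 2: describe the 16-pixel-aligned source frame
    in.pitch     = ((width  + 15) >> 4) << 4;
    in.height    = ((height + 15) >> 4) << 4;
    in.timestamp = timestampMs;
    in.yChan     = yuv;
    in.uChan     = yuv + in.pitch * in.height;
    in.vChan     = in.uChan + ((in.pitch * in.height) >> 2);

    ULong nextModTime = 0;
    Int   nLayer      = 0;
    // step 3: encode one frame; *outSize returns the bitstream length
    return PVEncodeVideoFrame(ctrl, &in, &out, &nextModTime,
                              outBuf, outSize, &nLayer);
}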

For MPEG-4, the function that registers the component with the component library is Mpeg4EncRegister(), and the component name is "OMX.PV.mpeg4enc"; for H.263 the registration function is H263EncRegister(), and the component name is "OMX.PV.h263enc".
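
From the client side, a component registered under one of these names would typically be instantiated by name through the OpenMAX IL core. The fragment below is a minimal sketch assuming a standard IL core (OMX_Core.h); OpenCORE may route the call through its own OMX master core instead, and the callback table is left empty here only for brevity.

#include <OMX_Core.h>

// Minimal sketch: a real client must supply working EventHandler,
// EmptyBufferDone and FillBufferDone callbacks; NULL is used only to keep
// the fragment short.
static OMX_CALLBACKTYPE gCallbacks = { NULL, NULL, NULL };

OMX_HANDLETYPE CreateMpeg4Encoder(void)
{
    OMX_HANDLETYPE handle = NULL;

    if (OMX_Init() != OMX_ErrorNone)
        return NULL;

    // "OMX.PV.mpeg4enc" is the name registered by Mpeg4EncRegister();
    // use "OMX.PV.h263enc" (H263EncRegister()) for H.263 instead.
    if (OMX_GetHandle(&handle, (OMX_STRING)"OMX.PV.mpeg4enc",
                      NULL /* app data */, &gCallbacks) != OMX_ErrorNone)
    {
        OMX_Deinit();
        return NULL;
    }
    return handle;
}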

Mpeg4Encoder_OMX supports two encoding modes: MODE_H263 and MODE_MPEG4.
