在《Android视频编码源码分享》一文中,我们主要学习的是Android视频编码的实现方法,本文要给大家讲述的则是Android中视频的解码过程。
在Android中,解码器的具体实现会被封装在一个OpenMAX组件中,对于视频的解码器,相应的组件为OpenmaxMpeg4AO,该组件被Mpeg4OmxComponentFactory、H263OmxComponentFactory等组件工厂共有,用来实现对H.263和MPEG-4的解码。提供给回放引擎的接口为OpenmaxMpeg4AO::ProcessData()方法,实际的解码工作在Mpeg4Decoder_OMX::Mp4DecodeVideo()方法中实现。下面是该方法的具体实现:
代码:视频的解码过程
MX_BOOL Mpeg4Decoder_OMX::Mp4DecodeVideo(OMX_BUFFERHEADERTYPE* aOutBuffer, OMX_U32*
aOutputLength,OMX_U8** aInputBuf, OMX_U32* aInBufSize,
OMX_PARAM_PORTDEFINITIONTYPE* aPortParam, OMX_BOOL aDeBlocking,
OMX_S32* aFrameCount, OMX_BOOL aMarkerFlag, OMX_BOOL *aResizeFlag)
{
OMX_BOOL Status=OMX_TRUE;
OMX_S32 OldWidth, OldHeight;
OldWidth=aPortParam->format.video.nFrameWidth;
OldHeight=aPortParam->format.video.nFrameHeight;
*aResizeFlag=OMX_FALSE;
#ifdef _DEBUG
static OMX_U32 FrameCount=0;
#endif
uint UseExtTimestamp=0;
uint32 TimeStamp;
OMX_S32 InputSize, InitSize;
if ((Mpeg4InitCompleteFlag==OMX_FALSE) && (MPEG4_MODE==CodecMode))
{
if (!aMarkerFlag)
{
InitSize=GetVideoHeader(0, *aInputBuf, *aInBufSize);
}
else
{
InitSize=*aInBufSize;
}
//初始化解码
if (PV_TRUE!=InitializeVideoDecode(&iDisplay_Width, &iDisplay_Height,aInputBuf, (OMX_S32*)aInBufSize, MPEG4_MODE, aDeBlocking))
return OMX_FALSE;
Mpeg4InitCompleteFlag=OMX_TRUE;
aPortParam->format.video.nFrameWidth=iDisplay_Width;
aPortParam->format.video.nFrameHeight=iDisplay_Height;
OMX_U32 min_stride=((aPortParam->format.video.nFrameWidth+15) & (~15));
OMX_U32 min_sliceheight=((aPortParam->format.video.nFrameHeight+15) & (~15));
aPortParam->format.video.nStride=min_stride;
aPortParam->format.video.nSliceHeight=min_sliceheight;
//解码器组件总是输出YUV420格式
aPortParam->nBufferSize=(aPortParam->format.video.nSliceHeight*aPortParam->
format.video.nStride * 3)>>1;
iFrameSize=(aPortParam->format.video.nSliceHeight * aPortParam->format. video.nStride);
if ((iDisplay_Width !=OldWidth) || (iDisplay_Height !=OldHeight))
{
*aResizeFlag=OMX_TRUE;
}
else if (NULL !=aOutBuffer)
{
PVSetReferenceYUV(&VideoCtrl, (uint8*)(aOutBuffer->pBuffer));
BufferCtrlStruct *pBCTRL=(BufferCtrlStruct *)(aOutBuffer-> pOutputPortPrivate);
pBCTRL->iRefCount++;
ipRefCtrPreviousReferenceBuffer=&(pBCTRL->iRefCount);
iReferenceYUVWasSet=OMX_TRUE;
}
*aFrameCount=1;
*aInBufSize -=InitSize;
return OMX_TRUE;
}
if ((*(OMX_S32*)aInBufSize) <=0)
{
return OMX_FALSE;
}
TimeStamp=0xFFFFFFFF;
InputSize=*aInBufSize;
if ((OMX_FALSE==Mpeg4InitCompleteFlag) && (H263_MODE==CodecMode))
{
int32 aligned_width, aligned_height;
int32 display_width, display_height;
//获取配置信息
if (iGetM4VConfigInfo(*aInputBuf, *aInBufSize, &aligned_width, &aligned_height,&display_width, &display_height))
{
return OMX_FALSE;
}
Mpeg4InitCompleteFlag=OMX_TRUE;
iDisplay_Width=display_width;
iDisplay_Height=display_height;
aPortParam->format.video.nFrameWidth=iDisplay_Width; // use non 16byte aligned values (display_width) for H263
aPortParam->format.video.nFrameHeight=iDisplay_Height; // like in the case of M4V (PVGetVideoDimensions also returns display_width/height)
OMX_U32 min_stride=((aPortParam->format.video.nFrameWidth+15) & (~15));
OMX_U32 min_sliceheight=((aPortParam->format.video.nFrameHeight+15) & (~15));
aPortParam->format.video.nStride=min_stride;
aPortParam->format.video.nSliceHeight=min_sliceheight;
aPortParam->nBufferSize=(aPortParam->format.video.nSliceHeight * aPortParam->format.video.nStride * 3) >> 1;
iFrameSize=(aPortParam->format.video.nSliceHeight * aPortParam->format.video.nStride);
if ((iDisplay_Width !=OldWidth)||(iDisplay_Height !=OldHeight))
{
*aResizeFlag=OMX_TRUE;
}
else if (NULL !=aOutBuffer)
{
PVSetReferenceYUV(&VideoCtrl, (uint8*)(aOutBuffer->pBuffer));
BufferCtrlStruct *pBCTRL=(BufferCtrlStruct *)(aOutBuffer->pOutputPortPrivate);
pBCTRL->iRefCount++;
ipRefCtrPreviousReferenceBuffer=&(pBCTRL->iRefCount);
iReferenceYUVWasSet=OMX_TRUE;
}
*aFrameCount=1;
return OMX_TRUE;
}
if (iReferenceYUVWasSet==OMX_FALSE)
{
PVSetReferenceYUV(&VideoCtrl, (uint8*)(aOutBuffer->pBuffer));
BufferCtrlStruct *pBCTRL=(BufferCtrlStruct *)(aOutBuffer-> pOutputPortPrivate);
pBCTRL->iRefCount++;
ipRefCtrPreviousReferenceBuffer=&(pBCTRL->iRefCount);
iReferenceYUVWasSet=OMX_TRUE;
return OMX_TRUE;
}
#if PROFILING_ON
OMX_U32 StartTime=OsclTickCount::TickCount();
#endif
//针对一帧进行解码
Status=(OMX_BOOL) PVDecodeVideoFrame(&VideoCtrl, aInputBuf,&TimeStamp,(int32*)aInBufSize,&UseExtTimestamp,(OMX_U8*)(aOutBuffer->pBuffer));
#if PROFILING_ON
OMX_U32 EndTime=OsclTickCount::TickCount();
iTotalTicks +=(EndTime-StartTime);
#endif
if (Status==PV_TRUE)
{
#ifdef _DEBUG
#endif
*aInputBuf +=(InputSize-*aInBufSize);
(*ipRefCtrPreviousReferenceBuffer)--;
if ((*ipRefCtrPreviousReferenceBuffer)==0)
{
ipOMXComponent->iNumAvailableOutputBuffers++;
}
BufferCtrlStruct *pBCTRL=(BufferCtrlStruct *)(aOutBuffer-> pOutputPortPrivate);
pBCTRL->iRefCount++;
ipRefCtrPreviousReferenceBuffer=&(pBCTRL->iRefCount);
*aOutputLength=(iFrameSize * 3)>>1;
(*aFrameCount)++;
}
else
{
*aInBufSize=InputSize;
*aOutputLength=0;
}
return Status;
}
在进行解码时,会首先判断编码格式是MPEG4_MODE还是H263_MODE,如果是MPEG4_MODE,则首先通过GetVideoHeader()方法读取视频的头信息,然后通过InitializeVideoDecode()方法初始化视频解码器,最终将数据解码为YUV420格式的元数据。如果编码格式为H263_MODE,则首先通过iGetM4VConfigInfo()方法获取M4V文件的配置信息,最终将数据解码为YUV420格式的元数据。