github.com/mpc-hc/mpc-hc.git
Diffstat (limited to 'src/filters/transform/MPCVideoDec')
-rw-r--r--  src/filters/transform/MPCVideoDec/DXVADecoder.cpp  |  545
-rw-r--r--  src/filters/transform/MPCVideoDec/DXVADecoder.h  |  15
-rw-r--r--  src/filters/transform/MPCVideoDec/DXVADecoderH264.cpp  |  228
-rw-r--r--  src/filters/transform/MPCVideoDec/DXVADecoderMpeg2.cpp  |  81
-rw-r--r--  src/filters/transform/MPCVideoDec/DXVADecoderVC1.cpp  |  95
-rw-r--r--  src/filters/transform/MPCVideoDec/DXVADecoderVC1.h  |  18
-rw-r--r--  src/filters/transform/MPCVideoDec/FfmpegContext.c  |  387
-rw-r--r--  src/filters/transform/MPCVideoDec/FfmpegContext.h  |  3
-rw-r--r--  src/filters/transform/MPCVideoDec/H264QuantizationMatrix.h  |  116
-rw-r--r--  src/filters/transform/MPCVideoDec/IMPCVideoDecFilter.h  |  6
-rw-r--r--  src/filters/transform/MPCVideoDec/MPCAudioDecFilter.cpp  |  96
-rw-r--r--  src/filters/transform/MPCVideoDec/MPCAudioDecFilter.h  |  2
-rw-r--r--  src/filters/transform/MPCVideoDec/MPCFfmpegDec.cpp  |  9
-rw-r--r--  src/filters/transform/MPCVideoDec/MPCVideoDecFilter.cpp  |  1038
-rw-r--r--  src/filters/transform/MPCVideoDec/MPCVideoDecFilter.h  |  61
-rw-r--r--  src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.cpp  |  131
-rw-r--r--  src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.h  |  3
-rw-r--r--  src/filters/transform/MPCVideoDec/TlibavcodecExt.cpp  |  6
-rw-r--r--  src/filters/transform/MPCVideoDec/TlibavcodecExt.h  |  3
-rw-r--r--  src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.cpp  |  70
-rw-r--r--  src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.h  |  17
-rw-r--r--  src/filters/transform/MPCVideoDec/VideoDecOutputPin.cpp  |  31
-rw-r--r--  src/filters/transform/MPCVideoDec/stdafx.cpp  |  6
23 files changed, 1438 insertions(+), 1529 deletions(-)
diff --git a/src/filters/transform/MPCVideoDec/DXVADecoder.cpp b/src/filters/transform/MPCVideoDec/DXVADecoder.cpp
index 771fbc212..374080e74 100644
--- a/src/filters/transform/MPCVideoDec/DXVADecoder.cpp
+++ b/src/filters/transform/MPCVideoDec/DXVADecoder.cpp
@@ -98,8 +98,9 @@ void CDXVADecoder::AllocExecuteParams (int nSize)
{
m_ExecuteParams.pCompressedBuffers = DNew DXVA2_DecodeBufferDesc[nSize];
- for (int i=0; i<nSize; i++)
+ for (int i=0; i<nSize; i++) {
memset (&m_ExecuteParams.pCompressedBuffers[i], 0, sizeof(DXVA2_DecodeBufferDesc));
+ }
}
void CDXVADecoder::SetExtraData (BYTE* pDataIn, UINT nSize)
@@ -117,8 +118,7 @@ void CDXVADecoder::CopyBitstream(BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSize)
void CDXVADecoder::Flush()
{
TRACE ("CDXVADecoder::Flush\n");
- for (int i=0; i<m_nPicEntryNumber; i++)
- {
+ for (int i=0; i<m_nPicEntryNumber; i++) {
m_pPictureStore[i].bRefPicture = false;
m_pPictureStore[i].bInUse = false;
m_pPictureStore[i].bDisplayed = false;
@@ -139,8 +139,7 @@ HRESULT CDXVADecoder::ConfigureDXVA1()
HRESULT hr = S_FALSE;
DXVA_ConfigPictureDecode ConfigRequested;
- if (m_pAMVideoAccelerator)
- {
+ if (m_pAMVideoAccelerator) {
memset (&ConfigRequested, 0, sizeof(ConfigRequested));
ConfigRequested.guidConfigBitstreamEncryption = DXVA_NoEncrypt;
ConfigRequested.guidConfigMBcontrolEncryption = DXVA_NoEncrypt;
@@ -167,13 +166,12 @@ HRESULT CDXVADecoder::ConfigureDXVA1()
m_DXVA2Config.ConfigSpecificIDCT = m_DXVA1Config.bConfigSpecificIDCT;
m_DXVA2Config.Config4GroupedCoefs = m_DXVA1Config.bConfig4GroupedCoefs;
- if (SUCCEEDED (hr))
- {
+ if (SUCCEEDED (hr)) {
writeDXVA_QueryOrReplyFunc (&m_DXVA1Config.dwFunction, DXVA_QUERYORREPLYFUNCFLAG_DECODER_LOCK_QUERY, DXVA_PICTURE_DECODING_FUNCTION);
hr = m_pAMVideoAccelerator->Execute (m_DXVA1Config.dwFunction, &m_DXVA1Config, sizeof(DXVA_ConfigPictureDecode), &ConfigRequested, sizeof(DXVA_ConfigPictureDecode), 0, NULL);
// TODO : check config!
-// ASSERT (ConfigRequested.bConfigBitstreamRaw == 2);
+ // ASSERT (ConfigRequested.bConfigBitstreamRaw == 2);
AMVAUncompDataInfo DataInfo;
DWORD dwNum = COMP_BUFFER_COUNT;
@@ -190,14 +188,15 @@ CDXVADecoder* CDXVADecoder::CreateDecoder (CMPCVideoDecFilter* pFilter, IAMVideo
{
CDXVADecoder* pDecoder = NULL;
- if ((*guidDecoder == DXVA2_ModeH264_E) || (*guidDecoder == DXVA2_ModeH264_F) || (*guidDecoder == DXVA_Intel_H264_ClearVideo))
+ if ((*guidDecoder == DXVA2_ModeH264_E) || (*guidDecoder == DXVA2_ModeH264_F) || (*guidDecoder == DXVA_Intel_H264_ClearVideo)) {
pDecoder = DNew CDXVADecoderH264 (pFilter, pAMVideoAccelerator, H264_VLD, nPicEntryNumber);
- else if (*guidDecoder == DXVA2_ModeVC1_D || *guidDecoder == DXVA_Intel_VC1_ClearVideo)
+ } else if (*guidDecoder == DXVA2_ModeVC1_D || *guidDecoder == DXVA_Intel_VC1_ClearVideo) {
pDecoder = DNew CDXVADecoderVC1 (pFilter, pAMVideoAccelerator, VC1_VLD, nPicEntryNumber);
- else if (*guidDecoder == DXVA2_ModeMPEG2_VLD)
+ } else if (*guidDecoder == DXVA2_ModeMPEG2_VLD) {
pDecoder = DNew CDXVADecoderMpeg2 (pFilter, pAMVideoAccelerator, MPEG2_VLD, nPicEntryNumber);
- else
- ASSERT (FALSE); // Unknown decoder !!
+ } else {
+ ASSERT (FALSE); // Unknown decoder !!
+ }
return pDecoder;
}
@@ -207,14 +206,15 @@ CDXVADecoder* CDXVADecoder::CreateDecoder (CMPCVideoDecFilter* pFilter, IDirectX
{
CDXVADecoder* pDecoder = NULL;
- if ((*guidDecoder == DXVA2_ModeH264_E) || (*guidDecoder == DXVA2_ModeH264_F) || (*guidDecoder == DXVA_Intel_H264_ClearVideo))
+ if ((*guidDecoder == DXVA2_ModeH264_E) || (*guidDecoder == DXVA2_ModeH264_F) || (*guidDecoder == DXVA_Intel_H264_ClearVideo)) {
pDecoder = DNew CDXVADecoderH264 (pFilter, pDirectXVideoDec, H264_VLD, nPicEntryNumber, pDXVA2Config);
- else if (*guidDecoder == DXVA2_ModeVC1_D || *guidDecoder == DXVA_Intel_VC1_ClearVideo)
+ } else if (*guidDecoder == DXVA2_ModeVC1_D || *guidDecoder == DXVA_Intel_VC1_ClearVideo) {
pDecoder = DNew CDXVADecoderVC1 (pFilter, pDirectXVideoDec, VC1_VLD, nPicEntryNumber, pDXVA2Config);
- else if (*guidDecoder == DXVA2_ModeMPEG2_VLD)
+ } else if (*guidDecoder == DXVA2_ModeMPEG2_VLD) {
pDecoder = DNew CDXVADecoderMpeg2 (pFilter, pDirectXVideoDec, MPEG2_VLD, nPicEntryNumber, pDXVA2Config);
- else
- ASSERT (FALSE); // Unknown decoder !!
+ } else {
+ ASSERT (FALSE); // Unknown decoder !!
+ }
return pDecoder;
}
@@ -230,60 +230,61 @@ HRESULT CDXVADecoder::AddExecuteBuffer (DWORD CompressedBufferType, UINT nSize,
//if (CompressedBufferType != DXVA2_PictureParametersBufferType && CompressedBufferType != DXVA2_InverseQuantizationMatrixBufferType)
// dwNumMBs = FFGetMBNumber (m_pFilter->GetAVCtx());
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- DWORD dwTypeIndex;
- LONG lStride;
- dwTypeIndex = GetDXVA1CompressedType (CompressedBufferType);
-
-// TRACE ("Fill : %d - %d\n", dwTypeIndex, m_dwBufferIndex);
- hr = m_pAMVideoAccelerator->GetBuffer(dwTypeIndex, m_dwBufferIndex, FALSE, (void**)&pDXVABuffer, &lStride);
- ASSERT (SUCCEEDED (hr));
-
- if (SUCCEEDED (hr))
- {
- if (CompressedBufferType == DXVA2_BitStreamDateBufferType)
- CopyBitstream (pDXVABuffer, (BYTE*)pBuffer, nSize);
- else
- memcpy (pDXVABuffer, (BYTE*)pBuffer, nSize);
- m_DXVA1BufferInfo[m_dwNumBuffersInfo].dwTypeIndex = dwTypeIndex;
- m_DXVA1BufferInfo[m_dwNumBuffersInfo].dwBufferIndex = m_dwBufferIndex;
- m_DXVA1BufferInfo[m_dwNumBuffersInfo].dwDataSize = nSize;
-
- m_DXVA1BufferDesc[m_dwNumBuffersInfo].dwTypeIndex = dwTypeIndex;
- m_DXVA1BufferDesc[m_dwNumBuffersInfo].dwBufferIndex = m_dwBufferIndex;
- m_DXVA1BufferDesc[m_dwNumBuffersInfo].dwDataSize = nSize;
- m_DXVA1BufferDesc[m_dwNumBuffersInfo].dwNumMBsInBuffer = dwNumMBs;
-
- m_dwNumBuffersInfo++;
- }
- break;
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ DWORD dwTypeIndex;
+ LONG lStride;
+ dwTypeIndex = GetDXVA1CompressedType (CompressedBufferType);
- case ENGINE_DXVA2 :
- UINT nDXVASize;
- hr = m_pDirectXVideoDec->GetBuffer (CompressedBufferType, (void**)&pDXVABuffer, &nDXVASize);
- ASSERT (nSize <= nDXVASize);
+ // TRACE ("Fill : %d - %d\n", dwTypeIndex, m_dwBufferIndex);
+ hr = m_pAMVideoAccelerator->GetBuffer(dwTypeIndex, m_dwBufferIndex, FALSE, (void**)&pDXVABuffer, &lStride);
+ ASSERT (SUCCEEDED (hr));
- if (SUCCEEDED (hr) && (nSize <= nDXVASize))
- {
- if (CompressedBufferType == DXVA2_BitStreamDateBufferType)
- CopyBitstream (pDXVABuffer, (BYTE*)pBuffer, nSize);
- else
- memcpy (pDXVABuffer, (BYTE*)pBuffer, nSize);
+ if (SUCCEEDED (hr)) {
+ if (CompressedBufferType == DXVA2_BitStreamDateBufferType) {
+ CopyBitstream (pDXVABuffer, (BYTE*)pBuffer, nSize);
+ } else {
+ memcpy (pDXVABuffer, (BYTE*)pBuffer, nSize);
+ }
+ m_DXVA1BufferInfo[m_dwNumBuffersInfo].dwTypeIndex = dwTypeIndex;
+ m_DXVA1BufferInfo[m_dwNumBuffersInfo].dwBufferIndex = m_dwBufferIndex;
+ m_DXVA1BufferInfo[m_dwNumBuffersInfo].dwDataSize = nSize;
+
+ m_DXVA1BufferDesc[m_dwNumBuffersInfo].dwTypeIndex = dwTypeIndex;
+ m_DXVA1BufferDesc[m_dwNumBuffersInfo].dwBufferIndex = m_dwBufferIndex;
+ m_DXVA1BufferDesc[m_dwNumBuffersInfo].dwDataSize = nSize;
+ m_DXVA1BufferDesc[m_dwNumBuffersInfo].dwNumMBsInBuffer = dwNumMBs;
+
+ m_dwNumBuffersInfo++;
+ }
+ break;
- m_ExecuteParams.pCompressedBuffers[m_ExecuteParams.NumCompBuffers].CompressedBufferType = CompressedBufferType;
- m_ExecuteParams.pCompressedBuffers[m_ExecuteParams.NumCompBuffers].DataSize = nSize;
- m_ExecuteParams.pCompressedBuffers[m_ExecuteParams.NumCompBuffers].NumMBsInBuffer = dwNumMBs;
- m_ExecuteParams.NumCompBuffers++;
+ case ENGINE_DXVA2 :
+ UINT nDXVASize;
+ hr = m_pDirectXVideoDec->GetBuffer (CompressedBufferType, (void**)&pDXVABuffer, &nDXVASize);
+ ASSERT (nSize <= nDXVASize);
+
+ if (SUCCEEDED (hr) && (nSize <= nDXVASize)) {
+ if (CompressedBufferType == DXVA2_BitStreamDateBufferType) {
+ CopyBitstream (pDXVABuffer, (BYTE*)pBuffer, nSize);
+ } else {
+ memcpy (pDXVABuffer, (BYTE*)pBuffer, nSize);
+ }
+
+ m_ExecuteParams.pCompressedBuffers[m_ExecuteParams.NumCompBuffers].CompressedBufferType = CompressedBufferType;
+ m_ExecuteParams.pCompressedBuffers[m_ExecuteParams.NumCompBuffers].DataSize = nSize;
+ m_ExecuteParams.pCompressedBuffers[m_ExecuteParams.NumCompBuffers].NumMBsInBuffer = dwNumMBs;
+ m_ExecuteParams.NumCompBuffers++;
- }
- break;
- default :
- ASSERT (FALSE);
- break;
+ }
+ break;
+ default :
+ ASSERT (FALSE);
+ break;
+ }
+ if (pRealSize) {
+ *pRealSize = nSize;
}
- if (pRealSize) *pRealSize = nSize;
return hr;
}
@@ -295,15 +296,13 @@ HRESULT CDXVADecoder::GetDeliveryBuffer(REFERENCE_TIME rtStart, REFERENCE_TIME r
CComPtr<IMediaSample> pNewSample;
// Change aspect ratio for DXVA2
- if (m_nEngine == ENGINE_DXVA2)
- {
+ if (m_nEngine == ENGINE_DXVA2) {
m_pFilter->UpdateAspectRatio();
m_pFilter->ReconnectOutput(m_pFilter->PictWidthRounded(), m_pFilter->PictHeightRounded(), true, m_pFilter->PictWidth(), m_pFilter->PictHeight());
}
hr = m_pFilter->GetOutputPin()->GetDeliveryBuffer(&pNewSample, 0, 0, 0);
- if (SUCCEEDED (hr))
- {
+ if (SUCCEEDED (hr)) {
pNewSample->SetTime(&rtStart, &rtStop);
pNewSample->SetMediaTime(NULL, NULL);
*ppSampleToDeliver = pNewSample.Detach();
@@ -315,43 +314,40 @@ HRESULT CDXVADecoder::Execute()
{
HRESULT hr = E_INVALIDARG;
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- DWORD dwFunction;
- HRESULT hr2;
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ DWORD dwFunction;
+ HRESULT hr2;
-// writeDXVA_QueryOrReplyFunc (&dwFunction, DXVA_QUERYORREPLYFUNCFLAG_DECODER_LOCK_QUERY, DXVA_PICTURE_DECODING_FUNCTION);
-// hr = m_pAMVideoAccelerator->Execute (dwFunction, &m_DXVA1Config, sizeof(DXVA_ConfigPictureDecode), NULL, 0, m_dwNumBuffersInfo, m_DXVA1BufferInfo);
+ // writeDXVA_QueryOrReplyFunc (&dwFunction, DXVA_QUERYORREPLYFUNCFLAG_DECODER_LOCK_QUERY, DXVA_PICTURE_DECODING_FUNCTION);
+ // hr = m_pAMVideoAccelerator->Execute (dwFunction, &m_DXVA1Config, sizeof(DXVA_ConfigPictureDecode), NULL, 0, m_dwNumBuffersInfo, m_DXVA1BufferInfo);
- DWORD dwResult;
- dwFunction = 0x01000000;
- hr = m_pAMVideoAccelerator->Execute (dwFunction, m_DXVA1BufferDesc, sizeof(DXVA_BufferDescription)*m_dwNumBuffersInfo,&dwResult, sizeof(dwResult), m_dwNumBuffersInfo, m_DXVA1BufferInfo);
- ASSERT (SUCCEEDED (hr));
+ DWORD dwResult;
+ dwFunction = 0x01000000;
+ hr = m_pAMVideoAccelerator->Execute (dwFunction, m_DXVA1BufferDesc, sizeof(DXVA_BufferDescription)*m_dwNumBuffersInfo,&dwResult, sizeof(dwResult), m_dwNumBuffersInfo, m_DXVA1BufferInfo);
+ ASSERT (SUCCEEDED (hr));
- for (DWORD i=0; i<m_dwNumBuffersInfo; i++)
- {
- hr2 = m_pAMVideoAccelerator->ReleaseBuffer (m_DXVA1BufferInfo[i].dwTypeIndex, m_DXVA1BufferInfo[i].dwBufferIndex);
- ASSERT (SUCCEEDED (hr2));
- }
+ for (DWORD i=0; i<m_dwNumBuffersInfo; i++) {
+ hr2 = m_pAMVideoAccelerator->ReleaseBuffer (m_DXVA1BufferInfo[i].dwTypeIndex, m_DXVA1BufferInfo[i].dwBufferIndex);
+ ASSERT (SUCCEEDED (hr2));
+ }
- m_dwNumBuffersInfo = 0;
- break;
+ m_dwNumBuffersInfo = 0;
+ break;
- case ENGINE_DXVA2 :
+ case ENGINE_DXVA2 :
- for (DWORD i=0; i<m_ExecuteParams.NumCompBuffers; i++)
- {
- hr2 = m_pDirectXVideoDec->ReleaseBuffer (m_ExecuteParams.pCompressedBuffers[i].CompressedBufferType);
- ASSERT (SUCCEEDED (hr2));
- }
+ for (DWORD i=0; i<m_ExecuteParams.NumCompBuffers; i++) {
+ hr2 = m_pDirectXVideoDec->ReleaseBuffer (m_ExecuteParams.pCompressedBuffers[i].CompressedBufferType);
+ ASSERT (SUCCEEDED (hr2));
+ }
- hr = m_pDirectXVideoDec->Execute(&m_ExecuteParams);
- m_ExecuteParams.NumCompBuffers = 0;
- break;
- default :
- ASSERT (FALSE);
- break;
+ hr = m_pDirectXVideoDec->Execute(&m_ExecuteParams);
+ m_ExecuteParams.NumCompBuffers = 0;
+ break;
+ default :
+ ASSERT (FALSE);
+ break;
}
return hr;
@@ -364,24 +360,23 @@ HRESULT CDXVADecoder::QueryStatus(PVOID LPDXVAStatus, UINT nSize)
DXVA2_DecodeExtensionData ExtensionData;
DWORD dwFunction = 0x07000000;
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- hr = m_pAMVideoAccelerator->Execute (dwFunction, NULL, 0, LPDXVAStatus, nSize, 0, NULL);
- break;
-
- case ENGINE_DXVA2 :
- memset (&ExecuteParams, 0, sizeof(ExecuteParams));
- memset (&ExtensionData, 0, sizeof(ExtensionData));
- ExecuteParams.pExtensionData = &ExtensionData;
- ExtensionData.pPrivateOutputData = LPDXVAStatus;
- ExtensionData.PrivateOutputDataSize = nSize;
- ExtensionData.Function = 7;
- hr = m_pDirectXVideoDec->Execute(&ExecuteParams);
- break;
- default :
- ASSERT (FALSE);
- break;
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ hr = m_pAMVideoAccelerator->Execute (dwFunction, NULL, 0, LPDXVAStatus, nSize, 0, NULL);
+ break;
+
+ case ENGINE_DXVA2 :
+ memset (&ExecuteParams, 0, sizeof(ExecuteParams));
+ memset (&ExtensionData, 0, sizeof(ExtensionData));
+ ExecuteParams.pExtensionData = &ExtensionData;
+ ExtensionData.pPrivateOutputData = LPDXVAStatus;
+ ExtensionData.PrivateOutputDataSize = nSize;
+ ExtensionData.Function = 7;
+ hr = m_pDirectXVideoDec->Execute(&ExecuteParams);
+ break;
+ default :
+ ASSERT (FALSE);
+ break;
}
return hr;
@@ -389,21 +384,19 @@ HRESULT CDXVADecoder::QueryStatus(PVOID LPDXVAStatus, UINT nSize)
DWORD CDXVADecoder::GetDXVA1CompressedType (DWORD dwDXVA2CompressedType)
{
- if (dwDXVA2CompressedType <= DXVA2_BitStreamDateBufferType)
+ if (dwDXVA2CompressedType <= DXVA2_BitStreamDateBufferType) {
return dwDXVA2CompressedType + 1;
- else
- {
- switch (dwDXVA2CompressedType)
- {
- case DXVA2_MotionVectorBuffer :
- return DXVA_MOTION_VECTOR_BUFFER;
- break;
- case DXVA2_FilmGrainBuffer :
- return DXVA_FILM_GRAIN_BUFFER;
- break;
- default :
- ASSERT (FALSE);
- return DXVA_COMPBUFFER_TYPE_THAT_IS_NOT_USED;
+ } else {
+ switch (dwDXVA2CompressedType) {
+ case DXVA2_MotionVectorBuffer :
+ return DXVA_MOTION_VECTOR_BUFFER;
+ break;
+ case DXVA2_FilmGrainBuffer :
+ return DXVA_FILM_GRAIN_BUFFER;
+ break;
+ default :
+ ASSERT (FALSE);
+ return DXVA_COMPBUFFER_TYPE_THAT_IS_NOT_USED;
}
}
}
@@ -425,47 +418,47 @@ HRESULT CDXVADecoder::BeginFrame(int nSurfaceIndex, IMediaSample* pSampleToDeliv
HRESULT hr = E_INVALIDARG;
int nTry = 0;
- for (int i=0; i<20; i++)
- {
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- AMVABeginFrameInfo BeginFrameInfo;
+ for (int i=0; i<20; i++) {
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ AMVABeginFrameInfo BeginFrameInfo;
- BeginFrameInfo.dwDestSurfaceIndex = nSurfaceIndex;
- BeginFrameInfo.dwSizeInputData = sizeof(nSurfaceIndex);
- BeginFrameInfo.pInputData = &nSurfaceIndex;
- BeginFrameInfo.dwSizeOutputData = 0;
- BeginFrameInfo.pOutputData = NULL;
+ BeginFrameInfo.dwDestSurfaceIndex = nSurfaceIndex;
+ BeginFrameInfo.dwSizeInputData = sizeof(nSurfaceIndex);
+ BeginFrameInfo.pInputData = &nSurfaceIndex;
+ BeginFrameInfo.dwSizeOutputData = 0;
+ BeginFrameInfo.pOutputData = NULL;
- DO_DXVA_PENDING_LOOP (m_pAMVideoAccelerator->BeginFrame(&BeginFrameInfo));
+ DO_DXVA_PENDING_LOOP (m_pAMVideoAccelerator->BeginFrame(&BeginFrameInfo));
- ASSERT (SUCCEEDED (hr));
- // TRACE ("BeginFrame %d\n",nSurfaceIndex);
- if (SUCCEEDED (hr))
- hr = FindFreeDXVA1Buffer ((DWORD)-1, m_dwBufferIndex);
- break;
+ ASSERT (SUCCEEDED (hr));
+ // TRACE ("BeginFrame %d\n",nSurfaceIndex);
+ if (SUCCEEDED (hr)) {
+ hr = FindFreeDXVA1Buffer ((DWORD)-1, m_dwBufferIndex);
+ }
+ break;
- case ENGINE_DXVA2 :
- {
- CComQIPtr<IMFGetService> pSampleService;
- CComPtr<IDirect3DSurface9> pDecoderRenderTarget;
- pSampleService = pSampleToDeliver;
- if (pSampleService)
- {
- hr = pSampleService->GetService (MR_BUFFER_SERVICE, __uuidof(IDirect3DSurface9), (void**) &pDecoderRenderTarget);
- if (SUCCEEDED (hr))
- DO_DXVA_PENDING_LOOP (m_pDirectXVideoDec->BeginFrame(pDecoderRenderTarget, NULL));
+ case ENGINE_DXVA2 : {
+ CComQIPtr<IMFGetService> pSampleService;
+ CComPtr<IDirect3DSurface9> pDecoderRenderTarget;
+ pSampleService = pSampleToDeliver;
+ if (pSampleService) {
+ hr = pSampleService->GetService (MR_BUFFER_SERVICE, __uuidof(IDirect3DSurface9), (void**) &pDecoderRenderTarget);
+ if (SUCCEEDED (hr)) {
+ DO_DXVA_PENDING_LOOP (m_pDirectXVideoDec->BeginFrame(pDecoderRenderTarget, NULL));
+ }
+ }
}
- }
- break;
- default :
- ASSERT (FALSE);
break;
+ default :
+ ASSERT (FALSE);
+ break;
}
// For slow accelerator wait a little...
- if (SUCCEEDED (hr)) break;
+ if (SUCCEEDED (hr)) {
+ break;
+ }
Sleep(1);
}
@@ -478,24 +471,23 @@ HRESULT CDXVADecoder::EndFrame(int nSurfaceIndex)
HRESULT hr = E_INVALIDARG;
DWORD dwDummy = nSurfaceIndex;
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- AMVAEndFrameInfo EndFrameInfo;
-
- EndFrameInfo.dwSizeMiscData = sizeof (dwDummy); // TODO : usefull ??
- EndFrameInfo.pMiscData = &dwDummy;
- hr = m_pAMVideoAccelerator->EndFrame(&EndFrameInfo);
-// TRACE ("EndFrame %d\n",nSurfaceIndex);
- ASSERT (SUCCEEDED (hr));
- break;
-
- case ENGINE_DXVA2 :
- hr = m_pDirectXVideoDec->EndFrame(NULL);
- break;
- default :
- ASSERT (FALSE);
- break;
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ AMVAEndFrameInfo EndFrameInfo;
+
+ EndFrameInfo.dwSizeMiscData = sizeof (dwDummy); // TODO : usefull ??
+ EndFrameInfo.pMiscData = &dwDummy;
+ hr = m_pAMVideoAccelerator->EndFrame(&EndFrameInfo);
+ // TRACE ("EndFrame %d\n",nSurfaceIndex);
+ ASSERT (SUCCEEDED (hr));
+ break;
+
+ case ENGINE_DXVA2 :
+ hr = m_pDirectXVideoDec->EndFrame(NULL);
+ break;
+ default :
+ ASSERT (FALSE);
+ break;
}
return hr;
@@ -506,8 +498,7 @@ bool CDXVADecoder::AddToStore (int nSurfaceIndex, IMediaSample* pSample, bool bR
REFERENCE_TIME rtStart, REFERENCE_TIME rtStop, bool bIsField,
FF_FIELD_TYPE nFieldType, FF_SLICE_TYPE nSliceType, int nCodecSpecific)
{
- if (bIsField && (m_nFieldSurface == -1))
- {
+ if (bIsField && (m_nFieldSurface == -1)) {
m_nFieldSurface = nSurfaceIndex;
m_pFieldSample = pSample;
m_pPictureStore[nSurfaceIndex].n1FieldType = nFieldType;
@@ -515,9 +506,7 @@ bool CDXVADecoder::AddToStore (int nSurfaceIndex, IMediaSample* pSample, bool bR
m_pPictureStore[nSurfaceIndex].rtStop = rtStop;
m_pPictureStore[nSurfaceIndex].nCodecSpecific = nCodecSpecific;
return false;
- }
- else
- {
+ } else {
//TRACE ("Add Stor: %10I64d - %10I64d Ind = %d Codec=%d\n", rtStart, rtStop, nSurfaceIndex, nCodecSpecific);
ASSERT (m_pPictureStore[nSurfaceIndex].pSample == NULL);
ASSERT (!m_pPictureStore[nSurfaceIndex].bInUse);
@@ -529,8 +518,7 @@ bool CDXVADecoder::AddToStore (int nSurfaceIndex, IMediaSample* pSample, bool bR
m_pPictureStore[nSurfaceIndex].pSample = pSample;
m_pPictureStore[nSurfaceIndex].nSliceType = nSliceType;
- if (!bIsField)
- {
+ if (!bIsField) {
m_pPictureStore[nSurfaceIndex].rtStart = rtStart;
m_pPictureStore[nSurfaceIndex].rtStop = rtStop;
m_pPictureStore[nSurfaceIndex].n1FieldType = nFieldType;
@@ -555,8 +543,9 @@ void CDXVADecoder::RemoveRefFrame (int nSurfaceIndex)
ASSERT ((nSurfaceIndex < m_nPicEntryNumber) && m_pPictureStore[nSurfaceIndex].bInUse);
m_pPictureStore[nSurfaceIndex].bRefPicture = false;
- if (m_pPictureStore[nSurfaceIndex].bDisplayed)
+ if (m_pPictureStore[nSurfaceIndex].bDisplayed) {
FreePictureSlot (nSurfaceIndex);
+ }
}
@@ -566,14 +555,11 @@ int CDXVADecoder::FindOldestFrame()
int nPos = -1;
// TODO : find better solution...
- if (m_nWaitingPics > m_nMaxWaiting)
- {
- for (int i=0; i<m_nPicEntryNumber; i++)
- {
+ if (m_nWaitingPics > m_nMaxWaiting) {
+ for (int i=0; i<m_nPicEntryNumber; i++) {
if (!m_pPictureStore[i].bDisplayed &&
- m_pPictureStore[i].bInUse &&
- (m_pPictureStore[i].rtStart < rtMin))
- {
+ m_pPictureStore[i].bInUse &&
+ (m_pPictureStore[i].rtStart < rtMin)) {
rtMin = m_pPictureStore[i].rtStart;
nPos = i;
}
@@ -584,36 +570,33 @@ int CDXVADecoder::FindOldestFrame()
void CDXVADecoder::SetTypeSpecificFlags(PICTURE_STORE* pPicture, IMediaSample* pMS)
{
- if(CComQIPtr<IMediaSample2> pMS2 = pMS)
- {
+ if(CComQIPtr<IMediaSample2> pMS2 = pMS) {
AM_SAMPLE2_PROPERTIES props;
- if(SUCCEEDED(pMS2->GetProperties(sizeof(props), (BYTE*)&props)))
- {
+ if(SUCCEEDED(pMS2->GetProperties(sizeof(props), (BYTE*)&props))) {
props.dwTypeSpecificFlags &= ~0x7f;
- if(pPicture->n1FieldType == PICT_FRAME)
+ if(pPicture->n1FieldType == PICT_FRAME) {
props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_WEAVE;
- else
- {
- if(pPicture->n1FieldType == PICT_TOP_FIELD)
+ } else {
+ if(pPicture->n1FieldType == PICT_TOP_FIELD) {
props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_FIELD1FIRST;
+ }
//if(m_fb.flags & PIC_FLAG_REPEAT_FIRST_FIELD)
// props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_REPEAT_FIELD;
}
- switch (pPicture->nSliceType)
- {
- case I_TYPE :
- case SI_TYPE :
- props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_I_SAMPLE;
- break;
- case P_TYPE :
- case SP_TYPE :
- props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_P_SAMPLE;
- break;
- default :
- props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_B_SAMPLE;
- break;
+ switch (pPicture->nSliceType) {
+ case I_TYPE :
+ case SI_TYPE :
+ props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_I_SAMPLE;
+ break;
+ case P_TYPE :
+ case SP_TYPE :
+ props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_P_SAMPLE;
+ break;
+ default :
+ props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_B_SAMPLE;
+ break;
}
pMS2->SetProperties(sizeof(props), (BYTE*)&props);
@@ -630,24 +613,23 @@ HRESULT CDXVADecoder::DisplayNextFrame()
int nPicIndex;
nPicIndex = FindOldestFrame();
- if (nPicIndex != -1)
- {
- if (m_pPictureStore[nPicIndex].rtStart >= 0)
- {
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- // For DXVA1, query a media sample at the last time (only one in the allocator)
- hr = GetDeliveryBuffer (m_pPictureStore[nPicIndex].rtStart, m_pPictureStore[nPicIndex].rtStop, &pSampleToDeliver);
- SetTypeSpecificFlags(&m_pPictureStore[nPicIndex], pSampleToDeliver);
- if (SUCCEEDED (hr)) hr = m_pAMVideoAccelerator->DisplayFrame(nPicIndex, pSampleToDeliver);
- break;
- case ENGINE_DXVA2 :
- // For DXVA2 media sample is in the picture store
- m_pPictureStore[nPicIndex].pSample->SetTime (&m_pPictureStore[nPicIndex].rtStart, &m_pPictureStore[nPicIndex].rtStop);
- SetTypeSpecificFlags(&m_pPictureStore[nPicIndex], m_pPictureStore[nPicIndex].pSample);
- hr = m_pFilter->GetOutputPin()->Deliver(m_pPictureStore[nPicIndex].pSample);
- break;
+ if (nPicIndex != -1) {
+ if (m_pPictureStore[nPicIndex].rtStart >= 0) {
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ // For DXVA1, query a media sample at the last time (only one in the allocator)
+ hr = GetDeliveryBuffer (m_pPictureStore[nPicIndex].rtStart, m_pPictureStore[nPicIndex].rtStop, &pSampleToDeliver);
+ SetTypeSpecificFlags(&m_pPictureStore[nPicIndex], pSampleToDeliver);
+ if (SUCCEEDED (hr)) {
+ hr = m_pAMVideoAccelerator->DisplayFrame(nPicIndex, pSampleToDeliver);
+ }
+ break;
+ case ENGINE_DXVA2 :
+ // For DXVA2 media sample is in the picture store
+ m_pPictureStore[nPicIndex].pSample->SetTime (&m_pPictureStore[nPicIndex].rtStart, &m_pPictureStore[nPicIndex].rtStop);
+ SetTypeSpecificFlags(&m_pPictureStore[nPicIndex], m_pPictureStore[nPicIndex].pSample);
+ hr = m_pFilter->GetOutputPin()->Deliver(m_pPictureStore[nPicIndex].pSample);
+ break;
}
@@ -665,8 +647,9 @@ HRESULT CDXVADecoder::DisplayNextFrame()
}
m_pPictureStore[nPicIndex].bDisplayed = true;
- if (!m_pPictureStore[nPicIndex].bRefPicture)
+ if (!m_pPictureStore[nPicIndex].bRefPicture) {
FreePictureSlot (nPicIndex);
+ }
}
return hr;
@@ -678,48 +661,42 @@ HRESULT CDXVADecoder::GetFreeSurfaceIndex(int& nSurfaceIndex, IMediaSample** ppS
int nPos = -1;
DWORD dwMinDisplay = MAXDWORD;
- if (m_nFieldSurface != -1)
- {
+ if (m_nFieldSurface != -1) {
nSurfaceIndex = m_nFieldSurface;
*ppSampleToDeliver = m_pFieldSample.Detach();
return S_FALSE;
}
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- for (int i=0; i<m_nPicEntryNumber; i++)
- {
- if (!m_pPictureStore[i].bInUse && m_pPictureStore[i].dwDisplayCount < dwMinDisplay)
- {
- dwMinDisplay = m_pPictureStore[i].dwDisplayCount;
- nPos = i;
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ for (int i=0; i<m_nPicEntryNumber; i++) {
+ if (!m_pPictureStore[i].bInUse && m_pPictureStore[i].dwDisplayCount < dwMinDisplay) {
+ dwMinDisplay = m_pPictureStore[i].dwDisplayCount;
+ nPos = i;
+ }
}
- }
- if (nPos != -1)
- {
- nSurfaceIndex = nPos;
- return S_OK;
- }
+ if (nPos != -1) {
+ nSurfaceIndex = nPos;
+ return S_OK;
+ }
- // Ho ho...
- ASSERT (FALSE);
- Flush();
- break;
- case ENGINE_DXVA2 :
- CComPtr<IMediaSample> pNewSample;
- CComQIPtr<IMPCDXVA2Sample> pMPCDXVA2Sample;
- // TODO : test IDirect3DDeviceManager9::TestDevice !!!
-// TRACE ("==> Try get buffer...\n");
- if (SUCCEEDED (hr = GetDeliveryBuffer(rtStart, rtStop, &pNewSample)))
- {
- pMPCDXVA2Sample = pNewSample;
- nSurfaceIndex = pMPCDXVA2Sample ? pMPCDXVA2Sample->GetDXSurfaceId() : 0;
- *ppSampleToDeliver = pNewSample.Detach();
-// TRACE ("GetFreeSurfaceIndex : %d\n", nSurfaceIndex);
- }
- break;
+ // Ho ho...
+ ASSERT (FALSE);
+ Flush();
+ break;
+ case ENGINE_DXVA2 :
+ CComPtr<IMediaSample> pNewSample;
+ CComQIPtr<IMPCDXVA2Sample> pMPCDXVA2Sample;
+ // TODO : test IDirect3DDeviceManager9::TestDevice !!!
+ // TRACE ("==> Try get buffer...\n");
+ if (SUCCEEDED (hr = GetDeliveryBuffer(rtStart, rtStop, &pNewSample))) {
+ pMPCDXVA2Sample = pNewSample;
+ nSurfaceIndex = pMPCDXVA2Sample ? pMPCDXVA2Sample->GetDXSurfaceId() : 0;
+ *ppSampleToDeliver = pNewSample.Detach();
+ // TRACE ("GetFreeSurfaceIndex : %d\n", nSurfaceIndex);
+ }
+ break;
}
return hr;
@@ -728,7 +705,7 @@ HRESULT CDXVADecoder::GetFreeSurfaceIndex(int& nSurfaceIndex, IMediaSample** ppS
void CDXVADecoder::FreePictureSlot (int nSurfaceIndex)
{
-// TRACE ("Free : %d\n", nSurfaceIndex);
+ // TRACE ("Free : %d\n", nSurfaceIndex);
m_pPictureStore[nSurfaceIndex].dwDisplayCount = m_dwDisplayCount++;
m_pPictureStore[nSurfaceIndex].bInUse = false;
m_pPictureStore[nSurfaceIndex].bDisplayed = false;
@@ -740,12 +717,11 @@ void CDXVADecoder::FreePictureSlot (int nSurfaceIndex)
BYTE CDXVADecoder::GetConfigResidDiffAccelerator()
{
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- return m_DXVA1Config.bConfigResidDiffAccelerator;
- case ENGINE_DXVA2 :
- return m_DXVA2Config.ConfigResidDiffAccelerator;
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ return m_DXVA1Config.bConfigResidDiffAccelerator;
+ case ENGINE_DXVA2 :
+ return m_DXVA2Config.ConfigResidDiffAccelerator;
}
return 0;
}
@@ -753,12 +729,11 @@ BYTE CDXVADecoder::GetConfigResidDiffAccelerator()
BYTE CDXVADecoder::GetConfigIntraResidUnsigned()
{
- switch (m_nEngine)
- {
- case ENGINE_DXVA1 :
- return m_DXVA1Config.bConfigIntraResidUnsigned;
- case ENGINE_DXVA2 :
- return m_DXVA2Config.ConfigIntraResidUnsigned;
+ switch (m_nEngine) {
+ case ENGINE_DXVA1 :
+ return m_DXVA1Config.bConfigIntraResidUnsigned;
+ case ENGINE_DXVA2 :
+ return m_DXVA2Config.ConfigIntraResidUnsigned;
}
return 0;
}
diff --git a/src/filters/transform/MPCVideoDec/DXVADecoder.h b/src/filters/transform/MPCVideoDec/DXVADecoder.h
index a8c92154b..6c776454e 100644
--- a/src/filters/transform/MPCVideoDec/DXVADecoder.h
+++ b/src/filters/transform/MPCVideoDec/DXVADecoder.h
@@ -26,28 +26,24 @@
#include <dxva2api.h>
#include <videoacc.h>
-typedef enum
-{
+typedef enum {
ENGINE_DXVA1,
ENGINE_DXVA2
} DXVA_ENGINE;
-typedef enum
-{
+typedef enum {
H264_VLD,
VC1_VLD,
MPEG2_VLD
} DXVAMode;
-typedef enum
-{
+typedef enum {
PICT_TOP_FIELD = 1,
PICT_BOTTOM_FIELD = 2,
PICT_FRAME = 3
} FF_FIELD_TYPE;
-typedef enum
-{
+typedef enum {
I_TYPE = 1, ///< Intra
P_TYPE = 2, ///< Predicted
B_TYPE = 3, ///< Bi-dir predicted
@@ -57,8 +53,7 @@ typedef enum
BI_TYPE = 7
} FF_SLICE_TYPE;
-typedef struct
-{
+typedef struct {
bool bRefPicture; // True if reference picture
bool bInUse; // Slot in use
bool bDisplayed; // True if picture have been presented
diff --git a/src/filters/transform/MPCVideoDec/DXVADecoderH264.cpp b/src/filters/transform/MPCVideoDec/DXVADecoderH264.cpp
index 612168526..d75e7f310 100644
--- a/src/filters/transform/MPCVideoDec/DXVADecoderH264.cpp
+++ b/src/filters/transform/MPCVideoDec/DXVADecoderH264.cpp
@@ -60,18 +60,18 @@ void CDXVADecoderH264::Init()
memset (&m_pSliceShort, 0, sizeof (DXVA_Slice_H264_Short)*MAX_SLICES);
m_DXVAPicParams.MbsConsecutiveFlag = 1;
- if(m_pFilter->GetPCIVendor() == PCIV_Intel)
+ if(m_pFilter->GetPCIVendor() == PCIV_Intel) {
m_DXVAPicParams.Reserved16Bits = 0x534c;
- else
+ } else {
m_DXVAPicParams.Reserved16Bits = 0;
+ }
m_DXVAPicParams.ContinuationFlag = 1;
m_DXVAPicParams.Reserved8BitsA = 0;
m_DXVAPicParams.Reserved8BitsB = 0;
m_DXVAPicParams.MinLumaBipredSize8x8Flag = 1; // Improve accelerator performances
m_DXVAPicParams.StatusReportFeedbackNumber = 0; // Use to report status
- for (int i =0; i<16; i++)
- {
+ for (int i =0; i<16; i++) {
m_DXVAPicParams.RefFrameList[i].AssociatedFlag = 1;
m_DXVAPicParams.RefFrameList[i].bPicEntry = 255;
m_DXVAPicParams.RefFrameList[i].Index7Bits = 127;
@@ -81,13 +81,12 @@ void CDXVADecoderH264::Init()
m_nNALLength = 4;
m_nMaxSlices = 0;
- switch (GetMode())
- {
- case H264_VLD :
- AllocExecuteParams (3);
- break;
- default :
- ASSERT(FALSE);
+ switch (GetMode()) {
+ case H264_VLD :
+ AllocExecuteParams (3);
+ break;
+ default :
+ ASSERT(FALSE);
}
}
@@ -103,67 +102,62 @@ void CDXVADecoderH264::CopyBitstream(BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSi
#if 0
// Test to place Nal on multiple of 128 bytes (seems to be not necessary)
- if(!m_bUseLongSlice)
- {
- while (Nalu.ReadNext())
- {
- switch (Nalu.GetType())
- {
- case NALU_TYPE_SLICE:
- case NALU_TYPE_IDR:
- // For AVC1, put startcode 0x000001
- pDXVABuffer[0]=pDXVABuffer[1]=0;
- pDXVABuffer[2]=1;
-
- // Copy NALU
- memcpy (pDXVABuffer+3, Nalu.GetDataBuffer(), Nalu.GetDataLength());
-
- // Complete with zero padding (buffer size should be a multiple of 128)
- nDummy = 128 - ((Nalu.GetDataLength()+3) %128);
- pDXVABuffer += Nalu.GetDataLength() + 3;
- memset (pDXVABuffer, 0, nDummy);
- pDXVABuffer += nDummy;
-
- // Update slice control buffer
- nDxvaNalLength = Nalu.GetDataLength()+3+nDummy;
- m_pSliceShort[nSlices].BSNALunitDataLocation = nSize;
- m_pSliceShort[nSlices].SliceBytesInBuffer = nDxvaNalLength;
-
- nSize += nDxvaNalLength;
- nSlices++;
- break;
+ if(!m_bUseLongSlice) {
+ while (Nalu.ReadNext()) {
+ switch (Nalu.GetType()) {
+ case NALU_TYPE_SLICE:
+ case NALU_TYPE_IDR:
+ // For AVC1, put startcode 0x000001
+ pDXVABuffer[0]=pDXVABuffer[1]=0;
+ pDXVABuffer[2]=1;
+
+ // Copy NALU
+ memcpy (pDXVABuffer+3, Nalu.GetDataBuffer(), Nalu.GetDataLength());
+
+ // Complete with zero padding (buffer size should be a multiple of 128)
+ nDummy = 128 - ((Nalu.GetDataLength()+3) %128);
+ pDXVABuffer += Nalu.GetDataLength() + 3;
+ memset (pDXVABuffer, 0, nDummy);
+ pDXVABuffer += nDummy;
+
+ // Update slice control buffer
+ nDxvaNalLength = Nalu.GetDataLength()+3+nDummy;
+ m_pSliceShort[nSlices].BSNALunitDataLocation = nSize;
+ m_pSliceShort[nSlices].SliceBytesInBuffer = nDxvaNalLength;
+
+ nSize += nDxvaNalLength;
+ nSlices++;
+ break;
}
}
- }
- else
+ } else
#endif
{
- while (Nalu.ReadNext())
- {
- switch (Nalu.GetType())
- {
- case NALU_TYPE_SLICE:
- case NALU_TYPE_IDR:
- // Skip the NALU if the data length is below 0
- if(Nalu.GetDataLength() < 0)
+ while (Nalu.ReadNext()) {
+ switch (Nalu.GetType()) {
+ case NALU_TYPE_SLICE:
+ case NALU_TYPE_IDR:
+ // Skip the NALU if the data length is below 0
+ if(Nalu.GetDataLength() < 0) {
+ break;
+ }
+
+ // For AVC1, put startcode 0x000001
+ pDXVABuffer[0]=pDXVABuffer[1]=0;
+ pDXVABuffer[2]=1;
+
+ // Copy NALU
+ memcpy (pDXVABuffer+3, Nalu.GetDataBuffer(), Nalu.GetDataLength());
+
+ // Update slice control buffer
+ nDxvaNalLength = Nalu.GetDataLength()+3;
+ m_pSliceShort[nSlices].BSNALunitDataLocation = nSize;
+ m_pSliceShort[nSlices].SliceBytesInBuffer = nDxvaNalLength;
+
+ nSize += nDxvaNalLength;
+ pDXVABuffer += nDxvaNalLength;
+ nSlices++;
break;
-
- // For AVC1, put startcode 0x000001
- pDXVABuffer[0]=pDXVABuffer[1]=0;
- pDXVABuffer[2]=1;
-
- // Copy NALU
- memcpy (pDXVABuffer+3, Nalu.GetDataBuffer(), Nalu.GetDataLength());
-
- // Update slice control buffer
- nDxvaNalLength = Nalu.GetDataLength()+3;
- m_pSliceShort[nSlices].BSNALunitDataLocation = nSize;
- m_pSliceShort[nSlices].SliceBytesInBuffer = nDxvaNalLength;
-
- nSize += nDxvaNalLength;
- pDXVABuffer += nDxvaNalLength;
- nSlices++;
- break;
}
}
@@ -204,39 +198,43 @@ HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
Nalu.SetBuffer (pDataIn, nSize, m_nNALLength);
FFH264DecodeBuffer (m_pFilter->GetAVCtx(), pDataIn, nSize, &nFramePOC, &nOutPOC, &rtOutStart);
- while (Nalu.ReadNext())
- {
- switch (Nalu.GetType())
- {
- case NALU_TYPE_SLICE:
- case NALU_TYPE_IDR:
- if(m_bUseLongSlice)
- {
- m_pSliceLong[nSlices].BSNALunitDataLocation = nNalOffset;
- m_pSliceLong[nSlices].SliceBytesInBuffer = Nalu.GetDataLength()+3; //.GetRoundedDataLength();
- m_pSliceLong[nSlices].slice_id = nSlices;
- FF264UpdateRefFrameSliceLong(&m_DXVAPicParams, &m_pSliceLong[nSlices], m_pFilter->GetAVCtx());
-
- if (nSlices>0)
- m_pSliceLong[nSlices-1].NumMbsForSlice = m_pSliceLong[nSlices].NumMbsForSlice = m_pSliceLong[nSlices].first_mb_in_slice - m_pSliceLong[nSlices-1].first_mb_in_slice;
- }
- nSlices++;
- nNalOffset += (UINT)(Nalu.GetDataLength() + 3);
- if (nSlices > MAX_SLICES) break;
- break;
+ while (Nalu.ReadNext()) {
+ switch (Nalu.GetType()) {
+ case NALU_TYPE_SLICE:
+ case NALU_TYPE_IDR:
+ if(m_bUseLongSlice) {
+ m_pSliceLong[nSlices].BSNALunitDataLocation = nNalOffset;
+ m_pSliceLong[nSlices].SliceBytesInBuffer = Nalu.GetDataLength()+3; //.GetRoundedDataLength();
+ m_pSliceLong[nSlices].slice_id = nSlices;
+ FF264UpdateRefFrameSliceLong(&m_DXVAPicParams, &m_pSliceLong[nSlices], m_pFilter->GetAVCtx());
+
+ if (nSlices>0) {
+ m_pSliceLong[nSlices-1].NumMbsForSlice = m_pSliceLong[nSlices].NumMbsForSlice = m_pSliceLong[nSlices].first_mb_in_slice - m_pSliceLong[nSlices-1].first_mb_in_slice;
+ }
+ }
+ nSlices++;
+ nNalOffset += (UINT)(Nalu.GetDataLength() + 3);
+ if (nSlices > MAX_SLICES) {
+ break;
+ }
+ break;
}
}
- if (nSlices == 0) return S_FALSE;
+ if (nSlices == 0) {
+ return S_FALSE;
+ }
m_nMaxWaiting = min (max (m_DXVAPicParams.num_ref_frames, 3), 8);
// If parsing fail (probably no PPS/SPS), continue anyway it may arrived later (happen on truncated streams)
- if (FAILED (FFH264BuildPicParams (&m_DXVAPicParams, &m_DXVAScalingMatrix, &nFieldType, &nSliceType, m_pFilter->GetAVCtx(), m_pFilter->GetPCIVendor())))
+ if (FAILED (FFH264BuildPicParams (&m_DXVAPicParams, &m_DXVAScalingMatrix, &nFieldType, &nSliceType, m_pFilter->GetAVCtx(), m_pFilter->GetPCIVendor()))) {
return S_FALSE;
+ }
// Wait I frame after a flush
- if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag)
+ if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag) {
return S_FALSE;
+ }
CHECK_HR (GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));
@@ -246,7 +244,7 @@ HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
m_DXVAPicParams.StatusReportFeedbackNumber++;
-// TRACE("CDXVADecoderH264 : Decode frame %u\n", m_DXVAPicParams.StatusReportFeedbackNumber);
+ // TRACE("CDXVADecoderH264 : Decode frame %u\n", m_DXVAPicParams.StatusReportFeedbackNumber);
// Send picture parameters
CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
@@ -255,12 +253,9 @@ HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
// Add bitstream, slice control and quantization matrix
CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));
- if (m_bUseLongSlice)
- {
+ if (m_bUseLongSlice) {
CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Long)*nSlices, m_pSliceLong));
- }
- else
- {
+ } else {
CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_Slice_H264_Short)*nSlices, m_pSliceShort));
}
@@ -272,7 +267,7 @@ HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
CHECK_HR (EndFrame(nSurfaceIndex));
#ifdef _DEBUG
-// DisplayStatus();
+ // DisplayStatus();
#endif
bool bAdded = AddToStore (nSurfaceIndex, pSampleToDeliver, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
@@ -282,12 +277,10 @@ HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
FFH264UpdateRefFramesList (&m_DXVAPicParams, m_pFilter->GetAVCtx());
ClearUnusedRefFrames();
- if (bAdded)
- {
+ if (bAdded) {
hr = DisplayNextFrame();
- if (nOutPOC != INT_MIN)
- {
+ if (nOutPOC != INT_MIN) {
m_nOutPOC = nOutPOC;
m_rtOutStart = rtOutStart;
}
@@ -299,10 +292,8 @@ HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
void CDXVADecoderH264::RemoveUndisplayedFrame(int nPOC)
{
// Find frame with given POC, and free the slot
- for (int i=0; i<m_nPicEntryNumber; i++)
- {
- if (m_pPictureStore[i].bInUse && m_pPictureStore[i].nCodecSpecific == nPOC)
- {
+ for (int i=0; i<m_nPicEntryNumber; i++) {
+ if (m_pPictureStore[i].bInUse && m_pPictureStore[i].nCodecSpecific == nPOC) {
m_pPictureStore[i].bDisplayed = true;
RemoveRefFrame (i);
return;
@@ -313,11 +304,11 @@ void CDXVADecoderH264::RemoveUndisplayedFrame(int nPOC)
void CDXVADecoderH264::ClearUnusedRefFrames()
{
// Remove old reference frames (not anymore a short or long ref frame)
- for (int i=0; i<m_nPicEntryNumber; i++)
- {
+ for (int i=0; i<m_nPicEntryNumber; i++) {
if (m_pPictureStore[i].bRefPicture && m_pPictureStore[i].bDisplayed)
- if (!FFH264IsRefFrameInUse (i, m_pFilter->GetAVCtx()))
+ if (!FFH264IsRefFrameInUse (i, m_pFilter->GetAVCtx())) {
RemoveRefFrame (i);
+ }
}
}
@@ -334,10 +325,8 @@ void CDXVADecoderH264::ClearRefFramesList()
{
int i;
- for (int i=0; i<m_nPicEntryNumber; i++)
- {
- if (m_pPictureStore[i].bInUse)
- {
+ for (int i=0; i<m_nPicEntryNumber; i++) {
+ if (m_pPictureStore[i].bInUse) {
m_pPictureStore[i].bDisplayed = true;
RemoveRefFrame (i);
}
@@ -369,22 +358,17 @@ int CDXVADecoderH264::FindOldestFrame()
int nPos = -1;
REFERENCE_TIME rtPos = _I64_MAX;
- for (int i=0; i<m_nPicEntryNumber; i++)
- {
- if (m_pPictureStore[i].bInUse && !m_pPictureStore[i].bDisplayed)
- {
- if (m_pPictureStore[i].nCodecSpecific == m_nOutPOC && m_pPictureStore[i].rtStart < rtPos)
- {
+ for (int i=0; i<m_nPicEntryNumber; i++) {
+ if (m_pPictureStore[i].bInUse && !m_pPictureStore[i].bDisplayed) {
+ if (m_pPictureStore[i].nCodecSpecific == m_nOutPOC && m_pPictureStore[i].rtStart < rtPos) {
nPos = i;
rtPos = m_pPictureStore[i].rtStart;
}
}
}
- if (nPos != -1)
- {
- if (m_rtOutStart == _I64_MIN)
- {
+ if (nPos != -1) {
+ if (m_rtOutStart == _I64_MIN) {
// If start time not set (no PTS for example), guess presentation time!
m_rtOutStart = m_rtLastFrameDisplayed;
}
diff --git a/src/filters/transform/MPCVideoDec/DXVADecoderMpeg2.cpp b/src/filters/transform/MPCVideoDec/DXVADecoderMpeg2.cpp
index 3012ae884..af5e4d9a6 100644
--- a/src/filters/transform/MPCVideoDec/DXVADecoderMpeg2.cpp
+++ b/src/filters/transform/MPCVideoDec/DXVADecoderMpeg2.cpp
@@ -67,13 +67,12 @@ void CDXVADecoderMpeg2::Init()
m_wRefPictureIndex[1] = NO_REF_FRAME;
m_nSliceCount = 0;
- switch (GetMode())
- {
- case MPEG2_VLD :
- AllocExecuteParams (4);
- break;
- default :
- ASSERT(FALSE);
+ switch (GetMode()) {
+ case MPEG2_VLD :
+ AllocExecuteParams (4);
+ break;
+ default :
+ ASSERT(FALSE);
}
}
@@ -90,12 +89,12 @@ HRESULT CDXVADecoderMpeg2::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIM
m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize);
// Wait I frame after a flush
- if (m_bFlushed && ! m_PictureParams.bPicIntra)
+ if (m_bFlushed && ! m_PictureParams.bPicIntra) {
return S_FALSE;
+ }
hr = GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop);
- if (FAILED (hr))
- {
+ if (FAILED (hr)) {
ASSERT (hr == VFW_E_NOT_COMMITTED); // Normal when stop playing
return hr;
}
@@ -134,9 +133,10 @@ void CDXVADecoderMpeg2::UpdatePictureParams(int nSurfaceIndex)
m_PictureParams.wDecodedPictureIndex = nSurfaceIndex;
// Manage reference picture list
- if (!m_PictureParams.bPicBackwardPrediction)
- {
- if (m_wRefPictureIndex[0] != NO_REF_FRAME) RemoveRefFrame (m_wRefPictureIndex[0]);
+ if (!m_PictureParams.bPicBackwardPrediction) {
+ if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
+ RemoveRefFrame (m_wRefPictureIndex[0]);
+ }
m_wRefPictureIndex[0] = m_wRefPictureIndex[1];
m_wRefPictureIndex[1] = nSurfaceIndex;
}
@@ -144,34 +144,37 @@ void CDXVADecoderMpeg2::UpdatePictureParams(int nSurfaceIndex)
m_PictureParams.wBackwardRefPictureIndex = (m_PictureParams.bPicBackwardPrediction == 1) ? m_wRefPictureIndex[1] : NO_REF_FRAME;
// Shall be 0 if bConfigResidDiffHost is 0 or if BPP > 8
- if (cpd->ConfigResidDiffHost == 0 || m_PictureParams.bBPPminus1 > 7)
+ if (cpd->ConfigResidDiffHost == 0 || m_PictureParams.bBPPminus1 > 7) {
m_PictureParams.bPicSpatialResid8 = 0;
- else
- {
+ } else {
if (m_PictureParams.bBPPminus1 == 7 && m_PictureParams.bPicIntra && cpd->ConfigResidDiffHost)
// Shall be 1 if BPP is 8 and bPicIntra is 1 and bConfigResidDiffHost is 1
+ {
m_PictureParams.bPicSpatialResid8 = 1;
- else
+ } else
// Shall be 1 if bConfigSpatialResid8 is 1
+ {
m_PictureParams.bPicSpatialResid8 = cpd->ConfigSpatialResid8;
+ }
}
// Shall be 0 if bConfigResidDiffHost is 0 or if bConfigSpatialResid8 is 0 or if BPP > 8
- if (cpd->ConfigResidDiffHost == 0 || cpd->ConfigSpatialResid8 == 0 || m_PictureParams.bBPPminus1 > 7)
+ if (cpd->ConfigResidDiffHost == 0 || cpd->ConfigSpatialResid8 == 0 || m_PictureParams.bBPPminus1 > 7) {
m_PictureParams.bPicOverflowBlocks = 0;
+ }
// Shall be 1 if bConfigHostInverseScan is 1 or if bConfigResidDiffAccelerator is 0.
- if (cpd->ConfigHostInverseScan == 1 || cpd->ConfigResidDiffAccelerator == 0)
- {
+ if (cpd->ConfigHostInverseScan == 1 || cpd->ConfigResidDiffAccelerator == 0) {
m_PictureParams.bPicScanFixed = 1;
- if (cpd->ConfigHostInverseScan != 0)
- m_PictureParams.bPicScanMethod = 3; // 11 = Arbitrary scan with absolute coefficient address.
- else if (FFGetAlternateScan(m_pFilter->GetAVCtx()))
- m_PictureParams.bPicScanMethod = 1; // 00 = Zig-zag scan (MPEG-2 Figure 7-2)
- else
- m_PictureParams.bPicScanMethod = 0; // 01 = Alternate-vertical (MPEG-2 Figure 7-3),
+ if (cpd->ConfigHostInverseScan != 0) {
+ m_PictureParams.bPicScanMethod = 3; // 11 = Arbitrary scan with absolute coefficient address.
+ } else if (FFGetAlternateScan(m_pFilter->GetAVCtx())) {
+ m_PictureParams.bPicScanMethod = 1; // 00 = Zig-zag scan (MPEG-2 Figure 7-2)
+ } else {
+ m_PictureParams.bPicScanMethod = 0; // 01 = Alternate-vertical (MPEG-2 Figure 7-3),
+ }
}
}
@@ -184,12 +187,13 @@ void CDXVADecoderMpeg2::SetExtraData (BYTE* pDataIn, UINT nSize)
void CDXVADecoderMpeg2::CopyBitstream(BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSize)
{
- while (*((DWORD*)pBuffer) != 0x01010000)
- {
+ while (*((DWORD*)pBuffer) != 0x01010000) {
pBuffer++;
nSize--;
- if (nSize <= 0) return;
+ if (nSize <= 0) {
+ return;
+ }
}
memcpy (pDXVABuffer, pBuffer, nSize);
@@ -200,8 +204,12 @@ void CDXVADecoderMpeg2::Flush()
{
m_nNextCodecIndex = INT_MIN;
- if (m_wRefPictureIndex[0] != NO_REF_FRAME) RemoveRefFrame (m_wRefPictureIndex[0]);
- if (m_wRefPictureIndex[1] != NO_REF_FRAME) RemoveRefFrame (m_wRefPictureIndex[1]);
+ if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
+ RemoveRefFrame (m_wRefPictureIndex[0]);
+ }
+ if (m_wRefPictureIndex[1] != NO_REF_FRAME) {
+ RemoveRefFrame (m_wRefPictureIndex[1]);
+ }
m_wRefPictureIndex[0] = NO_REF_FRAME;
m_wRefPictureIndex[1] = NO_REF_FRAME;
@@ -214,19 +222,18 @@ int CDXVADecoderMpeg2::FindOldestFrame()
{
int nPos = -1;
- for (int i=0; i<m_nPicEntryNumber; i++)
- {
+ for (int i=0; i<m_nPicEntryNumber; i++) {
if (!m_pPictureStore[i].bDisplayed &&
- m_pPictureStore[i].bInUse &&
- (m_pPictureStore[i].nCodecSpecific == m_nNextCodecIndex))
- {
+ m_pPictureStore[i].bInUse &&
+ (m_pPictureStore[i].nCodecSpecific == m_nNextCodecIndex)) {
m_nNextCodecIndex = INT_MIN;
nPos = i;
}
}
- if (nPos != -1)
+ if (nPos != -1) {
m_pFilter->UpdateFrameTime(m_pPictureStore[nPos].rtStart, m_pPictureStore[nPos].rtStop);
+ }
return nPos;
}
diff --git a/src/filters/transform/MPCVideoDec/DXVADecoderVC1.cpp b/src/filters/transform/MPCVideoDec/DXVADecoderVC1.cpp
index 2d55d0e84..212d18b6c 100644
--- a/src/filters/transform/MPCVideoDec/DXVADecoderVC1.cpp
+++ b/src/filters/transform/MPCVideoDec/DXVADecoderVC1.cpp
@@ -72,13 +72,12 @@ void CDXVADecoderVC1::Init()
m_wRefPictureIndex[0] = NO_REF_FRAME;
m_wRefPictureIndex[1] = NO_REF_FRAME;
- switch (GetMode())
- {
- case VC1_VLD :
- AllocExecuteParams (3);
- break;
- default :
- ASSERT(FALSE);
+ switch (GetMode()) {
+ case VC1_VLD :
+ AllocExecuteParams (3);
+ break;
+ default :
+ ASSERT(FALSE);
}
}
@@ -92,16 +91,17 @@ HRESULT CDXVADecoderVC1::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
int nSliceType;
FFVC1UpdatePictureParam (&m_PictureParams, m_pFilter->GetAVCtx(), &nFieldType, &nSliceType, pDataIn, nSize);
- if (FFIsSkipped (m_pFilter->GetAVCtx()))
+ if (FFIsSkipped (m_pFilter->GetAVCtx())) {
return S_OK;
+ }
// Wait I frame after a flush
- if (m_bFlushed && ! m_PictureParams.bPicIntra)
+ if (m_bFlushed && ! m_PictureParams.bPicIntra) {
return S_FALSE;
+ }
hr = GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop);
- if (FAILED (hr))
- {
+ if (FAILED (hr)) {
ASSERT (hr == VFW_E_NOT_COMMITTED); // Normal when stop playing
return hr;
}
@@ -114,9 +114,10 @@ HRESULT CDXVADecoderVC1::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
m_PictureParams.wDeblockedPictureIndex = m_PictureParams.wDecodedPictureIndex;
// Manage reference picture list
- if (!m_PictureParams.bPicBackwardPrediction)
- {
- if (m_wRefPictureIndex[0] != NO_REF_FRAME) RemoveRefFrame (m_wRefPictureIndex[0]);
+ if (!m_PictureParams.bPicBackwardPrediction) {
+ if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
+ RemoveRefFrame (m_wRefPictureIndex[0]);
+ }
m_wRefPictureIndex[0] = m_wRefPictureIndex[1];
m_wRefPictureIndex[1] = nSurfaceIndex;
}
@@ -133,7 +134,7 @@ HRESULT CDXVADecoderVC1::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
// Send picture params to accelerator
m_PictureParams.wDecodedPictureIndex = nSurfaceIndex;
CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));
-// CHECK_HR (Execute());
+ // CHECK_HR (Execute());
// Send bitstream to accelerator
@@ -152,20 +153,16 @@ HRESULT CDXVADecoderVC1::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME
#endif
// Re-order B frames
- if (m_pFilter->IsReorderBFrame())
- {
- if (m_PictureParams.bPicBackwardPrediction == 1)
- {
+ if (m_pFilter->IsReorderBFrame()) {
+ if (m_PictureParams.bPicBackwardPrediction == 1) {
SwapRT (rtStart, m_rtStartDelayed);
SwapRT (rtStop, m_rtStopDelayed);
- }
- else
- {
+ } else {
// Save I or P reference time (swap later)
- if (!m_bFlushed)
- {
- if (m_nDelayedSurfaceIndex != -1)
+ if (!m_bFlushed) {
+ if (m_nDelayedSurfaceIndex != -1) {
UpdateStore (m_nDelayedSurfaceIndex, m_rtStartDelayed, m_rtStopDelayed);
+ }
m_rtStartDelayed = m_rtStopDelayed = _I64_MAX;
SwapRT (rtStart, m_rtStartDelayed);
SwapRT (rtStop, m_rtStopDelayed);
@@ -209,8 +206,8 @@ void CDXVADecoderVC1::SetExtraData (BYTE* pDataIn, UINT nSize)
// iWMV9 - i9IRU - iOHIT - iINSO - iWMVA - 0 - 0 - 0 | Section 3.2.5
m_PictureParams.bBidirectionalAveragingMode = (1 << 7) |
- (GetConfigIntraResidUnsigned() <<6) | // i9IRU
- (GetConfigResidDiffAccelerator() <<5); // iOHIT
+ (GetConfigIntraResidUnsigned() <<6) | // i9IRU
+ (GetConfigResidDiffAccelerator() <<5); // iOHIT
}
@@ -218,29 +215,20 @@ BYTE* CDXVADecoderVC1::FindNextStartCode(BYTE* pBuffer, UINT nSize, UINT& nPacke
{
BYTE* pStart = pBuffer;
BYTE bCode = 0;
- for (int i=0; i<nSize-4; i++)
- {
- if ( ((*((DWORD*)(pBuffer+i)) & 0x00FFFFFF) == 0x00010000) || (i >= nSize-5) )
- {
- if (bCode == 0)
- {
+ for (int i=0; i<nSize-4; i++) {
+ if ( ((*((DWORD*)(pBuffer+i)) & 0x00FFFFFF) == 0x00010000) || (i >= nSize-5) ) {
+ if (bCode == 0) {
bCode = pBuffer[i+3];
- if ((nSize == 5) && (bCode == 0x0D))
- {
+ if ((nSize == 5) && (bCode == 0x0D)) {
nPacketSize = nSize;
return pBuffer;
}
- }
- else
- {
- if (bCode == 0x0D)
- {
+ } else {
+ if (bCode == 0x0D) {
// Start code found!
nPacketSize = i - (pStart - pBuffer) + (i >= nSize-5 ? 5 : 1);
return pStart;
- }
- else
- {
+ } else {
// Other stuff, ignore it
pStart = pBuffer + i;
bCode = pBuffer[i+3];
@@ -257,8 +245,7 @@ void CDXVADecoderVC1::CopyBitstream(BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSiz
{
int nDummy;
- if ( (*((DWORD*)pBuffer) & 0x00FFFFFF) != 0x00010000)
- {
+ if ( (*((DWORD*)pBuffer) & 0x00FFFFFF) != 0x00010000) {
// Some splitter have remove startcode (Haali)
pDXVABuffer[0]=pDXVABuffer[1]=0;
pDXVABuffer[2]=1;
@@ -267,15 +254,12 @@ void CDXVADecoderVC1::CopyBitstream(BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSiz
// Copy bitstream buffer, with zero padding (buffer is rounded to multiple of 128)
memcpy (pDXVABuffer, (BYTE*)pBuffer, nSize);
nSize +=4;
- }
- else
- {
+ } else {
BYTE* pStart;
UINT nPacketSize;
pStart = FindNextStartCode (pBuffer, nSize, nPacketSize);
- if (pStart)
- {
+ if (pStart) {
// Startcode already present
memcpy (pDXVABuffer, (BYTE*)pStart, nPacketSize);
nSize = nPacketSize;
@@ -295,8 +279,12 @@ void CDXVADecoderVC1::Flush()
m_rtStartDelayed = _I64_MAX;
m_rtStopDelayed = _I64_MAX;
- if (m_wRefPictureIndex[0] != NO_REF_FRAME) RemoveRefFrame (m_wRefPictureIndex[0]);
- if (m_wRefPictureIndex[1] != NO_REF_FRAME) RemoveRefFrame (m_wRefPictureIndex[1]);
+ if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
+ RemoveRefFrame (m_wRefPictureIndex[0]);
+ }
+ if (m_wRefPictureIndex[1] != NO_REF_FRAME) {
+ RemoveRefFrame (m_wRefPictureIndex[1]);
+ }
m_wRefPictureIndex[0] = NO_REF_FRAME;
m_wRefPictureIndex[1] = NO_REF_FRAME;
@@ -311,8 +299,7 @@ HRESULT CDXVADecoderVC1::DisplayStatus()
memset (&Status, 0, sizeof(Status));
- if (SUCCEEDED (hr = CDXVADecoder::QueryStatus(&Status, sizeof(Status))))
- {
+ if (SUCCEEDED (hr = CDXVADecoder::QueryStatus(&Status, sizeof(Status)))) {
Status.StatusReportFeedbackNumber = 0x00FF & Status.StatusReportFeedbackNumber;
TRACE_VC1 ("CDXVADecoderVC1 : Status for the frame %u : bBufType = %u, bStatus = %u, wNumMbsAffected = %u\n",
diff --git a/src/filters/transform/MPCVideoDec/DXVADecoderVC1.h b/src/filters/transform/MPCVideoDec/DXVADecoderVC1.h
index d08a6ffea..6c2e1873c 100644
--- a/src/filters/transform/MPCVideoDec/DXVADecoderVC1.h
+++ b/src/filters/transform/MPCVideoDec/DXVADecoderVC1.h
@@ -39,38 +39,33 @@ public:
virtual void CopyBitstream (BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSize);
virtual void Flush();
- typedef enum
- {
+ typedef enum {
VC1_PS_TOP_FIELD = 1,
VC1_PS_BOTTOM_FIELD = 2,
VC1_PS_PROGRESSIVE = 3
};
- typedef enum
- {
+ typedef enum {
VC1_CHROMA_420 = 1,
VC1_CHROMA_422 = 2,
VC1_CHROMA_444 = 3
} VC1_CHROMA_FORMAT;
- typedef enum
- {
+ typedef enum {
VC1_CR_BICUBIC_QUARTER_CHROMA = 4,
VC1_CR_BICUBIC_HALF_CHROMA = 5,
VC1_CR_BILINEAR_QUARTER_CHROMA = 12,
VC1_CR_BILINEAR_HALF_CHROMA = 13,
};
- typedef enum
- {
+ typedef enum {
VC1_SCAN_ZIGZAG = 0,
VC1_SCAN_ALTERNATE_VERTICAL = 1,
VC1_SCAN_ALTERNATE_HORIZONTAL = 2,
VC1_SCAN_ARBITRARY = 3 // Use when bConfigHostInverseScan = 1
} VC1_PIC_SCAN_METHOD;
- typedef enum // Values for bPicDeblockConfined when bConfigBitstreamRaw = 1
- {
+ typedef enum { // Values for bPicDeblockConfined when bConfigBitstreamRaw = 1
VC1_EXTENDED_DMV = 0x0001,
VC1_PSF = 0x0002,
VC1_REFPICFLAG = 0x0004,
@@ -81,8 +76,7 @@ public:
VC1_POSTPROCFLAG = 0x0080
} VC1_DEBLOCK_CONFINED;
- typedef enum // Values for bPicSpatialResid8
- {
+ typedef enum { // Values for bPicSpatialResid8
VC1_VSTRANSFORM = 0x0001,
VC1_DQUANT = 0x0002,
VC1_EXTENDED_MV = 0x0004,
diff --git a/src/filters/transform/MPCVideoDec/FfmpegContext.c b/src/filters/transform/MPCVideoDec/FfmpegContext.c
index 596f0973d..08566ca83 100644
--- a/src/filters/transform/MPCVideoDec/FfmpegContext.c
+++ b/src/filters/transform/MPCVideoDec/FfmpegContext.c
@@ -42,15 +42,15 @@ int av_h264_decode_frame(struct AVCodecContext* avctx, int* nOutPOC, int64_t* rt
int av_vc1_decode_frame(AVCodecContext *avctx, uint8_t *buf, int buf_size);
void av_init_packet(AVPacket *pkt);
-const byte ZZ_SCAN[16] =
-{ 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15
+const byte ZZ_SCAN[16] = {
+ 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15
};
-const byte ZZ_SCAN8[64] =
-{ 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
- 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
- 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
- 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
+const byte ZZ_SCAN8[64] = {
+ 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
};
// FIXME : remove duplicate declaration with ffmpeg ??
@@ -75,16 +75,16 @@ BOOL IsVistaOrAbove()
static BOOL result = FALSE;
OSVERSIONINFO osver;
- if (!checked)
- {
+ if (!checked) {
checked = TRUE;
osver.dwOSVersionInfoSize = sizeof( OSVERSIONINFO );
if (GetVersionEx( &osver ) &&
- osver.dwPlatformId == VER_PLATFORM_WIN32_NT &&
- (osver.dwMajorVersion >= 6 ) )
+ osver.dwPlatformId == VER_PLATFORM_WIN32_NT &&
+ (osver.dwMajorVersion >= 6 ) ) {
result = TRUE;
+ }
}
return result;
@@ -103,16 +103,15 @@ inline MpegEncContext* GetMpegEncContext(struct AVCodecContext* pAVCtx)
Mpeg1Context* s1;
MpegEncContext* s = NULL;
- switch (pAVCtx->codec_id)
- {
- case CODEC_ID_VC1 :
- case CODEC_ID_H264 :
- s = (MpegEncContext*) pAVCtx->priv_data;
- break;
- case CODEC_ID_MPEG2VIDEO:
- s1 = (Mpeg1Context*)pAVCtx->priv_data;
- s = (MpegEncContext*)&s1->mpeg_enc_ctx;
- break;
+ switch (pAVCtx->codec_id) {
+ case CODEC_ID_VC1 :
+ case CODEC_ID_H264 :
+ s = (MpegEncContext*) pAVCtx->priv_data;
+ break;
+ case CODEC_ID_MPEG2VIDEO:
+ s1 = (Mpeg1Context*)pAVCtx->priv_data;
+ s = (MpegEncContext*)&s1->mpeg_enc_ctx;
+ break;
}
return s;
}
@@ -120,38 +119,29 @@ inline MpegEncContext* GetMpegEncContext(struct AVCodecContext* pAVCtx)
void FFH264DecodeBuffer (struct AVCodecContext* pAVCtx, BYTE* pBuffer, UINT nSize, int* pFramePOC, int* pOutPOC, REFERENCE_TIME* pOutrtStart)
{
- if (pBuffer != NULL)
- {
+ if (pBuffer != NULL) {
H264Context* h = (H264Context*) pAVCtx->priv_data;
av_h264_decode_frame (pAVCtx, pOutPOC, pOutrtStart, pBuffer, nSize);
- if (h->s.current_picture_ptr != NULL && pFramePOC) *pFramePOC = h->s.current_picture_ptr->field_poc[0];
+ if (h->s.current_picture_ptr != NULL && pFramePOC) {
+ *pFramePOC = h->s.current_picture_ptr->field_poc[0];
+ }
}
}
// returns TRUE if version is equal to or higher than A.B.C.D, returns FALSE otherwise
BOOL DriverVersionCheck(LARGE_INTEGER VideoDriverVersion, int A, int B, int C, int D)
{
- if (HIWORD(VideoDriverVersion.HighPart) > A)
- {
+ if (HIWORD(VideoDriverVersion.HighPart) > A) {
return TRUE;
- }
- else if (HIWORD(VideoDriverVersion.HighPart) == A)
- {
- if (LOWORD(VideoDriverVersion.HighPart) > B)
- {
+ } else if (HIWORD(VideoDriverVersion.HighPart) == A) {
+ if (LOWORD(VideoDriverVersion.HighPart) > B) {
return TRUE;
- }
- else if (LOWORD(VideoDriverVersion.HighPart) == B)
- {
- if (HIWORD(VideoDriverVersion.LowPart) > C)
- {
+ } else if (LOWORD(VideoDriverVersion.HighPart) == B) {
+ if (HIWORD(VideoDriverVersion.LowPart) > C) {
return TRUE;
- }
- else if (HIWORD(VideoDriverVersion.LowPart) == C)
- {
- if (LOWORD(VideoDriverVersion.LowPart) >= D)
- {
+ } else if (HIWORD(VideoDriverVersion.LowPart) == C) {
+ if (LOWORD(VideoDriverVersion.LowPart) >= D) {
return TRUE;
}
}
@@ -173,61 +163,45 @@ int FFH264CheckCompatibility(int nWidth, int nHeight, struct AVCodecContext* pAV
int max_ref_frames = 0;
int max_ref_frames_dpb41 = min(11, 8388608/(nWidth * nHeight) );
- if (pBuffer != NULL)
- {
+ if (pBuffer != NULL) {
av_h264_decode_frame (pAVCtx, NULL, NULL, pBuffer, nSize);
}
cur_sps = pContext->sps_buffers[0];
cur_pps = pContext->pps_buffers[0];
- if (cur_sps != NULL)
- {
+ if (cur_sps != NULL) {
video_is_level51 = cur_sps->level_idc >= 51 ? 1 : 0;
profile_higher_than_high = (cur_sps->profile_idc > 100);
max_ref_frames = max_ref_frames_dpb41; // default value is calculate
- if (nPCIVendor == PCIV_nVidia)
- {
+ if (nPCIVendor == PCIV_nVidia) {
// nVidia cards support level 5.1 since drivers v6.14.11.7800 for XP and drivers v7.15.11.7800 for Vista/7
- if (IsVistaOrAbove())
- {
- if (DriverVersionCheck(VideoDriverVersion, 7, 15, 11, 7800))
- {
+ if (IsVistaOrAbove()) {
+ if (DriverVersionCheck(VideoDriverVersion, 7, 15, 11, 7800)) {
no_level51_support = 0;
// max ref frames is 16 for HD and 11 otherwise
if(nWidth >= 1280) {
max_ref_frames = 16;
- }
- else
- {
+ } else {
max_ref_frames = 11;
}
}
- }
- else
- {
- if (DriverVersionCheck(VideoDriverVersion, 6, 14, 11, 7800))
- {
+ } else {
+ if (DriverVersionCheck(VideoDriverVersion, 6, 14, 11, 7800)) {
no_level51_support = 0;
// max ref frames is 14
max_ref_frames = 14;
}
}
- }
- else if (nPCIVendor == PCIV_S3_Graphics)
- {
+ } else if (nPCIVendor == PCIV_S3_Graphics) {
no_level51_support = 0;
- }
- else if (nPCIVendor == PCIV_ATI)
- {
+ } else if (nPCIVendor == PCIV_ATI) {
// HD4xxx and HD5xxx ATI cards support level 5.1 since drivers v8.14.1.6105 (Catalyst 10.4)
- if((nPCIDevice >> 8 == 0x68) || (nPCIDevice >> 8 == 0x94))
- {
- if (DriverVersionCheck(VideoDriverVersion, 8, 14, 1, 6105))
- {
+ if((nPCIDevice >> 8 == 0x68) || (nPCIDevice >> 8 == 0x94)) {
+ if (DriverVersionCheck(VideoDriverVersion, 8, 14, 1, 6105)) {
no_level51_support = 0;
max_ref_frames = 16;
}
@@ -235,8 +209,7 @@ int FFH264CheckCompatibility(int nWidth, int nHeight, struct AVCodecContext* pAV
}
// Check maximum allowed number reference frames
- if (cur_sps->ref_frame_count > max_ref_frames)
- {
+ if (cur_sps->ref_frame_count > max_ref_frames) {
too_much_ref_frames = 1;
}
}
@@ -249,37 +222,38 @@ void CopyScalingMatrix(DXVA_Qmatrix_H264* pDest, DXVA_Qmatrix_H264* pSource, int
{
int i,j;
- switch (nPCIVendor)
- {
- case PCIV_ATI :
- // The ATI way
- memcpy (pDest, pSource, sizeof (DXVA_Qmatrix_H264));
- break;
-
- default :
- // The nVidia way (and other manufacturers compliant with specifications....)
- for (i=0; i<6; i++)
- for (j=0; j<16; j++)
- pDest->bScalingLists4x4[i][j] = pSource->bScalingLists4x4[i][ZZ_SCAN[j]];
-
- for (i=0; i<2; i++)
- for (j=0; j<64; j++)
- pDest->bScalingLists8x8[i][j] = pSource->bScalingLists8x8[i][ZZ_SCAN8[j]];
- break;
+ switch (nPCIVendor) {
+ case PCIV_ATI :
+ // The ATI way
+ memcpy (pDest, pSource, sizeof (DXVA_Qmatrix_H264));
+ break;
+
+ default :
+ // The nVidia way (and other manufacturers compliant with specifications....)
+ for (i=0; i<6; i++)
+ for (j=0; j<16; j++) {
+ pDest->bScalingLists4x4[i][j] = pSource->bScalingLists4x4[i][ZZ_SCAN[j]];
+ }
+
+ for (i=0; i<2; i++)
+ for (j=0; j<64; j++) {
+ pDest->bScalingLists8x8[i][j] = pSource->bScalingLists8x8[i][ZZ_SCAN8[j]];
+ }
+ break;
}
}
USHORT FFH264FindRefFrameIndex(USHORT num_frame, DXVA_PicParams_H264* pDXVAPicParams)
{
int i;
- for (i=0; i<pDXVAPicParams->num_ref_frames; i++)
- {
- if (pDXVAPicParams->FrameNumList[i] == num_frame)
+ for (i=0; i<pDXVAPicParams->num_ref_frames; i++) {
+ if (pDXVAPicParams->FrameNumList[i] == num_frame) {
return pDXVAPicParams->RefFrameList[i].Index7Bits;
+ }
}
#ifdef _DEBUG
-// DebugBreak(); // Ref frame not found !
+ // DebugBreak(); // Ref frame not found !
#endif
return 127;
@@ -299,34 +273,33 @@ HRESULT FFH264BuildPicParams (DXVA_PicParams_H264* pDXVAPicParams, DXVA_Qmatrix_
cur_sps = &h->sps;
cur_pps = &h->pps;
- if (cur_sps && cur_pps)
- {
+ if (cur_sps && cur_pps) {
*nFieldType = h->s.picture_structure;
- if (h->sps.pic_struct_present_flag)
- {
- switch (h->sei_pic_struct)
- {
- case SEI_PIC_STRUCT_TOP_FIELD:
- case SEI_PIC_STRUCT_TOP_BOTTOM:
- case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
- *nFieldType = PICT_TOP_FIELD;
- break;
- case SEI_PIC_STRUCT_BOTTOM_FIELD:
- case SEI_PIC_STRUCT_BOTTOM_TOP:
- case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
- *nFieldType = PICT_BOTTOM_FIELD;
- break;
- case SEI_PIC_STRUCT_FRAME_DOUBLING:
- case SEI_PIC_STRUCT_FRAME_TRIPLING:
- case SEI_PIC_STRUCT_FRAME:
- *nFieldType = PICT_FRAME;
- break;
+ if (h->sps.pic_struct_present_flag) {
+ switch (h->sei_pic_struct) {
+ case SEI_PIC_STRUCT_TOP_FIELD:
+ case SEI_PIC_STRUCT_TOP_BOTTOM:
+ case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
+ *nFieldType = PICT_TOP_FIELD;
+ break;
+ case SEI_PIC_STRUCT_BOTTOM_FIELD:
+ case SEI_PIC_STRUCT_BOTTOM_TOP:
+ case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
+ *nFieldType = PICT_BOTTOM_FIELD;
+ break;
+ case SEI_PIC_STRUCT_FRAME_DOUBLING:
+ case SEI_PIC_STRUCT_FRAME_TRIPLING:
+ case SEI_PIC_STRUCT_FRAME:
+ *nFieldType = PICT_FRAME;
+ break;
}
}
*nSliceType = h->slice_type;
- if (cur_sps->mb_width==0 || cur_sps->mb_height==0) return VFW_E_INVALID_FILE_FORMAT;
+ if (cur_sps->mb_width==0 || cur_sps->mb_height==0) {
+ return VFW_E_INVALID_FILE_FORMAT;
+ }
pDXVAPicParams->wFrameWidthInMbsMinus1 = cur_sps->mb_width - 1; // pic_width_in_mbs_minus1;
pDXVAPicParams->wFrameHeightInMbsMinus1 = cur_sps->mb_height * (2 - cur_sps->frame_mbs_only_flag) - 1; // pic_height_in_map_units_minus1;
pDXVAPicParams->num_ref_frames = cur_sps->ref_frame_count; // num_ref_frames;
@@ -346,16 +319,16 @@ HRESULT FFH264BuildPicParams (DXVA_PicParams_H264* pDXVAPicParams, DXVA_Qmatrix_
pDXVAPicParams->bit_depth_luma_minus8 = cur_sps->bit_depth_luma - 8; // bit_depth_luma_minus8
pDXVAPicParams->bit_depth_chroma_minus8 = cur_sps->bit_depth_chroma - 8; // bit_depth_chroma_minus8
- // pDXVAPicParams->StatusReportFeedbackNumber = SET IN DecodeFrame;
+ // pDXVAPicParams->StatusReportFeedbackNumber = SET IN DecodeFrame;
- // pDXVAPicParams->CurrFieldOrderCnt = SET IN UpdateRefFramesList;
- // pDXVAPicParams->FieldOrderCntList = SET IN UpdateRefFramesList;
- // pDXVAPicParams->FrameNumList = SET IN UpdateRefFramesList;
- // pDXVAPicParams->UsedForReferenceFlags = SET IN UpdateRefFramesList;
- // pDXVAPicParams->NonExistingFrameFlags
+ // pDXVAPicParams->CurrFieldOrderCnt = SET IN UpdateRefFramesList;
+ // pDXVAPicParams->FieldOrderCntList = SET IN UpdateRefFramesList;
+ // pDXVAPicParams->FrameNumList = SET IN UpdateRefFramesList;
+ // pDXVAPicParams->UsedForReferenceFlags = SET IN UpdateRefFramesList;
+ // pDXVAPicParams->NonExistingFrameFlags
pDXVAPicParams->frame_num = h->frame_num;
- // pDXVAPicParams->SliceGroupMap
+ // pDXVAPicParams->SliceGroupMap
pDXVAPicParams->log2_max_frame_num_minus4 = cur_sps->log2_max_frame_num - 4; // log2_max_frame_num_minus4;
@@ -378,25 +351,19 @@ HRESULT FFH264BuildPicParams (DXVA_PicParams_H264* pDXVAPicParams, DXVA_Qmatrix_
pDXVAPicParams->pic_init_qp_minus26 = cur_pps->init_qp - 26;
pDXVAPicParams->pic_init_qs_minus26 = cur_pps->init_qs - 26;
- if (field_pic_flag)
- {
+ if (field_pic_flag) {
pDXVAPicParams->CurrPic.AssociatedFlag = (h->s.picture_structure == PICT_BOTTOM_FIELD);
- if (pDXVAPicParams->CurrPic.AssociatedFlag)
- {
+ if (pDXVAPicParams->CurrPic.AssociatedFlag) {
// Bottom field
pDXVAPicParams->CurrFieldOrderCnt[0] = 0;
pDXVAPicParams->CurrFieldOrderCnt[1] = h->poc_lsb + h->poc_msb;
- }
- else
- {
+ } else {
// Top field
pDXVAPicParams->CurrFieldOrderCnt[0] = h->poc_lsb + h->poc_msb;
pDXVAPicParams->CurrFieldOrderCnt[1] = 0;
}
- }
- else
- {
+ } else {
pDXVAPicParams->CurrPic.AssociatedFlag = 0;
pDXVAPicParams->CurrFieldOrderCnt[0] = h->poc_lsb + h->poc_msb;
pDXVAPicParams->CurrFieldOrderCnt[1] = h->poc_lsb + h->poc_msb;
@@ -416,8 +383,9 @@ void FFH264SetCurrentPicture (int nIndex, DXVA_PicParams_H264* pDXVAPicParams, s
pDXVAPicParams->CurrPic.Index7Bits = nIndex;
- if (h->s.current_picture_ptr)
+ if (h->s.current_picture_ptr) {
h->s.current_picture_ptr->opaque = (void*)nIndex;
+ }
}
@@ -429,51 +397,40 @@ void FFH264UpdateRefFramesList (DXVA_PicParams_H264* pDXVAPicParams, struct AVCo
Picture* pic;
UCHAR AssociatedFlag;
- for(i=0; i<16; i++)
- {
- if (i < h->short_ref_count)
- {
+ for(i=0; i<16; i++) {
+ if (i < h->short_ref_count) {
// Short list reference frames
pic = h->short_ref[h->short_ref_count - i - 1];
AssociatedFlag = pic->long_ref != 0;
- }
- else if (i >= h->short_ref_count && i < h->long_ref_count)
- {
+ } else if (i >= h->short_ref_count && i < h->long_ref_count) {
// Long list reference frames
pic = h->short_ref[h->short_ref_count + h->long_ref_count - i - 1];
AssociatedFlag = 1;
- }
- else
+ } else {
pic = NULL;
+ }
- if (pic != NULL)
- {
+ if (pic != NULL) {
pDXVAPicParams->FrameNumList[i] = pic->long_ref ? pic->pic_id : pic->frame_num;
- if (pic->field_poc[0] != INT_MAX)
- {
+ if (pic->field_poc[0] != INT_MAX) {
pDXVAPicParams->FieldOrderCntList[i][0] = pic->field_poc [0];
nUsedForReferenceFlags |= 1<<(i*2);
- }
- else
+ } else {
pDXVAPicParams->FieldOrderCntList[i][0] = 0;
+ }
- if (pic->field_poc[1] != INT_MAX)
- {
+ if (pic->field_poc[1] != INT_MAX) {
pDXVAPicParams->FieldOrderCntList[i][1] = pic->field_poc [1];
nUsedForReferenceFlags |= 2<<(i*2);
- }
- else
- {
+ } else {
pDXVAPicParams->FieldOrderCntList[i][1] = 0;
}
pDXVAPicParams->RefFrameList[i].AssociatedFlag = AssociatedFlag;
pDXVAPicParams->RefFrameList[i].Index7Bits = (UCHAR)pic->opaque;
- }
- else
- {
+ } else {
pDXVAPicParams->FrameNumList[i] = 0;
pDXVAPicParams->FieldOrderCntList[i][0] = 0;
pDXVAPicParams->FieldOrderCntList[i][1] = 0;
@@ -490,16 +447,16 @@ BOOL FFH264IsRefFrameInUse (int nFrameNum, struct AVCodecContext* pAVCtx)
H264Context* h = (H264Context*) pAVCtx->priv_data;
int i;
- for (i=0; i<h->short_ref_count; i++)
- {
- if ((int)h->short_ref[i]->opaque == nFrameNum)
+ for (i=0; i<h->short_ref_count; i++) {
+ if ((int)h->short_ref[i]->opaque == nFrameNum) {
return TRUE;
+ }
}
- for (i=0; i<h->long_ref_count; i++)
- {
- if ((int)h->long_ref[i]->opaque == nFrameNum)
+ for (i=0; i<h->long_ref_count; i++) {
+ if ((int)h->long_ref[i]->opaque == nFrameNum) {
return TRUE;
+ }
}
return FALSE;
@@ -513,8 +470,8 @@ void FF264UpdateRefFrameSliceLong(DXVA_PicParams_H264* pDXVAPicParams, DXVA_Slic
HRESULT hr = E_FAIL;
unsigned int i;
- for(i=0; i<32; i++)
- { pSlice->RefPicList[0][i].AssociatedFlag = 1;
+ for(i=0; i<32; i++) {
+ pSlice->RefPicList[0][i].AssociatedFlag = 1;
pSlice->RefPicList[0][i].bPicEntry = 255;
pSlice->RefPicList[0][i].Index7Bits = 127;
pSlice->RefPicList[1][i].AssociatedFlag = 1;
@@ -522,56 +479,54 @@ void FF264UpdateRefFrameSliceLong(DXVA_PicParams_H264* pDXVAPicParams, DXVA_Slic
pSlice->RefPicList[1][i].Index7Bits = 127;
}
- if(h->slice_type != FF_I_TYPE && h->slice_type != FF_SI_TYPE)
- {
+ if(h->slice_type != FF_I_TYPE && h->slice_type != FF_SI_TYPE) {
if(h->ref_count[0] > 0) {
for(i=0; i < h->ref_count[0]; i++) {
pSlice->RefPicList[0][i].Index7Bits = FFH264FindRefFrameIndex (h->ref_list[0][i].frame_num, pDXVAPicParams);
pSlice->RefPicList[0][i].AssociatedFlag = 0;
if((h->s.picture_structure != PICT_FRAME)) {
if((h->sei_pic_struct == SEI_PIC_STRUCT_BOTTOM_FIELD) ||
- (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM) ||
- (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)) {
+ (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM) ||
+ (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)) {
pSlice->RefPicList[0][i].AssociatedFlag = 1;
}
}
}
}
- }
- else
+ } else {
pSlice->num_ref_idx_l0_active_minus1 = 0;
+ }
- if(h->slice_type == FF_B_TYPE || h->slice_type == FF_S_TYPE || h->slice_type == FF_BI_TYPE)
- {
+ if(h->slice_type == FF_B_TYPE || h->slice_type == FF_S_TYPE || h->slice_type == FF_BI_TYPE) {
if(h->ref_count[1] > 0) {
for(i=0; i < h->ref_count[1]; i++) {
pSlice->RefPicList[1][i].Index7Bits = FFH264FindRefFrameIndex (h->ref_list[1][i].frame_num, pDXVAPicParams);
pSlice->RefPicList[1][i].AssociatedFlag = 0;
if((h->s.picture_structure != PICT_FRAME)) {
if((h->sei_pic_struct == SEI_PIC_STRUCT_BOTTOM_FIELD) ||
- (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM) ||
- (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)) {
+ (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM) ||
+ (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)) {
pSlice->RefPicList[1][i].AssociatedFlag = 1;
}
}
}
}
- }
- else
+ } else {
pSlice->num_ref_idx_l1_active_minus1 = 0;
+ }
- if(h->slice_type == FF_I_TYPE || h->slice_type == FF_SI_TYPE)
- {
- for(i = 0; i<16; i++)
+ if(h->slice_type == FF_I_TYPE || h->slice_type == FF_SI_TYPE) {
+ for(i = 0; i<16; i++) {
pSlice->RefPicList[0][i].bPicEntry = 0xff;
+ }
}
if(h->slice_type == FF_P_TYPE || h->slice_type == FF_I_TYPE ||
- h->slice_type ==FF_SP_TYPE || h->slice_type == FF_SI_TYPE)
- {
- for(i = 0; i < 16; i++)
- pSlice->RefPicList[1][i].bPicEntry = 0xff;
+ h->slice_type ==FF_SP_TYPE || h->slice_type == FF_SI_TYPE) {
+ for(i = 0; i < 16; i++) {
+ pSlice->RefPicList[1][i].bPicEntry = 0xff;
+ }
}
}
@@ -585,16 +540,16 @@ HRESULT FFVC1UpdatePictureParam (DXVA_PictureParameters* pPicParams, struct AVCo
{
VC1Context* vc1 = (VC1Context*) pAVCtx->priv_data;
- if (pBuffer)
- {
+ if (pBuffer) {
av_vc1_decode_frame (pAVCtx, pBuffer, nSize);
}
// WARNING : vc1->interlace is not reliable (always set for progressive video on HD-DVD material)
- if (vc1->fcm == 0)
+ if (vc1->fcm == 0) {
*nFieldType = PICT_FRAME;
- else // fcm : 2 or 3 frame or field interlaced
+ } else { // fcm : 2 or 3 frame or field interlaced
*nFieldType = (vc1->tff ? PICT_TOP_FIELD : PICT_BOTTOM_FIELD);
+ }
pPicParams->bPicIntra = (vc1->s.pict_type == FF_I_TYPE);
pPicParams->bPicBackwardPrediction = (vc1->s.pict_type == FF_B_TYPE);
@@ -602,8 +557,8 @@ HRESULT FFVC1UpdatePictureParam (DXVA_PictureParameters* pPicParams, struct AVCo
// Init Init Init Todo
// iWMV9 - i9IRU - iOHIT - iINSO - iWMVA - 0 - 0 - 0 | Section 3.2.5
pPicParams->bBidirectionalAveragingMode = (pPicParams->bBidirectionalAveragingMode & 0xE0) | // init in SetExtraData
- ((vc1->lumshift!=0 || vc1->lumscale!=32) ? 0x10 : 0)| // iINSO
- ((vc1->profile == PROFILE_ADVANCED) <<3 ); // iWMVA
+ ((vc1->lumshift!=0 || vc1->lumscale!=32) ? 0x10 : 0)| // iINSO
+ ((vc1->profile == PROFILE_ADVANCED) <<3 ); // iWMVA
// Section 3.2.20.3
pPicParams->bPicSpatialResid8 = (vc1->panscanflag << 7) | (vc1->refdist_flag << 6) |
@@ -637,9 +592,9 @@ HRESULT FFVC1UpdatePictureParam (DXVA_PictureParameters* pPicParams, struct AVCo
// TODO : not finish...
pPicParams->bMVprecisionAndChromaRelation = ((vc1->mv_mode == MV_PMODE_1MV_HPEL_BILIN) << 3) | // 0 for non-bilinear luma motion, 1 for bilinear
- (1 << 2) | // 0 for WMV8, 1 for WMV9 motion
- (0 << 1) | // 1 for WMV8 quarter sample luma motion
- (0); // 0 for quarter sample chroma motion, 1 for half sample chroma
+ (1 << 2) | // 0 for WMV8, 1 for WMV9 motion
+ (0 << 1) | // 1 for WMV8 quarter sample luma motion
+ (0); // 0 for quarter sample chroma motion, 1 for half sample chroma
// Cf §7.1.1.25 in VC1 specification, §3.2.14.3 in DXVA spec
pPicParams->bRcontrol = vc1->rnd;
@@ -660,11 +615,10 @@ HRESULT FFMpeg2DecodeFrame (DXVA_PictureParameters* pPicParams, DXVA_QmatrixData
int got_picture = 0;
Mpeg1Context* s1 = (Mpeg1Context*)pAVCtx->priv_data;
MpegEncContext* s = (MpegEncContext*)&s1->mpeg_enc_ctx;
- AVPacket avpkt;
+ AVPacket avpkt;
- if (pBuffer)
- {
- av_init_packet(&avpkt);
+ if (pBuffer) {
+ av_init_packet(&avpkt);
avpkt.data = pBuffer;
avpkt.size = nSize;
// HACK for CorePNG to decode as normal PNG by default
@@ -718,13 +672,13 @@ HRESULT FFMpeg2DecodeFrame (DXVA_PictureParameters* pPicParams, DXVA_QmatrixData
// pPicParams->bReservedBits; // ??
pPicParams->wBitstreamFcodes = (s->mpeg_f_code[0][0]<<12) | (s->mpeg_f_code[0][1]<<8) |
- (s->mpeg_f_code[1][0]<<4) | (s->mpeg_f_code[1][1]);
+ (s->mpeg_f_code[1][0]<<4) | (s->mpeg_f_code[1][1]);
pPicParams->wBitstreamPCEelements = (s->intra_dc_precision<<14) | (s->picture_structure<<12) |
- (s->top_field_first<<11) | (s->frame_pred_frame_dct<<10)|
- (s->concealment_motion_vectors<<9) | (s->q_scale_type<<8)|
- (s->intra_vlc_format<<7) | (s->alternate_scan<<6)|
- (s->repeat_first_field<<5) | (s->chroma_420_type<<4)|
- (s->progressive_frame<<3);
+ (s->top_field_first<<11) | (s->frame_pred_frame_dct<<10)|
+ (s->concealment_motion_vectors<<9) | (s->q_scale_type<<8)|
+ (s->intra_vlc_format<<7) | (s->alternate_scan<<6)|
+ (s->repeat_first_field<<5) | (s->chroma_420_type<<4)|
+ (s->progressive_frame<<3);
// TODO : could be interesting to parameter concealment method?
// pPicParams->bBitstreamConcealmentNeed;
@@ -734,16 +688,16 @@ HRESULT FFMpeg2DecodeFrame (DXVA_PictureParameters* pPicParams, DXVA_QmatrixData
pQMatrixData->bNewQmatrix[1] = 1;
pQMatrixData->bNewQmatrix[2] = 1;
pQMatrixData->bNewQmatrix[3] = 1;
- for (i=0; i<64; i++) // intra Y, inter Y, intra chroma, inter chroma
- {
+ for (i=0; i<64; i++) { // intra Y, inter Y, intra chroma, inter chroma
pQMatrixData->Qmatrix[0][i] = s->intra_matrix[ZZ_SCAN8[i]];
pQMatrixData->Qmatrix[1][i] = s->inter_matrix[ZZ_SCAN8[i]];
pQMatrixData->Qmatrix[2][i] = s->chroma_intra_matrix[ZZ_SCAN8[i]];
pQMatrixData->Qmatrix[3][i] = s->chroma_inter_matrix[ZZ_SCAN8[i]];
}
- if (got_picture)
+ if (got_picture) {
*nNextCodecIndex = pFrame->coded_picture_number;
+ }
return S_OK;
}
@@ -764,18 +718,16 @@ int FFIsSkipped(struct AVCodecContext* pAVCtx)
int FFIsInterlaced(struct AVCodecContext* pAVCtx, int nHeight)
{
- if (pAVCtx->codec_id == CODEC_ID_H264)
- {
+ if (pAVCtx->codec_id == CODEC_ID_H264) {
H264Context* h = (H264Context*) pAVCtx->priv_data;
SPS* cur_sps = h->sps_buffers[0];
- if (cur_sps && !cur_sps->frame_mbs_only_flag)
+ if (cur_sps && !cur_sps->frame_mbs_only_flag) {
return 1;
- else
+ } else {
return 0;
- }
- else if (pAVCtx->codec_id == CODEC_ID_VC1)
- {
+ }
+ } else if (pAVCtx->codec_id == CODEC_ID_VC1) {
VC1Context* vc1 = (VC1Context*) pAVCtx->priv_data;
return vc1->interlace;
}
@@ -785,25 +737,24 @@ int FFIsInterlaced(struct AVCodecContext* pAVCtx, int nHeight)
void FFSetThreadNumber(struct AVCodecContext* pAVCtx, int nThreadCount)
{
- if (pAVCtx->thread_count > 1)
- {
+ if (pAVCtx->thread_count > 1) {
avcodec_thread_free (pAVCtx);
pAVCtx->thread_count = 1;
}
- if (nThreadCount > 1)
+ if (nThreadCount > 1) {
avcodec_thread_init(pAVCtx, nThreadCount);
+ }
}
BOOL FFSoftwareCheckCompatibility(struct AVCodecContext* pAVCtx)
{
- if (pAVCtx->codec_id == CODEC_ID_VC1)
- {
+ if (pAVCtx->codec_id == CODEC_ID_VC1) {
VC1Context* vc1 = (VC1Context*) pAVCtx->priv_data;
return !vc1->interlace;
- }
- else
+ } else {
return TRUE;
+ }
}
diff --git a/src/filters/transform/MPCVideoDec/FfmpegContext.h b/src/filters/transform/MPCVideoDec/FfmpegContext.h
index 958b9a16d..1477d329b 100644
--- a/src/filters/transform/MPCVideoDec/FfmpegContext.h
+++ b/src/filters/transform/MPCVideoDec/FfmpegContext.h
@@ -27,8 +27,7 @@
struct AVCodecContext;
struct AVFrame;
-enum PCI_Vendors
-{
+enum PCI_Vendors {
PCIV_ATI = 0x1002,
PCIV_nVidia = 0x10DE,
PCIV_Intel = 0x8086,
diff --git a/src/filters/transform/MPCVideoDec/H264QuantizationMatrix.h b/src/filters/transform/MPCVideoDec/H264QuantizationMatrix.h
index b05941919..011363214 100644
--- a/src/filters/transform/MPCVideoDec/H264QuantizationMatrix.h
+++ b/src/filters/transform/MPCVideoDec/H264QuantizationMatrix.h
@@ -22,8 +22,7 @@
#pragma once
-typedef enum
-{
+typedef enum {
Flat16,
JVTMatrix,
QMatrix,
@@ -31,59 +30,64 @@ typedef enum
} QMatrixH264Type;
-const byte ZZ_SCAN[16] =
-{ 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15
+const byte ZZ_SCAN[16] = {
+ 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15
};
-const byte ZZ_SCAN8[64] =
-{ 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
- 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
- 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
- 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
+const byte ZZ_SCAN8[64] = {
+ 0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
};
-const DXVA_Qmatrix_H264 g_QMatrixH264[] =
-{
+const DXVA_Qmatrix_H264 g_QMatrixH264[] = {
// 1) #flat 16
{
{
// INTRA4X4_LUMA =
- { 16,16,16,16,
+ {
+ 16,16,16,16,
16,16,16,16,
16,16,16,16,
16,16,16,16
},
// INTRA4X4_CHROMAU =
- { 16,16,16,16,
+ {
+ 16,16,16,16,
16,16,16,16,
16,16,16,16,
16,16,16,16
},
// INTRA4X4_CHROMAV =
- { 16,16,16,16,
+ {
+ 16,16,16,16,
16,16,16,16,
16,16,16,16,
16,16,16,16
},
// INTER4X4_LUMA =
- { 16,16,16,16,
+ {
+ 16,16,16,16,
16,16,16,16,
16,16,16,16,
16,16,16,16
},
// INTER4X4_CHROMAU =
- { 16,16,16,16,
+ {
+ 16,16,16,16,
16,16,16,16,
16,16,16,16,
16,16,16,16
},
// INTER4X4_CHROMAV =
- { 16,16,16,16,
+ {
+ 16,16,16,16,
16,16,16,16,
16,16,16,16,
16,16,16,16
@@ -91,7 +95,8 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
},
{
// INTRA8X8_LUMA =
- { 16,16,16,16,16,16,16,16,
+ {
+ 16,16,16,16,16,16,16,16,
16,16,16,16,16,16,16,16,
16,16,16,16,16,16,16,16,
16,16,16,16,16,16,16,16,
@@ -102,7 +107,8 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
},
// INTER8X8_LUMA =
- { 16,16,16,16,16,16,16,16,
+ {
+ 16,16,16,16,16,16,16,16,
16,16,16,16,16,16,16,16,
16,16,16,16,16,16,16,16,
16,16,16,16,16,16,16,16,
@@ -117,42 +123,48 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
{
{
// INTRA4X4_LUMA
- { 6,13,20,28,
+ {
+ 6,13,20,28,
13,20,28,32,
20,28,32,37,
28,32,37,42
},
// INTRA4X4_CHROMAU
- { 6,13,20,28,
+ {
+ 6,13,20,28,
13,20,28,32,
20,28,32,37,
28,32,37,42
},
// INTRA4X4_CHROMAV
- { 6,13,20,28,
+ {
+ 6,13,20,28,
13,20,28,32,
20,28,32,37,
28,32,37,42
},
// INTER4X4_LUMA
- { 10,14,20,24,
+ {
+ 10,14,20,24,
14,20,24,27,
20,24,27,30,
24,27,30,34
},
// INTER4X4_CHROMAU
- { 10,14,20,24,
+ {
+ 10,14,20,24,
14,20,24,27,
20,24,27,30,
24,27,30,34
},
// INTER4X4_CHROMAV
- { 10,14,20,24,
+ {
+ 10,14,20,24,
14,20,24,27,
20,24,27,30,
24,27,30,34
@@ -160,7 +172,8 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
},
{
// INTRA8X8_LUMA
- { 6,10,13,16,18,23,25,27,
+ {
+ 6,10,13,16,18,23,25,27,
10,11,16,18,23,25,27,29,
13,16,18,23,25,27,29,31,
16,18,23,25,27,29,31,33,
@@ -171,7 +184,8 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
},
// INTER8X8_LUMA
- { 9,13,15,17,19,21,22,24,
+ {
+ 9,13,15,17,19,21,22,24,
13,13,17,19,21,22,24,25,
15,17,19,21,22,24,25,27,
17,19,21,22,24,25,27,28,
@@ -187,42 +201,48 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
{
{
// INTRA4X4_LUMA =
- { 6,12,19,26,
+ {
+ 6,12,19,26,
12,19,26,31,
19,26,31,35,
26,31,35,39
},
// INTRA4X4_CHROMAU =
- { 6,12,19,26,
+ {
+ 6,12,19,26,
12,19,26,31,
19,26,31,35,
26,31,35,39
},
// INTRA4X4_CHROMAV =
- { 6,12,19,26,
+ {
+ 6,12,19,26,
12,19,26,31,
19,26,31,35,
26,31,35,39
},
// INTER4X4_LUMA =
- { 9,13,18,21,
+ {
+ 9,13,18,21,
13,18,21,24,
18,21,24,27,
21,24,27,30
},
// INTER4X4_CHROMAU =
- { 9,13,18,21,
+ {
+ 9,13,18,21,
13,18,21,24,
18,21,24,27,
21,24,27,30
},
// INTER4X4_CHROMAV =
- { 9,13,18,21,
+ {
+ 9,13,18,21,
13,18,21,24,
18,21,24,27,
21,24,27,30
@@ -230,7 +250,8 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
},
{
// INTRA8X8_LUMA =
- { 6,10,13,16,19,24,26,28,
+ {
+ 6,10,13,16,19,24,26,28,
10,12,16,19,24,26,28,31,
13,16,19,24,26,28,31,33,
16,19,24,26,28,31,33,35,
@@ -241,7 +262,8 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
},
// INTER8X8_LUMA =
- { 9,12,14,16,18,19,21,22,
+ {
+ 9,12,14,16,18,19,21,22,
12,13,16,18,19,21,22,24,
14,16,18,19,21,22,24,25,
16,18,19,21,22,24,25,27,
@@ -257,42 +279,48 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
{
{
// INTRA4X4_LUMA =
- { 7,16,22,24,
+ {
+ 7,16,22,24,
16,22,24,28,
18,22,27,33,
22,24,32,47
},
// INTRA4X4_CHROMAU =
- { 7,16,22,24,
+ {
+ 7,16,22,24,
16,22,24,28,
18,22,27,33,
22,24,32,47
},
// INTRA4X4_CHROMAV =
- { 7,16,22,24,
+ {
+ 7,16,22,24,
16,22,24,28,
18,22,27,33,
22,24,32,47
},
// INTER4X4_LUMA =
- { 13,15,17,18,
+ {
+ 13,15,17,18,
15,17,18,20,
17,18,21,22,
18,20,22,25
},
// INTER4X4_CHROMAU =
- { 13,15,17,18,
+ {
+ 13,15,17,18,
15,17,18,20,
17,18,21,22,
18,20,22,25
},
// INTER4X4_CHROMAV =
- { 13,15,17,18,
+ {
+ 13,15,17,18,
15,17,18,20,
17,18,21,22,
18,20,22,25
@@ -300,7 +328,8 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
},
{
// INTRA8X8_LUMA =
- { 7,13,16,18,22,22,24,28,
+ {
+ 7,13,16,18,22,22,24,28,
13,13,18,20,22,24,28,31,
16,18,22,22,24,28,28,32,
18,18,22,22,24,28,31,33,
@@ -311,7 +340,8 @@ const DXVA_Qmatrix_H264 g_QMatrixH264[] =
},
// INTER8X8_LUMA =
- { 13,14,15,16,17,17,18,19,
+ {
+ 13,14,15,16,17,17,18,19,
14,15,16,17,17,18,19,20,
15,16,17,17,18,19,20,21,
16,17,17,18,19,20,21,22,
diff --git a/src/filters/transform/MPCVideoDec/IMPCVideoDecFilter.h b/src/filters/transform/MPCVideoDec/IMPCVideoDecFilter.h
index 45b204c33..dfd8ad5ec 100644
--- a/src/filters/transform/MPCVideoDec/IMPCVideoDecFilter.h
+++ b/src/filters/transform/MPCVideoDec/IMPCVideoDecFilter.h
@@ -24,8 +24,7 @@
#pragma once
// Internal codec list (use to enable/disable codec in standalone mode)
-typedef enum
-{
+typedef enum {
MPCVD_H264 = 1,
MPCVD_VC1 = MPCVD_H264<<1,
MPCVD_XVID = MPCVD_VC1<<1,
@@ -45,8 +44,7 @@ typedef enum
interface __declspec(uuid("CDC3B5B3-A8B0-4c70-A805-9FC80CDEF262"))
IMPCVideoDecFilter :
-public IUnknown
-{
+public IUnknown {
STDMETHOD(Apply()) = 0;
STDMETHOD(SetThreadNumber(int nValue)) = 0;
diff --git a/src/filters/transform/MPCVideoDec/MPCAudioDecFilter.cpp b/src/filters/transform/MPCVideoDec/MPCAudioDecFilter.cpp
index ce310b302..b8c5891d2 100644
--- a/src/filters/transform/MPCVideoDec/MPCAudioDecFilter.cpp
+++ b/src/filters/transform/MPCVideoDec/MPCAudioDecFilter.cpp
@@ -37,29 +37,25 @@
#include <moreuuids.h>
-typedef struct
-{
+typedef struct {
const CLSID* clsMinorType;
const enum CodecID nFFCodec;
const int fourcc;
} FFMPEG_CODECS;
-const FFMPEG_CODECS ffCodecs[] =
-{
+const FFMPEG_CODECS ffCodecs[] = {
// AMVA
{ &MEDIASUBTYPE_IMA_AMV, CODEC_ID_ADPCM_IMA_AMV, MAKEFOURCC('A','M','V','A') },
};
-const AMOVIESETUP_MEDIATYPE CMPCAudioDecFilter::sudPinTypesIn[] =
-{
+const AMOVIESETUP_MEDIATYPE CMPCAudioDecFilter::sudPinTypesIn[] = {
{ &MEDIATYPE_Audio, &MEDIASUBTYPE_IMA_AMV },
};
const int CMPCAudioDecFilter::sudPinTypesInCount = countof(CMPCAudioDecFilter::sudPinTypesIn);
-const AMOVIESETUP_MEDIATYPE CMPCAudioDecFilter::sudPinTypesOut[] =
-{
+const AMOVIESETUP_MEDIATYPE CMPCAudioDecFilter::sudPinTypesOut[] = {
{&MEDIATYPE_Audio, &MEDIASUBTYPE_PCM}
};
const int CMPCAudioDecFilter::sudPinTypesOutCount = countof(CMPCAudioDecFilter::sudPinTypesOut);
@@ -68,10 +64,16 @@ const int CMPCAudioDecFilter::sudPinTypesOutCount = countof(CMPCAudioDecFilter::
CMPCAudioDecFilter::CMPCAudioDecFilter(LPUNKNOWN lpunk, HRESULT* phr)
: CTransformFilter(NAME("CMPCAudioDecFilter"), lpunk, __uuidof(this))
{
- if(!(m_pInput = new CTransformInputPin(NAME("CAudioDecInputPin"), this, phr, L"In"))) *phr = E_OUTOFMEMORY;
- if(FAILED(*phr)) return;
+ if(!(m_pInput = new CTransformInputPin(NAME("CAudioDecInputPin"), this, phr, L"In"))) {
+ *phr = E_OUTOFMEMORY;
+ }
+ if(FAILED(*phr)) {
+ return;
+ }
- if(!(m_pOutput = new CTransformOutputPin(NAME("CAudioDecOutputPin"), this, phr, L"Out"))) *phr = E_OUTOFMEMORY;
+ if(!(m_pOutput = new CTransformOutputPin(NAME("CAudioDecOutputPin"), this, phr, L"Out"))) {
+ *phr = E_OUTOFMEMORY;
+ }
if(FAILED(*phr)) {
delete m_pInput, m_pInput = NULL;
return;
@@ -97,12 +99,13 @@ CMPCAudioDecFilter::~CMPCAudioDecFilter(void)
void CMPCAudioDecFilter::Cleanup()
{
- if (m_pAVCtx)
- {
+ if (m_pAVCtx) {
avcodec_thread_free (m_pAVCtx);
av_free(m_pAVCtx);
}
- if (m_pFrame) av_free(m_pFrame);
+ if (m_pFrame) {
+ av_free(m_pFrame);
+ }
m_pAVCodec = NULL;
m_pAVCtx = NULL;
@@ -124,9 +127,9 @@ void CMPCAudioDecFilter::LogLibAVCodec(void* par,int level,const char *fmt,va_li
STDMETHODIMP CMPCAudioDecFilter::NonDelegatingQueryInterface(REFIID riid, void** ppv)
{
return
-// QI(IMPCVideoDecFilter)
-// QI(ISpecifyPropertyPages)
-// QI(ISpecifyPropertyPages2)
+ // QI(IMPCVideoDecFilter)
+ // QI(ISpecifyPropertyPages)
+ // QI(ISpecifyPropertyPages2)
__super::NonDelegatingQueryInterface(riid, ppv);
}
@@ -146,11 +149,11 @@ HRESULT CMPCAudioDecFilter::DecideBufferSize(IMemAllocator* pAllocator, ALLOCATO
HRESULT CMPCAudioDecFilter::CheckInputType(const CMediaType* mtIn)
{
- for (int i=0; i<sizeof(sudPinTypesIn)/sizeof(AMOVIESETUP_MEDIATYPE); i++)
- {
+ for (int i=0; i<sizeof(sudPinTypesIn)/sizeof(AMOVIESETUP_MEDIATYPE); i++) {
if ((mtIn->majortype == *sudPinTypesIn[i].clsMajorType) &&
- (mtIn->subtype == *sudPinTypesIn[i].clsMinorType))
+ (mtIn->subtype == *sudPinTypesIn[i].clsMinorType)) {
return S_OK;
+ }
}
return VFW_E_TYPE_NOT_ACCEPTED;
@@ -158,10 +161,16 @@ HRESULT CMPCAudioDecFilter::CheckInputType(const CMediaType* mtIn)
HRESULT CMPCAudioDecFilter::GetMediaType(int iPosition, CMediaType* pmt)
{
- if(m_pInput->IsConnected() == FALSE) return E_UNEXPECTED;
+ if(m_pInput->IsConnected() == FALSE) {
+ return E_UNEXPECTED;
+ }
- if(iPosition < 0) return E_INVALIDARG;
- if(iPosition > 0) return VFW_S_NO_MORE_ITEMS;
+ if(iPosition < 0) {
+ return E_INVALIDARG;
+ }
+ if(iPosition > 0) {
+ return VFW_S_NO_MORE_ITEMS;
+ }
CMediaType mt = m_pInput->CurrentMediaType();
const GUID& subtype = mt.subtype;
@@ -178,7 +187,7 @@ HRESULT CMPCAudioDecFilter::GetMediaType(int iPosition, CMediaType* pmt)
//}
//else
//{
- *pmt = CreateMediaType(GetSampleFormat(), wfe->nSamplesPerSec, min(2, wfe->nChannels));
+ *pmt = CreateMediaType(GetSampleFormat(), wfe->nSamplesPerSec, min(2, wfe->nChannels));
//}
return S_OK;
@@ -193,12 +202,10 @@ HRESULT CMPCAudioDecFilter::CompleteConnect(PIN_DIRECTION direction, IPin* pRece
{
int nNewCodec;
- if (direction == PINDIR_OUTPUT)
- {
+ if (direction == PINDIR_OUTPUT) {
CMediaType& mt = m_pInput->CurrentMediaType();
nNewCodec = FindCodec(&mt);
- if ((direction == PINDIR_OUTPUT) && (nNewCodec != -1) && (nNewCodec != m_nCodecNb))
- {
+ if ((direction == PINDIR_OUTPUT) && (nNewCodec != -1) && (nNewCodec != m_nCodecNb)) {
WAVEFORMATEX* wfex = (WAVEFORMATEX*) mt.pbFormat;
Cleanup();
@@ -256,8 +263,9 @@ HRESULT CMPCAudioDecFilter::CompleteConnect(PIN_DIRECTION direction, IPin* pRece
}
*/
- if (avcodec_open(m_pAVCtx, m_pAVCodec)<0)
+ if (avcodec_open(m_pAVCtx, m_pAVCodec)<0) {
return VFW_E_INVALIDMEDIATYPE;
+ }
}
}
@@ -321,15 +329,16 @@ HRESULT CMPCAudioDecFilter::Transform(IMediaSample* pIn)
int CMPCAudioDecFilter::FindCodec(const CMediaType* mtIn)
{
for (int i=0; i<countof(ffCodecs); i++)
- if (mtIn->subtype == *ffCodecs[i].clsMinorType)
+ if (mtIn->subtype == *ffCodecs[i].clsMinorType) {
return i;
+ }
return -1;
}
STDMETHODIMP_(SampleFormat) CMPCAudioDecFilter::GetSampleFormat()
{
-// CAutoLock cAutoLock(&m_csProps);
+ // CAutoLock cAutoLock(&m_csProps);
return m_iSampleFormat;
}
@@ -347,26 +356,25 @@ CMediaType CMPCAudioDecFilter::CreateMediaType(SampleFormat sf, DWORD nSamplesPe
wfe->wFormatTag = (WORD)mt.subtype.Data1;
wfe->nChannels = nChannels;
wfe->nSamplesPerSec = nSamplesPerSec;
- switch(sf)
- {
- default:
- case SAMPLE_FMT_S16:
- wfe->wBitsPerSample = 16;
- break;
- case SAMPLE_FMT_S32:
- case SAMPLE_FMT_FLT:
- wfe->wBitsPerSample = 32;
- break;
+ switch(sf) {
+ default:
+ case SAMPLE_FMT_S16:
+ wfe->wBitsPerSample = 16;
+ break;
+ case SAMPLE_FMT_S32:
+ case SAMPLE_FMT_FLT:
+ wfe->wBitsPerSample = 32;
+ break;
}
wfe->nBlockAlign = wfe->nChannels*wfe->wBitsPerSample/8;
wfe->nAvgBytesPerSec = wfe->nSamplesPerSec*wfe->nBlockAlign;
// FIXME: 32 bit only seems to work with WAVE_FORMAT_EXTENSIBLE
- if(dwChannelMask == 0 && (sf == SAMPLE_FMT_S32))
+ if(dwChannelMask == 0 && (sf == SAMPLE_FMT_S32)) {
dwChannelMask = nChannels == 2 ? (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT) : SPEAKER_FRONT_CENTER;
+ }
- if(dwChannelMask)
- {
+ if(dwChannelMask) {
wfex.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
wfex.Format.cbSize = sizeof(wfex) - sizeof(wfex.Format);
wfex.dwChannelMask = dwChannelMask;
diff --git a/src/filters/transform/MPCVideoDec/MPCAudioDecFilter.h b/src/filters/transform/MPCVideoDec/MPCAudioDecFilter.h
index 938870ae1..ea1737c6c 100644
--- a/src/filters/transform/MPCVideoDec/MPCAudioDecFilter.h
+++ b/src/filters/transform/MPCVideoDec/MPCAudioDecFilter.h
@@ -49,7 +49,7 @@ public:
HRESULT CheckTransform(const CMediaType* mtIn, const CMediaType* mtOut);
HRESULT DecideBufferSize(IMemAllocator* pAllocator, ALLOCATOR_PROPERTIES* pProperties);
-// HRESULT DecideBufferSize(IMemAllocator* pAllocator, ALLOCATOR_PROPERTIES* pProperties);
+ // HRESULT DecideBufferSize(IMemAllocator* pAllocator, ALLOCATOR_PROPERTIES* pProperties);
HRESULT CMPCAudioDecFilter::CompleteConnect(PIN_DIRECTION direction, IPin* pReceivePin);
STDMETHODIMP_(SampleFormat) GetSampleFormat();
diff --git a/src/filters/transform/MPCVideoDec/MPCFfmpegDec.cpp b/src/filters/transform/MPCVideoDec/MPCFfmpegDec.cpp
index 41ab0c7af..bbfd7beee 100644
--- a/src/filters/transform/MPCVideoDec/MPCFfmpegDec.cpp
+++ b/src/filters/transform/MPCVideoDec/MPCFfmpegDec.cpp
@@ -29,21 +29,18 @@
// Workaround: graphedit crashes when a filter exposes more than 115 input MediaTypes!
-const AMOVIESETUP_PIN sudpPinsVideoDec[] =
-{
+const AMOVIESETUP_PIN sudpPinsVideoDec[] = {
{L"Input", FALSE, FALSE, FALSE, FALSE, &CLSID_NULL, NULL, CMPCVideoDecFilter::sudPinTypesInCount > 115 ? 115 : CMPCVideoDecFilter::sudPinTypesInCount, CMPCVideoDecFilter::sudPinTypesIn},
{L"Output", FALSE, TRUE, FALSE, FALSE, &CLSID_NULL, NULL, CMPCVideoDecFilter::sudPinTypesOutCount, CMPCVideoDecFilter::sudPinTypesOut}
};
-const AMOVIESETUP_FILTER sudFilters[] =
-{
+const AMOVIESETUP_FILTER sudFilters[] = {
{&__uuidof(CMPCVideoDecFilter), L"MPC - Video decoder", /*MERIT_DO_NOT_USE*/0x40000001, countof(sudpPinsVideoDec), sudpPinsVideoDec, CLSID_LegacyAmFilterCategory}
};
-CFactoryTemplate g_Templates[] =
-{
+CFactoryTemplate g_Templates[] = {
{sudFilters[0].strName, &__uuidof(CMPCVideoDecFilter), CreateInstance<CMPCVideoDecFilter>, NULL, &sudFilters[0]},
{L"CMPCVideoDecPropertyPage", &__uuidof(CMPCVideoDecSettingsWnd), CreateInstance<CInternalPropertyPageTempl<CMPCVideoDecSettingsWnd> >},
{L"CMPCVideoDecPropertyPage2", &__uuidof(CMPCVideoDecCodecWnd), CreateInstance<CInternalPropertyPageTempl<CMPCVideoDecCodecWnd> >},
diff --git a/src/filters/transform/MPCVideoDec/MPCVideoDecFilter.cpp b/src/filters/transform/MPCVideoDec/MPCVideoDecFilter.cpp
index 1832b7802..27932d76a 100644
--- a/src/filters/transform/MPCVideoDec/MPCVideoDecFilter.cpp
+++ b/src/filters/transform/MPCVideoDec/MPCVideoDecFilter.cpp
@@ -52,35 +52,34 @@ extern "C"
#define ROUND_FRAMERATE(var,FrameRate) if (labs ((long)(var - FrameRate)) < FrameRate*1/100) var = FrameRate;
-typedef struct
-{
+typedef struct {
const int PicEntryNumber;
const UINT PreferedConfigBitstream;
const GUID* Decoder[MAX_SUPPORTED_MODE];
const WORD RestrictedMode[MAX_SUPPORTED_MODE];
} DXVA_PARAMS;
-typedef struct
-{
+typedef struct {
const CLSID* clsMinorType;
const enum CodecID nFFCodec;
const int fourcc;
const DXVA_PARAMS* DXVAModes;
- int DXVAModeCount()
- {
- if (!DXVAModes) return 0;
- for (int i=0; i<MAX_SUPPORTED_MODE; i++)
- {
- if (DXVAModes->Decoder[i] == &GUID_NULL) return i;
+ int DXVAModeCount() {
+ if (!DXVAModes) {
+ return 0;
+ }
+ for (int i=0; i<MAX_SUPPORTED_MODE; i++) {
+ if (DXVAModes->Decoder[i] == &GUID_NULL) {
+ return i;
+ }
}
return MAX_SUPPORTED_MODE;
}
} FFMPEG_CODECS;
-typedef enum
-{
+typedef enum {
ffYCbCr_RGB_coeff_ITUR_BT601 = 0,
ffYCbCr_RGB_coeff_ITUR_BT709 = 1,
ffYCbCr_RGB_coeff_SMPTE240M = 2,
@@ -105,8 +104,7 @@ struct TYCbCr2RGB_coeffs {
int cspOptionsBlackCutoff,
int cspOptionsChromaCutoff,
double cspOptionsRGB_WhiteLevel,
- double cspOptionsRGB_BlackLevel)
- {
+ double cspOptionsRGB_BlackLevel) {
if (cspOptionsIturBt == ffYCbCr_RGB_coeff_ITUR_BT601) {
Kr = 0.299;
Kg = 0.587;
@@ -139,8 +137,7 @@ struct TYCbCr2RGB_coeffs {
// DXVA modes supported for Mpeg2
-DXVA_PARAMS DXVA_Mpeg2 =
-{
+DXVA_PARAMS DXVA_Mpeg2 = {
9, // PicEntryNumber
1, // PreferedConfigBitstream
{ &DXVA2_ModeMPEG2_VLD, &GUID_NULL },
@@ -148,16 +145,14 @@ DXVA_PARAMS DXVA_Mpeg2 =
};
// DXVA modes supported for H264
-DXVA_PARAMS DXVA_H264 =
-{
+DXVA_PARAMS DXVA_H264 = {
16, // PicEntryNumber
2, // PreferedConfigBitstream
{ &DXVA2_ModeH264_E, &DXVA2_ModeH264_F, &DXVA_Intel_H264_ClearVideo, &GUID_NULL },
{ DXVA_RESTRICTED_MODE_H264_E, 0}
};
-DXVA_PARAMS DXVA_H264_VISTA =
-{
+DXVA_PARAMS DXVA_H264_VISTA = {
22, // PicEntryNumber
2, // PreferedConfigBitstream
{ &DXVA2_ModeH264_E, &DXVA2_ModeH264_F, &DXVA_Intel_H264_ClearVideo, &GUID_NULL },
@@ -165,16 +160,14 @@ DXVA_PARAMS DXVA_H264_VISTA =
};
// DXVA modes supported for VC1
-DXVA_PARAMS DXVA_VC1 =
-{
+DXVA_PARAMS DXVA_VC1 = {
14, // PicEntryNumber
1, // PreferedConfigBitstream
{ &DXVA2_ModeVC1_D, &GUID_NULL },
{ DXVA_RESTRICTED_MODE_VC1_D, 0}
};
-FFMPEG_CODECS ffCodecs[] =
-{
+FFMPEG_CODECS ffCodecs[] = {
#if HAS_FFMPEG_VIDEO_DECODERS
// Flash video
{ &MEDIASUBTYPE_FLV1, CODEC_ID_FLV1, MAKEFOURCC('F','L','V','1'), NULL },
@@ -348,8 +341,7 @@ FFMPEG_CODECS ffCodecs[] =
};
/* Important: the order should be exactly the same as in ffCodecs[] */
-const AMOVIESETUP_MEDIATYPE CMPCVideoDecFilter::sudPinTypesIn[] =
-{
+const AMOVIESETUP_MEDIATYPE CMPCVideoDecFilter::sudPinTypesIn[] = {
#if HAS_FFMPEG_VIDEO_DECODERS
// Flash video
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FLV1 },
@@ -530,8 +522,7 @@ bool* CMPCVideoDecFilter::FFmpegFilters = NULL;
bool* CMPCVideoDecFilter::DXVAFilters = NULL;
bool CMPCVideoDecFilter::m_ref_frame_count_check_skip = false;
-const AMOVIESETUP_MEDIATYPE CMPCVideoDecFilter::sudPinTypesOut[] =
-{
+const AMOVIESETUP_MEDIATYPE CMPCVideoDecFilter::sudPinTypesOut[] = {
{&MEDIATYPE_Video, &MEDIASUBTYPE_NV12},
{&MEDIATYPE_Video, &MEDIASUBTYPE_NV24}
};
@@ -545,8 +536,7 @@ BOOL CALLBACK EnumFindProcessWnd (HWND hwnd, LPARAM lParam)
GetWindowThreadProcessId (hwnd, &procid);
GetClassName (hwnd, WindowClass, countof(WindowClass));
- if (procid == GetCurrentProcessId() && _tcscmp (WindowClass, _T("MediaPlayerClassicW")) == 0)
- {
+ if (procid == GetCurrentProcessId() && _tcscmp (WindowClass, _T("MediaPlayerClassicW")) == 0) {
HWND* pWnd = (HWND*) lParam;
*pWnd = hwnd;
return FALSE;
@@ -559,20 +549,25 @@ CMPCVideoDecFilter::CMPCVideoDecFilter(LPUNKNOWN lpunk, HRESULT* phr)
{
HWND hWnd = NULL;
- if(IsVistaOrAbove())
- {
- for (int i=0; i<countof(ffCodecs); i++)
- {
- if(ffCodecs[i].nFFCodec == CODEC_ID_H264)
+ if(IsVistaOrAbove()) {
+ for (int i=0; i<countof(ffCodecs); i++) {
+ if(ffCodecs[i].nFFCodec == CODEC_ID_H264) {
ffCodecs[i].DXVAModes = &DXVA_H264_VISTA;
+ }
}
}
- if(phr) *phr = S_OK;
+ if(phr) {
+ *phr = S_OK;
+ }
- if (m_pOutput) delete m_pOutput;
+ if (m_pOutput) {
+ delete m_pOutput;
+ }
m_pOutput = DNew CVideoDecOutputPin(NAME("CVideoDecOutputPin"), this, phr, L"Output");
- if(!m_pOutput) *phr = E_OUTOFMEMORY;
+ if(!m_pOutput) {
+ *phr = E_OUTOFMEMORY;
+ }
m_pCpuId = DNew CCpuId();
m_pAVCodec = NULL;
@@ -618,20 +613,37 @@ CMPCVideoDecFilter::CMPCVideoDecFilter(LPUNKNOWN lpunk, HRESULT* phr)
m_sar.SetSize(1,1);
CRegKey key;
- if(ERROR_SUCCESS == key.Open(HKEY_CURRENT_USER, _T("Software\\Gabest\\Filters\\MPC Video Decoder"), KEY_READ))
- {
+ if(ERROR_SUCCESS == key.Open(HKEY_CURRENT_USER, _T("Software\\Gabest\\Filters\\MPC Video Decoder"), KEY_READ)) {
DWORD dw;
- if(ERROR_SUCCESS == key.QueryDWORDValue(_T("ThreadNumber"), dw)) m_nThreadNumber = dw;
- if(ERROR_SUCCESS == key.QueryDWORDValue(_T("DiscardMode"), dw)) m_nDiscardMode = dw;
- if(ERROR_SUCCESS == key.QueryDWORDValue(_T("ErrorRecognition"), dw)) m_nErrorRecognition = dw;
- if(ERROR_SUCCESS == key.QueryDWORDValue(_T("IDCTAlgo"), dw)) m_nIDCTAlgo = dw;
- if(ERROR_SUCCESS == key.QueryDWORDValue(_T("ActiveCodecs"), dw)) m_nActiveCodecs = dw;
- if(ERROR_SUCCESS == key.QueryDWORDValue(_T("ARMode"), dw)) m_nARMode = dw;
- if(ERROR_SUCCESS == key.QueryDWORDValue(_T("DXVACheckCompatibility"), dw)) m_nDXVACheckCompatibility = dw;
- if(ERROR_SUCCESS == key.QueryDWORDValue(_T("DisableDXVA_SD"), dw)) m_nDXVA_SD = dw;
+ if(ERROR_SUCCESS == key.QueryDWORDValue(_T("ThreadNumber"), dw)) {
+ m_nThreadNumber = dw;
+ }
+ if(ERROR_SUCCESS == key.QueryDWORDValue(_T("DiscardMode"), dw)) {
+ m_nDiscardMode = dw;
+ }
+ if(ERROR_SUCCESS == key.QueryDWORDValue(_T("ErrorRecognition"), dw)) {
+ m_nErrorRecognition = dw;
+ }
+ if(ERROR_SUCCESS == key.QueryDWORDValue(_T("IDCTAlgo"), dw)) {
+ m_nIDCTAlgo = dw;
+ }
+ if(ERROR_SUCCESS == key.QueryDWORDValue(_T("ActiveCodecs"), dw)) {
+ m_nActiveCodecs = dw;
+ }
+ if(ERROR_SUCCESS == key.QueryDWORDValue(_T("ARMode"), dw)) {
+ m_nARMode = dw;
+ }
+ if(ERROR_SUCCESS == key.QueryDWORDValue(_T("DXVACheckCompatibility"), dw)) {
+ m_nDXVACheckCompatibility = dw;
+ }
+ if(ERROR_SUCCESS == key.QueryDWORDValue(_T("DisableDXVA_SD"), dw)) {
+ m_nDXVA_SD = dw;
+ }
}
- if(m_nDXVACheckCompatibility > 3) m_nDXVACheckCompatibility = 1; // skip level check by default
+ if(m_nDXVACheckCompatibility > 3) {
+ m_nDXVACheckCompatibility = 1; // skip level check by default
+ }
ff_avcodec_default_get_buffer = avcodec_default_get_buffer;
ff_avcodec_default_release_buffer = avcodec_default_release_buffer;
@@ -649,23 +661,28 @@ CMPCVideoDecFilter::CMPCVideoDecFilter(LPUNKNOWN lpunk, HRESULT* phr)
int nCodecs = countof(ffCodecs);
int nPinTypes = countof(sudPinTypesIn);
ASSERT (nCodecs == nPinTypes);
- for (int i=0; i<nPinTypes; i++)
+ for (int i=0; i<nPinTypes; i++) {
ASSERT (ffCodecs[i].clsMinorType == sudPinTypesIn[i].clsMinorType);
+ }
#endif
}
UINT CMPCVideoDecFilter::GetAdapter(IDirect3D9* pD3D, HWND hWnd)
{
- if(hWnd == NULL || pD3D == NULL)
+ if(hWnd == NULL || pD3D == NULL) {
return D3DADAPTER_DEFAULT;
+ }
HMONITOR hMonitor = MonitorFromWindow(hWnd, MONITOR_DEFAULTTONEAREST);
- if(hMonitor == NULL) return D3DADAPTER_DEFAULT;
+ if(hMonitor == NULL) {
+ return D3DADAPTER_DEFAULT;
+ }
- for(UINT adp = 0, num_adp = pD3D->GetAdapterCount(); adp < num_adp; ++adp)
- {
+ for(UINT adp = 0, num_adp = pD3D->GetAdapterCount(); adp < num_adp; ++adp) {
HMONITOR hAdpMon = pD3D->GetAdapterMonitor(adp);
- if(hAdpMon == hMonitor) return adp;
+ if(hAdpMon == hMonitor) {
+ return adp;
+ }
}
return D3DADAPTER_DEFAULT;
@@ -680,11 +697,9 @@ void CMPCVideoDecFilter::DetectVideoCard(HWND hWnd)
m_VideoDriverVersion.LowPart = 0;
pD3D9 = Direct3DCreate9(D3D_SDK_VERSION);
- if (pD3D9)
- {
+ if (pD3D9) {
D3DADAPTER_IDENTIFIER9 adapterIdentifier;
- if (pD3D9->GetAdapterIdentifier(GetAdapter(pD3D9, hWnd), 0, &adapterIdentifier) == S_OK)
- {
+ if (pD3D9->GetAdapterIdentifier(GetAdapter(pD3D9, hWnd), 0, &adapterIdentifier) == S_OK) {
m_nPCIVendor = adapterIdentifier.VendorId;
m_nPCIDevice = adapterIdentifier.DeviceId;
m_VideoDriverVersion = adapterIdentifier.DriverVersion;
@@ -712,15 +727,12 @@ bool CMPCVideoDecFilter::IsVideoInterlaced()
void CMPCVideoDecFilter::UpdateFrameTime (REFERENCE_TIME& rtStart, REFERENCE_TIME& rtStop)
{
- if (rtStart == _I64_MIN)
- {
+ if (rtStart == _I64_MIN) {
// If reference time has not been set by splitter, extrapolate start time
// from last known start time already delivered
rtStart = m_rtLastStart + m_rtAvrTimePerFrame*m_nCountEstimated;
m_nCountEstimated++;
- }
- else
- {
+ } else {
// Known start time, set as new reference
m_rtLastStart = rtStart;
m_nCountEstimated = 1;
@@ -738,13 +750,10 @@ void CMPCVideoDecFilter::GetOutputSize(int& w, int& h, int& arx, int& ary, int &
w = PictWidthRounded();
h = PictHeightRounded();
#else
- if (m_nDXVAMode == MODE_SOFTWARE)
- {
+ if (m_nDXVAMode == MODE_SOFTWARE) {
w = m_nWidth;
h = m_nHeight;
- }
- else
- {
+ } else {
// DXVA surface are multiple of 16 pixels!
w = PictWidthRounded();
h = PictHeightRounded();
@@ -778,111 +787,105 @@ int CMPCVideoDecFilter::PictHeightRounded()
int CMPCVideoDecFilter::FindCodec(const CMediaType* mtIn)
{
for (int i=0; i<countof(ffCodecs); i++)
- if (mtIn->subtype == *ffCodecs[i].clsMinorType)
- {
+ if (mtIn->subtype == *ffCodecs[i].clsMinorType) {
#ifndef REGISTER_FILTER
- switch (ffCodecs[i].nFFCodec)
- {
- case CODEC_ID_H264 :
- #if INTERNAL_DECODER_H264_DXVA
- m_bUseDXVA = DXVAFilters && DXVAFilters[TRA_DXVA_H264];
- #else
- m_bUseDXVA = false;
- #endif
- #if INTERNAL_DECODER_H264
- m_bUseFFmpeg = FFmpegFilters && FFmpegFilters[FFM_H264];
- #else
- m_bUseFFmpeg = false;
- #endif
- break;
- case CODEC_ID_VC1 :
- #if INTERNAL_DECODER_VC1_DXVA
- m_bUseDXVA = DXVAFilters && DXVAFilters[TRA_DXVA_VC1];
- #else
- m_bUseDXVA = false;
- #endif
- #if INTERNAL_DECODER_VC1
- m_bUseFFmpeg = FFmpegFilters && FFmpegFilters[FFM_VC1];
- #else
- m_bUseFFmpeg = false;
- #endif
- break;
- case CODEC_ID_MPEG2VIDEO :
- #if INTERNAL_DECODER_MPEG2_DXVA
- m_bUseDXVA = true;
- #endif
- m_bUseFFmpeg = false; // No Mpeg2 software support with ffmpeg!
- break;
- default :
- m_bUseDXVA = false;
+ switch (ffCodecs[i].nFFCodec) {
+ case CODEC_ID_H264 :
+#if INTERNAL_DECODER_H264_DXVA
+ m_bUseDXVA = DXVAFilters && DXVAFilters[TRA_DXVA_H264];
+#else
+ m_bUseDXVA = false;
+#endif
+#if INTERNAL_DECODER_H264
+ m_bUseFFmpeg = FFmpegFilters && FFmpegFilters[FFM_H264];
+#else
+ m_bUseFFmpeg = false;
+#endif
+ break;
+ case CODEC_ID_VC1 :
+#if INTERNAL_DECODER_VC1_DXVA
+ m_bUseDXVA = DXVAFilters && DXVAFilters[TRA_DXVA_VC1];
+#else
+ m_bUseDXVA = false;
+#endif
+#if INTERNAL_DECODER_VC1
+ m_bUseFFmpeg = FFmpegFilters && FFmpegFilters[FFM_VC1];
+#else
+ m_bUseFFmpeg = false;
+#endif
+ break;
+ case CODEC_ID_MPEG2VIDEO :
+#if INTERNAL_DECODER_MPEG2_DXVA
+ m_bUseDXVA = true;
+#endif
+ m_bUseFFmpeg = false; // No Mpeg2 software support with ffmpeg!
+ break;
+ default :
+ m_bUseDXVA = false;
}
return ((m_bUseDXVA || m_bUseFFmpeg) ? i : -1);
#else
bool bCodecActivated = false;
- switch (ffCodecs[i].nFFCodec)
- {
- case CODEC_ID_FLV1 :
- case CODEC_ID_VP6F :
- bCodecActivated = (m_nActiveCodecs & MPCVD_FLASH) != 0;
- break;
- case CODEC_ID_MPEG4 :
- if ((*ffCodecs[i].clsMinorType == MEDIASUBTYPE_XVID) ||
- (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_xvid) ||
- (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_XVIX) ||
- (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_xvix) )
- {
- bCodecActivated = (m_nActiveCodecs & MPCVD_XVID) != 0;
- }
- else if ((*ffCodecs[i].clsMinorType == MEDIASUBTYPE_DX50) ||
- (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_dx50) ||
- (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_DIVX) ||
- (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_divx) )
- {
- bCodecActivated = (m_nActiveCodecs & MPCVD_DIVX) != 0;
- }
- break;
- case CODEC_ID_WMV1 :
- case CODEC_ID_WMV2 :
- case CODEC_ID_WMV3 :
- bCodecActivated = (m_nActiveCodecs & MPCVD_WMV) != 0;
- break;
- case CODEC_ID_MSMPEG4V3 :
- case CODEC_ID_MSMPEG4V2 :
- case CODEC_ID_MSMPEG4V1 :
- bCodecActivated = (m_nActiveCodecs & MPCVD_MSMPEG4) != 0;
- break;
- case CODEC_ID_H264 :
- m_bUseDXVA = (m_nActiveCodecs & MPCVD_H264_DXVA) != 0;
- m_bUseFFmpeg = (m_nActiveCodecs & MPCVD_H264) != 0;
- bCodecActivated = m_bUseDXVA || m_bUseFFmpeg;
- break;
- case CODEC_ID_SVQ3 :
- case CODEC_ID_SVQ1 :
- bCodecActivated = (m_nActiveCodecs & MPCVD_SVQ3) != 0;
- break;
- case CODEC_ID_H263 :
- bCodecActivated = (m_nActiveCodecs & MPCVD_H263) != 0;
- break;
- case CODEC_ID_THEORA :
- bCodecActivated = (m_nActiveCodecs & MPCVD_THEORA) != 0;
- break;
- case CODEC_ID_VC1 :
- m_bUseDXVA = (m_nActiveCodecs & MPCVD_VC1_DXVA) != 0;
- m_bUseFFmpeg = (m_nActiveCodecs & MPCVD_VC1) != 0;
- bCodecActivated = m_bUseDXVA || m_bUseFFmpeg;
- break;
- case CODEC_ID_AMV :
- bCodecActivated = (m_nActiveCodecs & MPCVD_AMVV) != 0;
- break;
- case CODEC_ID_VP5 :
- case CODEC_ID_VP6 :
- case CODEC_ID_VP6A :
- bCodecActivated = (m_nActiveCodecs & MPCVD_VP6) != 0;
- break;
- case CODEC_ID_VP8 :
- bCodecActivated = (m_nActiveCodecs & MPCVD_VP8) != 0;
- break;
+ switch (ffCodecs[i].nFFCodec) {
+ case CODEC_ID_FLV1 :
+ case CODEC_ID_VP6F :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_FLASH) != 0;
+ break;
+ case CODEC_ID_MPEG4 :
+ if ((*ffCodecs[i].clsMinorType == MEDIASUBTYPE_XVID) ||
+ (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_xvid) ||
+ (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_XVIX) ||
+ (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_xvix) ) {
+ bCodecActivated = (m_nActiveCodecs & MPCVD_XVID) != 0;
+ } else if ((*ffCodecs[i].clsMinorType == MEDIASUBTYPE_DX50) ||
+ (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_dx50) ||
+ (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_DIVX) ||
+ (*ffCodecs[i].clsMinorType == MEDIASUBTYPE_divx) ) {
+ bCodecActivated = (m_nActiveCodecs & MPCVD_DIVX) != 0;
+ }
+ break;
+ case CODEC_ID_WMV1 :
+ case CODEC_ID_WMV2 :
+ case CODEC_ID_WMV3 :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_WMV) != 0;
+ break;
+ case CODEC_ID_MSMPEG4V3 :
+ case CODEC_ID_MSMPEG4V2 :
+ case CODEC_ID_MSMPEG4V1 :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_MSMPEG4) != 0;
+ break;
+ case CODEC_ID_H264 :
+ m_bUseDXVA = (m_nActiveCodecs & MPCVD_H264_DXVA) != 0;
+ m_bUseFFmpeg = (m_nActiveCodecs & MPCVD_H264) != 0;
+ bCodecActivated = m_bUseDXVA || m_bUseFFmpeg;
+ break;
+ case CODEC_ID_SVQ3 :
+ case CODEC_ID_SVQ1 :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_SVQ3) != 0;
+ break;
+ case CODEC_ID_H263 :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_H263) != 0;
+ break;
+ case CODEC_ID_THEORA :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_THEORA) != 0;
+ break;
+ case CODEC_ID_VC1 :
+ m_bUseDXVA = (m_nActiveCodecs & MPCVD_VC1_DXVA) != 0;
+ m_bUseFFmpeg = (m_nActiveCodecs & MPCVD_VC1) != 0;
+ bCodecActivated = m_bUseDXVA || m_bUseFFmpeg;
+ break;
+ case CODEC_ID_AMV :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_AMVV) != 0;
+ break;
+ case CODEC_ID_VP5 :
+ case CODEC_ID_VP6 :
+ case CODEC_ID_VP6A :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_VP6) != 0;
+ break;
+ case CODEC_ID_VP8 :
+ bCodecActivated = (m_nActiveCodecs & MPCVD_VP8) != 0;
+ break;
}
return (bCodecActivated ? i : -1);
#endif
@@ -896,26 +899,38 @@ void CMPCVideoDecFilter::Cleanup()
SAFE_DELETE (m_pDXVADecoder);
// Release FFMpeg
- if (m_pAVCtx)
- {
- if (m_pAVCtx->intra_matrix) free(m_pAVCtx->intra_matrix);
- if (m_pAVCtx->inter_matrix) free(m_pAVCtx->inter_matrix);
- if (m_pAVCtx->extradata) free((unsigned char*)m_pAVCtx->extradata);
- if (m_pFFBuffer) free(m_pFFBuffer);
+ if (m_pAVCtx) {
+ if (m_pAVCtx->intra_matrix) {
+ free(m_pAVCtx->intra_matrix);
+ }
+ if (m_pAVCtx->inter_matrix) {
+ free(m_pAVCtx->inter_matrix);
+ }
+ if (m_pAVCtx->extradata) {
+ free((unsigned char*)m_pAVCtx->extradata);
+ }
+ if (m_pFFBuffer) {
+ free(m_pFFBuffer);
+ }
- if (m_pAVCtx->slice_offset) av_free(m_pAVCtx->slice_offset);
- if (m_pAVCtx->codec) avcodec_close(m_pAVCtx);
+ if (m_pAVCtx->slice_offset) {
+ av_free(m_pAVCtx->slice_offset);
+ }
+ if (m_pAVCtx->codec) {
+ avcodec_close(m_pAVCtx);
+ }
// Free thread resource if necessary
FFSetThreadNumber (m_pAVCtx, 0);
av_free(m_pAVCtx);
}
- if (m_pFrame) av_free(m_pFrame);
+ if (m_pFrame) {
+ av_free(m_pFrame);
+ }
#if HAS_FFMPEG_VIDEO_DECODERS
- if (m_pSwsContext)
- {
+ if (m_pSwsContext) {
sws_freeContext(m_pSwsContext);
m_pSwsContext = NULL;
}
@@ -932,8 +947,7 @@ void CMPCVideoDecFilter::Cleanup()
SAFE_DELETE_ARRAY (m_pVideoOutputFormat);
// Release DXVA ressources
- if (m_hDevice != INVALID_HANDLE_VALUE)
- {
+ if (m_hDevice != INVALID_HANDLE_VALUE) {
m_pDeviceManager->CloseDeviceHandle(m_hDevice);
m_hDevice = INVALID_HANDLE_VALUE;
}
@@ -946,16 +960,15 @@ void CMPCVideoDecFilter::Cleanup()
void CMPCVideoDecFilter::CalcAvgTimePerFrame()
{
CMediaType &mt = m_pInput->CurrentMediaType();
- if (mt.formattype==FORMAT_VideoInfo)
+ if (mt.formattype==FORMAT_VideoInfo) {
m_rtAvrTimePerFrame = ((VIDEOINFOHEADER*)mt.pbFormat)->AvgTimePerFrame;
- else if (mt.formattype==FORMAT_VideoInfo2)
+ } else if (mt.formattype==FORMAT_VideoInfo2) {
m_rtAvrTimePerFrame = ((VIDEOINFOHEADER2*)mt.pbFormat)->AvgTimePerFrame;
- else if (mt.formattype==FORMAT_MPEGVideo)
+ } else if (mt.formattype==FORMAT_MPEGVideo) {
m_rtAvrTimePerFrame = ((MPEG1VIDEOINFO*)mt.pbFormat)->hdr.AvgTimePerFrame;
- else if (mt.formattype==FORMAT_MPEG2Video)
+ } else if (mt.formattype==FORMAT_MPEG2Video) {
m_rtAvrTimePerFrame = ((MPEG2VIDEOINFO*)mt.pbFormat)->hdr.AvgTimePerFrame;
- else
- {
+ } else {
ASSERT (FALSE);
m_rtAvrTimePerFrame = 1;
}
@@ -975,7 +988,7 @@ void CMPCVideoDecFilter::LogLibAVCodec(void* par,int level,const char *fmt,va_li
void CMPCVideoDecFilter::OnGetBuffer(AVFrame *pic)
{
// Callback from FFMpeg to store Ref Time in frame (needed to have correct rtStart after avcodec_decode_video calls)
-// pic->rtStart = m_rtStart;
+ // pic->rtStart = m_rtStart;
}
STDMETHODIMP CMPCVideoDecFilter::NonDelegatingQueryInterface(REFIID riid, void** ppv)
@@ -992,11 +1005,11 @@ STDMETHODIMP CMPCVideoDecFilter::NonDelegatingQueryInterface(REFIID riid, void**
HRESULT CMPCVideoDecFilter::CheckInputType(const CMediaType* mtIn)
{
- for (int i=0; i<sizeof(sudPinTypesIn)/sizeof(AMOVIESETUP_MEDIATYPE); i++)
- {
+ for (int i=0; i<sizeof(sudPinTypesIn)/sizeof(AMOVIESETUP_MEDIATYPE); i++) {
if ((mtIn->majortype == *sudPinTypesIn[i].clsMajorType) &&
- (mtIn->subtype == *sudPinTypesIn[i].clsMinorType))
+ (mtIn->subtype == *sudPinTypesIn[i].clsMinorType)) {
return S_OK;
+ }
}
return VFW_E_TYPE_NOT_ACCEPTED;
@@ -1013,13 +1026,13 @@ HRESULT CMPCVideoDecFilter::SetMediaType(PIN_DIRECTION direction,const CMediaTyp
{
int nNewCodec;
- if (direction == PINDIR_INPUT)
- {
+ if (direction == PINDIR_INPUT) {
nNewCodec = FindCodec(pmt);
- if (nNewCodec == -1) return VFW_E_TYPE_NOT_ACCEPTED;
+ if (nNewCodec == -1) {
+ return VFW_E_TYPE_NOT_ACCEPTED;
+ }
- if (nNewCodec != m_nCodecNb)
- {
+ if (nNewCodec != m_nCodecNb) {
m_nCodecNb = nNewCodec;
m_bReorderBFrame = true;
@@ -1029,45 +1042,36 @@ HRESULT CMPCVideoDecFilter::SetMediaType(PIN_DIRECTION direction,const CMediaTyp
m_pAVCtx = avcodec_alloc_context();
CheckPointer (m_pAVCtx, E_POINTER);
- if ((m_nThreadNumber > 1) && IsMultiThreadSupported (ffCodecs[m_nCodecNb].nFFCodec))
+ if ((m_nThreadNumber > 1) && IsMultiThreadSupported (ffCodecs[m_nCodecNb].nFFCodec)) {
FFSetThreadNumber(m_pAVCtx, m_nThreadNumber);
+ }
m_pFrame = avcodec_alloc_frame();
CheckPointer (m_pFrame, E_POINTER);
- if(pmt->formattype == FORMAT_VideoInfo)
- {
+ if(pmt->formattype == FORMAT_VideoInfo) {
VIDEOINFOHEADER* vih = (VIDEOINFOHEADER*)pmt->pbFormat;
m_pAVCtx->width = vih->bmiHeader.biWidth;
m_pAVCtx->height = abs(vih->bmiHeader.biHeight);
m_pAVCtx->codec_tag = vih->bmiHeader.biCompression;
- }
- else if(pmt->formattype == FORMAT_VideoInfo2)
- {
+ } else if(pmt->formattype == FORMAT_VideoInfo2) {
VIDEOINFOHEADER2* vih2 = (VIDEOINFOHEADER2*)pmt->pbFormat;
m_pAVCtx->width = vih2->bmiHeader.biWidth;
m_pAVCtx->height = abs(vih2->bmiHeader.biHeight);
m_pAVCtx->codec_tag = vih2->bmiHeader.biCompression;
- }
- else if(pmt->formattype == FORMAT_MPEGVideo)
- {
+ } else if(pmt->formattype == FORMAT_MPEGVideo) {
MPEG1VIDEOINFO* mpgv = (MPEG1VIDEOINFO*)pmt->pbFormat;
m_pAVCtx->width = mpgv->hdr.bmiHeader.biWidth;
m_pAVCtx->height = abs(mpgv->hdr.bmiHeader.biHeight);
m_pAVCtx->codec_tag = mpgv->hdr.bmiHeader.biCompression;
- }
- else if(pmt->formattype == FORMAT_MPEG2Video)
- {
+ } else if(pmt->formattype == FORMAT_MPEG2Video) {
MPEG2VIDEOINFO* mpg2v = (MPEG2VIDEOINFO*)pmt->pbFormat;
m_pAVCtx->width = mpg2v->hdr.bmiHeader.biWidth;
m_pAVCtx->height = abs(mpg2v->hdr.bmiHeader.biHeight);
m_pAVCtx->codec_tag = mpg2v->hdr.bmiHeader.biCompression;
- if (mpg2v->hdr.bmiHeader.biCompression == NULL)
- {
+ if (mpg2v->hdr.bmiHeader.biCompression == NULL) {
m_pAVCtx->codec_tag = pmt->subtype.Data1;
- }
- else if ( (m_pAVCtx->codec_tag == MAKEFOURCC('a','v','c','1')) || (m_pAVCtx->codec_tag == MAKEFOURCC('A','V','C','1')))
- {
+ } else if ( (m_pAVCtx->codec_tag == MAKEFOURCC('a','v','c','1')) || (m_pAVCtx->codec_tag == MAKEFOURCC('A','V','C','1'))) {
m_pAVCtx->nal_length_size = mpg2v->dwFlags;
m_bReorderBFrame = false;
}
@@ -1087,9 +1091,9 @@ HRESULT CMPCVideoDecFilter::SetMediaType(PIN_DIRECTION direction,const CMediaTyp
m_pAVCtx->postgain = 1.0f;
m_pAVCtx->debug_mv = 0;
- #ifdef _DEBUG
+#ifdef _DEBUG
//m_pAVCtx->debug = FF_DEBUG_PICT_INFO | FF_DEBUG_STARTCODE | FF_DEBUG_PTS;
- #endif
+#endif
m_pAVCtx->opaque = this;
m_pAVCtx->get_buffer = get_buffer;
@@ -1098,53 +1102,52 @@ HRESULT CMPCVideoDecFilter::SetMediaType(PIN_DIRECTION direction,const CMediaTyp
ConnectTo (m_pAVCtx);
CalcAvgTimePerFrame();
- if (avcodec_open(m_pAVCtx, m_pAVCodec)<0)
+ if (avcodec_open(m_pAVCtx, m_pAVCodec)<0) {
return VFW_E_INVALIDMEDIATYPE;
+ }
- switch (ffCodecs[m_nCodecNb].nFFCodec)
- {
- case CODEC_ID_H264 :
- if((m_nDXVA_SD) && (PictWidthRounded() < 1280))
- {
- m_bDXVACompatible = false;
- }
- else
- {
- if(m_nDXVACheckCompatibility != 3)
- {
- // non-zero value indicates that an incompatibility was detected
- int nCompat = FFH264CheckCompatibility (PictWidthRounded(), PictHeightRounded(), m_pAVCtx, (BYTE*)m_pAVCtx->extradata, m_pAVCtx->extradata_size, m_nPCIVendor, m_nPCIDevice, m_VideoDriverVersion);
-
- if(nCompat > 0)
- {
- switch(m_nDXVACheckCompatibility)
- {
- case 0 :
- // full check
- m_bDXVACompatible = false;
- break;
- case 1 :
- // skip level check
- if(nCompat != DXVA_UNSUPPORTED_LEVEL) m_bDXVACompatible = false;
- break;
- case 2 :
- // skip reference frame check
- if(nCompat != DXVA_TOO_MANY_REF_FRAMES) m_bDXVACompatible = false;
- break;
+ switch (ffCodecs[m_nCodecNb].nFFCodec) {
+ case CODEC_ID_H264 :
+ if((m_nDXVA_SD) && (PictWidthRounded() < 1280)) {
+ m_bDXVACompatible = false;
+ } else {
+ if(m_nDXVACheckCompatibility != 3) {
+ // non-zero value indicates that an incompatibility was detected
+ int nCompat = FFH264CheckCompatibility (PictWidthRounded(), PictHeightRounded(), m_pAVCtx, (BYTE*)m_pAVCtx->extradata, m_pAVCtx->extradata_size, m_nPCIVendor, m_nPCIDevice, m_VideoDriverVersion);
+
+ if(nCompat > 0) {
+ switch(m_nDXVACheckCompatibility) {
+ case 0 :
+ // full check
+ m_bDXVACompatible = false;
+ break;
+ case 1 :
+ // skip level check
+ if(nCompat != DXVA_UNSUPPORTED_LEVEL) {
+ m_bDXVACompatible = false;
+ }
+ break;
+ case 2 :
+ // skip reference frame check
+ if(nCompat != DXVA_TOO_MANY_REF_FRAMES) {
+ m_bDXVACompatible = false;
+ }
+ break;
+ }
}
}
}
- }
- break;
- case CODEC_ID_MPEG2VIDEO :
- // DSP is disable for DXVA decoding (to keep default idct_permutation)
- m_pAVCtx->dsp_mask ^= AV_CPU_FLAG_FORCE;
- break;
+ break;
+ case CODEC_ID_MPEG2VIDEO :
+ // DSP is disabled for DXVA decoding (to keep default idct_permutation)
+ m_pAVCtx->dsp_mask ^= AV_CPU_FLAG_FORCE;
+ break;
}
// Force single thread for DXVA !
- if (IsDXVASupported())
+ if (IsDXVASupported()) {
FFSetThreadNumber(m_pAVCtx, 1);
+ }
BuildDXVAOutputFormat();
}
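
Note: the switch above gates DXVA on the result of FFH264CheckCompatibility according to m_nDXVACheckCompatibility — 0 runs the full check, 1 ignores an unsupported-level report, 2 ignores a too-many-reference-frames report, and 3 skips the probe entirely. A standalone sketch of that decision, with plain ints standing in for the filter members and with stand-in result codes (not the real DXVA_* constants):

    // Sketch only -- COMPAT_* values are illustrative stand-ins for the codes
    // returned by the compatibility probe.
    enum { COMPAT_OK = 0, COMPAT_UNSUPPORTED_LEVEL = 1, COMPAT_TOO_MANY_REF_FRAMES = 2 };

    bool IsDxvaCompatible(int nCompat, int nCheckMode)
    {
        if (nCheckMode == 3) {          // 3 : compatibility check disabled
            return true;
        }
        if (nCompat == COMPAT_OK) {     // probe reported no problem
            return true;
        }
        switch (nCheckMode) {
            case 1:  // ignore an unsupported-level report only
                return nCompat == COMPAT_UNSUPPORTED_LEVEL;
            case 2:  // ignore a too-many-reference-frames report only
                return nCompat == COMPAT_TOO_MANY_REF_FRAMES;
            default: // 0 : full check, any report disables DXVA
                return false;
        }
    }
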
@@ -1154,16 +1157,14 @@ HRESULT CMPCVideoDecFilter::SetMediaType(PIN_DIRECTION direction,const CMediaTyp
}
-VIDEO_OUTPUT_FORMATS DXVAFormats[] =
-{
+VIDEO_OUTPUT_FORMATS DXVAFormats[] = {
{&MEDIASUBTYPE_NV12, 1, 12, 'avxd'}, // DXVA2
{&MEDIASUBTYPE_NV12, 1, 12, 'AVXD'},
{&MEDIASUBTYPE_NV12, 1, 12, 'AVxD'},
{&MEDIASUBTYPE_NV12, 1, 12, 'AvXD'}
};
-VIDEO_OUTPUT_FORMATS SoftwareFormats[] =
-{
+VIDEO_OUTPUT_FORMATS SoftwareFormats[] = {
{&MEDIASUBTYPE_YV12, 3, 12, '21VY'},
{&MEDIASUBTYPE_YUY2, 1, 16, '2YUY'}, // Software
{&MEDIASUBTYPE_I420, 3, 12, '024I'},
@@ -1200,11 +1201,9 @@ void CMPCVideoDecFilter::BuildDXVAOutputFormat()
m_pVideoOutputFormat = DNew VIDEO_OUTPUT_FORMATS[m_nVideoOutputCount];
- if (IsDXVASupported())
- {
+ if (IsDXVASupported()) {
// Dynamic DXVA media types for DXVA1
- for (nPos=0; nPos<ffCodecs[m_nCodecNb].DXVAModeCount(); nPos++)
- {
+ for (nPos=0; nPos<ffCodecs[m_nCodecNb].DXVAModeCount(); nPos++) {
m_pVideoOutputFormat[nPos].subtype = ffCodecs[m_nCodecNb].DXVAModes->Decoder[nPos];
m_pVideoOutputFormat[nPos].biCompression = 'avxd';
m_pVideoOutputFormat[nPos].biBitCount = 12;
@@ -1217,17 +1216,19 @@ void CMPCVideoDecFilter::BuildDXVAOutputFormat()
}
// Software rendering
- if (m_bUseFFmpeg)
+ if (m_bUseFFmpeg) {
memcpy (&m_pVideoOutputFormat[nPos], SoftwareFormats, sizeof(SoftwareFormats));
+ }
}
int CMPCVideoDecFilter::GetPicEntryNumber()
{
- if (IsDXVASupported())
+ if (IsDXVASupported()) {
return ffCodecs[m_nCodecNb].DXVAModes->PicEntryNumber;
- else
+ } else {
return 0;
+ }
}
@@ -1244,44 +1245,32 @@ void CMPCVideoDecFilter::AllocExtradata(AVCodecContext* pAVCtx, const CMediaType
const BYTE* data = NULL;
unsigned int size = 0;
- if (pmt->formattype==FORMAT_VideoInfo)
- {
+ if (pmt->formattype==FORMAT_VideoInfo) {
size = pmt->cbFormat-sizeof(VIDEOINFOHEADER);
data = size?pmt->pbFormat+sizeof(VIDEOINFOHEADER):NULL;
- }
- else if (pmt->formattype==FORMAT_VideoInfo2)
- {
+ } else if (pmt->formattype==FORMAT_VideoInfo2) {
size = pmt->cbFormat-sizeof(VIDEOINFOHEADER2);
data = size?pmt->pbFormat+sizeof(VIDEOINFOHEADER2):NULL;
- }
- else if (pmt->formattype==FORMAT_MPEGVideo)
- {
+ } else if (pmt->formattype==FORMAT_MPEGVideo) {
MPEG1VIDEOINFO* mpeg1info = (MPEG1VIDEOINFO*)pmt->pbFormat;
- if (mpeg1info->cbSequenceHeader)
- {
+ if (mpeg1info->cbSequenceHeader) {
size = mpeg1info->cbSequenceHeader;
data = mpeg1info->bSequenceHeader;
}
- }
- else if (pmt->formattype==FORMAT_MPEG2Video)
- {
+ } else if (pmt->formattype==FORMAT_MPEG2Video) {
MPEG2VIDEOINFO* mpeg2info = (MPEG2VIDEOINFO*)pmt->pbFormat;
- if (mpeg2info->cbSequenceHeader)
- {
+ if (mpeg2info->cbSequenceHeader) {
size = mpeg2info->cbSequenceHeader;
data = (const uint8_t*)mpeg2info->dwSequenceHeader;
}
- }
- else if (pmt->formattype==FORMAT_VorbisFormat2)
- {
+ } else if (pmt->formattype==FORMAT_VorbisFormat2) {
const VORBISFORMAT2 *vf2=(const VORBISFORMAT2*)pmt->pbFormat;
UNUSED_ALWAYS(vf2);
size=pmt->cbFormat-sizeof(VORBISFORMAT2);
data=size?pmt->pbFormat+sizeof(VORBISFORMAT2):NULL;
}
- if (size)
- {
+ if (size) {
pAVCtx->extradata_size = size;
pAVCtx->extradata = (const unsigned char*)calloc(1,size+FF_INPUT_BUFFER_PADDING_SIZE);
memcpy((void*)pAVCtx->extradata, data, size);
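
Note: AllocExtradata copies whichever out-of-band header it found into pAVCtx->extradata, and libavcodec expects that buffer to be over-allocated by FF_INPUT_BUFFER_PADDING_SIZE zeroed bytes, which the calloc above provides. A minimal sketch of that copy, with the padding size passed in rather than taken from the ffmpeg headers:

    #include <cstdlib>
    #include <cstring>

    // padding stands in for FF_INPUT_BUFFER_PADDING_SIZE
    unsigned char* CopyExtradata(const unsigned char* src, unsigned int size, unsigned int padding)
    {
        unsigned char* dst = (unsigned char*)calloc(1, size + padding);
        if (dst && src && size) {
            memcpy(dst, src, size);     // trailing padding bytes stay zeroed
        }
        return dst;                     // caller owns the buffer (free())
    }
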
@@ -1293,31 +1282,31 @@ HRESULT CMPCVideoDecFilter::CompleteConnect(PIN_DIRECTION direction, IPin* pRece
{
LOG(_T("CMPCVideoDecFilter::CompleteConnect"));
- if (direction==PINDIR_INPUT && m_pOutput->IsConnected())
- {
+ if (direction==PINDIR_INPUT && m_pOutput->IsConnected()) {
ReconnectOutput (m_nWidth, m_nHeight);
- }
- else if (direction==PINDIR_OUTPUT)
- {
- if (IsDXVASupported())
- {
- if (m_nDXVAMode == MODE_DXVA1)
+ } else if (direction==PINDIR_OUTPUT) {
+ if (IsDXVASupported()) {
+ if (m_nDXVAMode == MODE_DXVA1) {
m_pDXVADecoder->ConfigureDXVA1();
- else if (SUCCEEDED (ConfigureDXVA2 (pReceivePin)) && SUCCEEDED (SetEVRForDXVA2 (pReceivePin)) )
+ } else if (SUCCEEDED (ConfigureDXVA2 (pReceivePin)) && SUCCEEDED (SetEVRForDXVA2 (pReceivePin)) ) {
m_nDXVAMode = MODE_DXVA2;
+ }
}
- if (m_nDXVAMode == MODE_SOFTWARE && !FFSoftwareCheckCompatibility(m_pAVCtx))
+ if (m_nDXVAMode == MODE_SOFTWARE && !FFSoftwareCheckCompatibility(m_pAVCtx)) {
return VFW_E_INVALIDMEDIATYPE;
+ }
CLSID ClsidSourceFilter = GetCLSID(m_pInput->GetConnected());
- if((ClsidSourceFilter == __uuidof(CMpegSourceFilter)) || (ClsidSourceFilter == __uuidof(CMpegSplitterFilter)))
+ if((ClsidSourceFilter == __uuidof(CMpegSourceFilter)) || (ClsidSourceFilter == __uuidof(CMpegSplitterFilter))) {
m_bReorderBFrame = false;
+ }
}
// Cannot use YUY2 if horizontal or vertical resolution is not even
if ( ((m_pOutput->CurrentMediaType().subtype == MEDIASUBTYPE_NV12) && (m_nDXVAMode == MODE_SOFTWARE)) ||
- ((m_pOutput->CurrentMediaType().subtype == MEDIASUBTYPE_YUY2) && (m_pAVCtx->width&1 || m_pAVCtx->height&1)) )
+ ((m_pOutput->CurrentMediaType().subtype == MEDIASUBTYPE_YUY2) && (m_pAVCtx->width&1 || m_pAVCtx->height&1)) ) {
return VFW_E_INVALIDMEDIATYPE;
+ }
return __super::CompleteConnect (direction, pReceivePin);
}
@@ -1325,24 +1314,26 @@ HRESULT CMPCVideoDecFilter::CompleteConnect(PIN_DIRECTION direction, IPin* pRece
HRESULT CMPCVideoDecFilter::DecideBufferSize(IMemAllocator* pAllocator, ALLOCATOR_PROPERTIES* pProperties)
{
- if (UseDXVA2())
- {
+ if (UseDXVA2()) {
HRESULT hr;
ALLOCATOR_PROPERTIES Actual;
- if(m_pInput->IsConnected() == FALSE) return E_UNEXPECTED;
+ if(m_pInput->IsConnected() == FALSE) {
+ return E_UNEXPECTED;
+ }
pProperties->cBuffers = GetPicEntryNumber();
- if(FAILED(hr = pAllocator->SetProperties(pProperties, &Actual)))
+ if(FAILED(hr = pAllocator->SetProperties(pProperties, &Actual))) {
return hr;
+ }
return pProperties->cBuffers > Actual.cBuffers || pProperties->cbBuffer > Actual.cbBuffer
? E_FAIL
: NOERROR;
- }
- else
+ } else {
return __super::DecideBufferSize (pAllocator, pProperties);
+ }
}
@@ -1356,19 +1347,20 @@ HRESULT CMPCVideoDecFilter::NewSegment(REFERENCE_TIME rtStart, REFERENCE_TIME rt
ResetBuffer();
- if (m_pAVCtx)
+ if (m_pAVCtx) {
avcodec_flush_buffers (m_pAVCtx);
+ }
- if (m_pDXVADecoder)
+ if (m_pDXVADecoder) {
m_pDXVADecoder->Flush();
+ }
return __super::NewSegment (rtStart, rtStop, dRate);
}
HRESULT CMPCVideoDecFilter::BreakConnect(PIN_DIRECTION dir)
{
- if (dir == PINDIR_INPUT)
- {
+ if (dir == PINDIR_INPUT) {
Cleanup();
}
@@ -1377,34 +1369,31 @@ HRESULT CMPCVideoDecFilter::BreakConnect(PIN_DIRECTION dir)
void CMPCVideoDecFilter::SetTypeSpecificFlags(IMediaSample* pMS)
{
- if(CComQIPtr<IMediaSample2> pMS2 = pMS)
- {
+ if(CComQIPtr<IMediaSample2> pMS2 = pMS) {
AM_SAMPLE2_PROPERTIES props;
- if(SUCCEEDED(pMS2->GetProperties(sizeof(props), (BYTE*)&props)))
- {
+ if(SUCCEEDED(pMS2->GetProperties(sizeof(props), (BYTE*)&props))) {
props.dwTypeSpecificFlags &= ~0x7f;
- if(!m_pFrame->interlaced_frame)
+ if(!m_pFrame->interlaced_frame) {
props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_WEAVE;
- else
- {
- if(m_pFrame->top_field_first)
+ } else {
+ if(m_pFrame->top_field_first) {
props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_FIELD1FIRST;
+ }
}
- switch (m_pFrame->pict_type)
- {
- case FF_I_TYPE :
- case FF_SI_TYPE :
- props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_I_SAMPLE;
- break;
- case FF_P_TYPE :
- case FF_SP_TYPE :
- props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_P_SAMPLE;
- break;
- default :
- props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_B_SAMPLE;
- break;
+ switch (m_pFrame->pict_type) {
+ case FF_I_TYPE :
+ case FF_SI_TYPE :
+ props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_I_SAMPLE;
+ break;
+ case FF_P_TYPE :
+ case FF_SP_TYPE :
+ props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_P_SAMPLE;
+ break;
+ default :
+ props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_B_SAMPLE;
+ break;
}
pMS2->SetProperties(sizeof(props), (BYTE*)&props);
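
Note: SetTypeSpecificFlags translates the decoded frame into DirectShow per-sample flags — the low bits are cleared, the interlacing/field-order bits are set, and the picture type selects the I/P/B sample flag. A sketch of that mapping, assuming the AM_VIDEO_FLAG_* constants from dvdmedia.h and using characters in place of the FF_*_TYPE values:

    #include <windows.h>
    #include <dvdmedia.h>

    DWORD BuildTypeSpecificFlags(DWORD flags, bool bInterlaced, bool bTopFieldFirst, char cPictType)
    {
        flags &= ~0x7f;                                     // clear the per-sample video flags
        if (!bInterlaced) {
            flags |= AM_VIDEO_FLAG_WEAVE;                   // progressive frame
        } else if (bTopFieldFirst) {
            flags |= AM_VIDEO_FLAG_FIELD1FIRST;
        }
        switch (cPictType) {
            case 'I': flags |= AM_VIDEO_FLAG_I_SAMPLE; break;
            case 'P': flags |= AM_VIDEO_FLAG_P_SAMPLE; break;
            default : flags |= AM_VIDEO_FLAG_B_SAMPLE; break;
        }
        return flags;
    }
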
@@ -1415,11 +1404,12 @@ void CMPCVideoDecFilter::SetTypeSpecificFlags(IMediaSample* pMS)
#if HAS_FFMPEG_VIDEO_DECODERS
int CMPCVideoDecFilter::GetCspFromMediaType(GUID& subtype)
{
- if (subtype == MEDIASUBTYPE_I420 || subtype == MEDIASUBTYPE_IYUV || subtype == MEDIASUBTYPE_YV12)
+ if (subtype == MEDIASUBTYPE_I420 || subtype == MEDIASUBTYPE_IYUV || subtype == MEDIASUBTYPE_YV12) {
return FF_CSP_420P|FF_CSP_FLAGS_YUV_ADJ;
- else if (subtype == MEDIASUBTYPE_YUY2)
+ } else if (subtype == MEDIASUBTYPE_YUY2) {
return FF_CSP_YUY2;
-// else if (subtype == MEDIASUBTYPE_ARGB32 || subtype == MEDIASUBTYPE_RGB32 || subtype == MEDIASUBTYPE_RGB24 || subtype == MEDIASUBTYPE_RGB565)
+ }
+ // else if (subtype == MEDIASUBTYPE_ARGB32 || subtype == MEDIASUBTYPE_RGB32 || subtype == MEDIASUBTYPE_RGB24 || subtype == MEDIASUBTYPE_RGB565)
ASSERT (FALSE);
return FF_CSP_NULL;
@@ -1428,15 +1418,18 @@ int CMPCVideoDecFilter::GetCspFromMediaType(GUID& subtype)
void CMPCVideoDecFilter::InitSwscale()
{
- if (m_pSwsContext == NULL)
- {
+ if (m_pSwsContext == NULL) {
TYCbCr2RGB_coeffs coeffs(ffYCbCr_RGB_coeff_ITUR_BT601,0, 235, 16, 255.0, 0.0);
int32_t swscaleTable[7];
SwsParams params;
memset(&params,0,sizeof(params));
- if (m_pAVCtx->dsp_mask & CCpuId::MPC_MM_MMX) params.cpu |= SWS_CPU_CAPS_MMX|SWS_CPU_CAPS_MMX2;
- if (m_pAVCtx->dsp_mask & CCpuId::MPC_MM_3DNOW) params.cpu |= SWS_CPU_CAPS_3DNOW;
+ if (m_pAVCtx->dsp_mask & CCpuId::MPC_MM_MMX) {
+ params.cpu |= SWS_CPU_CAPS_MMX|SWS_CPU_CAPS_MMX2;
+ }
+ if (m_pAVCtx->dsp_mask & CCpuId::MPC_MM_3DNOW) {
+ params.cpu |= SWS_CPU_CAPS_3DNOW;
+ }
params.methodLuma.method=params.methodChroma.method=SWS_POINT;
@@ -1487,10 +1480,8 @@ HRESULT CMPCVideoDecFilter::SoftwareDecode(IMediaSample* pIn, BYTE* pDataIn, int
AVPacket avpkt;
av_init_packet(&avpkt);
- while (nSize > 0)
- {
- if (nSize+FF_INPUT_BUFFER_PADDING_SIZE > m_nFFBufferSize)
- {
+ while (nSize > 0) {
+ if (nSize+FF_INPUT_BUFFER_PADDING_SIZE > m_nFFBufferSize) {
m_nFFBufferSize = nSize+FF_INPUT_BUFFER_PADDING_SIZE;
m_pFFBuffer = (BYTE*)realloc(m_pFFBuffer, m_nFFBufferSize);
}
@@ -1509,15 +1500,20 @@ HRESULT CMPCVideoDecFilter::SoftwareDecode(IMediaSample* pIn, BYTE* pDataIn, int
avpkt.flags = AV_PKT_FLAG_KEY;
used_bytes = avcodec_decode_video2 (m_pAVCtx, m_pFrame, &got_picture, &avpkt);
- if (!got_picture || !m_pFrame->data[0]) return S_OK;
- if(pIn->IsPreroll() == S_OK || rtStart < 0) return S_OK;
+ if (!got_picture || !m_pFrame->data[0]) {
+ return S_OK;
+ }
+ if(pIn->IsPreroll() == S_OK || rtStart < 0) {
+ return S_OK;
+ }
CComPtr<IMediaSample> pOut;
BYTE* pDataOut = NULL;
UpdateAspectRatio();
- if(FAILED(hr = GetDeliveryBuffer(m_pAVCtx->width, m_pAVCtx->height, &pOut)) || FAILED(hr = pOut->GetPointer(&pDataOut)))
+ if(FAILED(hr = GetDeliveryBuffer(m_pAVCtx->width, m_pAVCtx->height, &pOut)) || FAILED(hr = pOut->GetPointer(&pDataOut))) {
return hr;
+ }
rtStart = m_pFrame->reordered_opaque;
rtStop = m_pFrame->reordered_opaque + m_rtAvrTimePerFrame;
@@ -1527,34 +1523,37 @@ HRESULT CMPCVideoDecFilter::SoftwareDecode(IMediaSample* pIn, BYTE* pDataIn, int
pOut->SetMediaTime(NULL, NULL);
#if HAS_FFMPEG_VIDEO_DECODERS
- if (m_pSwsContext == NULL) InitSwscale();
+ if (m_pSwsContext == NULL) {
+ InitSwscale();
+ }
// TODO : quick and dirty patch to fix conversion to YUY2 with swscale
- if (m_nOutCsp == FF_CSP_YUY2)
+ if (m_nOutCsp == FF_CSP_YUY2) {
CopyBuffer(pDataOut, m_pFrame->data, m_pAVCtx->width, m_pAVCtx->height, m_pFrame->linesize[0], MEDIASUBTYPE_I420, false);
+ }
- else if (m_pSwsContext != NULL)
- {
+ else if (m_pSwsContext != NULL) {
uint8_t* dst[4];
stride_t srcStride[4];
stride_t dstStride[4];
const TcspInfo *outcspInfo=csp_getInfo(m_nOutCsp);
- for (int i=0; i<4; i++)
- {
+ for (int i=0; i<4; i++) {
srcStride[i]=(stride_t)m_pFrame->linesize[i];
dstStride[i]=m_pOutSize.cx>>outcspInfo->shiftX[i];
- if (i==0)
+ if (i==0) {
dst[i]=pDataOut;
- else
+ } else {
dst[i]=dst[i-1]+dstStride[i-1]*(m_pOutSize.cy>>outcspInfo->shiftY[i-1]);
+ }
}
int nTempCsp = m_nOutCsp;
- if(outcspInfo->id==FF_CSP_420P)
+ if(outcspInfo->id==FF_CSP_420P) {
csp_yuv_adj_to_plane(nTempCsp,outcspInfo,odd2even(m_pOutSize.cy),(unsigned char**)dst,dstStride);
- else
+ } else {
csp_yuv_adj_to_plane(nTempCsp,outcspInfo,m_pAVCtx->height,(unsigned char**)dst,dstStride);
+ }
sws_scale_ordered (m_pSwsContext, m_pFrame->data, srcStride, 0, m_pAVCtx->height, dst, dstStride);
}
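
Note: the plane-pointer loop above derives each destination plane's stride and base address from the previous one, using the colorspace's chroma shift factors. For the common 4:2:0 case the arithmetic reduces to the following self-contained sketch:

    #include <cstdint>

    void SetupYuv420Planes(uint8_t* base, int width, int height,
                           uint8_t* dst[3], int stride[3])
    {
        stride[0] = width;                              // luma, full horizontal resolution
        stride[1] = stride[2] = width >> 1;             // chroma, half horizontal resolution

        dst[0] = base;
        dst[1] = dst[0] + stride[0] * height;           // Cb follows the luma plane
        dst[2] = dst[1] + stride[1] * (height >> 1);    // Cr follows Cb (half vertical resolution)
    }
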
@@ -1667,24 +1666,17 @@ bool CMPCVideoDecFilter::FindPicture(int nIndex, int nStartCode)
{
DWORD dw = 0;
- for (int i=0; i<m_nFFBufferPos-nIndex; i++)
- {
+ for (int i=0; i<m_nFFBufferPos-nIndex; i++) {
dw = (dw<<8) + m_pFFBuffer[i+nIndex];
- if (i >= 4)
- {
- if (m_nFFPicEnd == INT_MIN)
- {
+ if (i >= 4) {
+ if (m_nFFPicEnd == INT_MIN) {
if ( (dw & 0xffffff00) == 0x00000100 &&
- (dw & 0x000000FF) == (DWORD)nStartCode )
- {
+ (dw & 0x000000FF) == (DWORD)nStartCode ) {
m_nFFPicEnd = i+nIndex-3;
}
- }
- else
- {
+ } else {
if ( (dw & 0xffffff00) == 0x00000100 &&
- ( (dw & 0x000000FF) == (DWORD)nStartCode || (dw & 0x000000FF) == 0xB3 ))
- {
+ ( (dw & 0x000000FF) == (DWORD)nStartCode || (dw & 0x000000FF) == 0xB3 )) {
m_nFFPicEnd = i+nIndex-3;
return true;
}
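
Note: FindPicture shifts bytes into a 32-bit word and watches for an MPEG start-code prefix (00 00 01) followed by the requested code (or a sequence header, 0xB3, once a picture has started), which marks where a buffered picture ends. A simplified standalone version of that scan:

    #include <cstddef>
    #include <cstdint>

    // Returns the offset of the first 00 00 01 <code> prefix, or -1 if none.
    int FindStartCode(const uint8_t* buf, size_t size, uint8_t code)
    {
        uint32_t dw = 0;
        for (size_t i = 0; i < size; i++) {
            dw = (dw << 8) | buf[i];
            // after four bytes the window holds buf[i-3..i]
            if (i >= 3 && (dw & 0xffffff00) == 0x00000100 && (dw & 0xff) == code) {
                return (int)(i - 3);
            }
        }
        return -1;
    }
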
@@ -1702,8 +1694,7 @@ void CMPCVideoDecFilter::ResetBuffer()
m_nFFBufferPos = 0;
m_nFFPicEnd = INT_MIN;
- for (int i=0; i<MAX_BUFF_TIME; i++)
- {
+ for (int i=0; i<MAX_BUFF_TIME; i++) {
m_FFBufferTime[i].nBuffPos = INT_MIN;
m_FFBufferTime[i].rtStart = _I64_MIN;
m_FFBufferTime[i].rtStop = _I64_MIN;
@@ -1712,10 +1703,8 @@ void CMPCVideoDecFilter::ResetBuffer()
void CMPCVideoDecFilter::PushBufferTime(int nPos, REFERENCE_TIME& rtStart, REFERENCE_TIME& rtStop)
{
- for (int i=0; i<MAX_BUFF_TIME; i++)
- {
- if (m_FFBufferTime[i].nBuffPos == INT_MIN)
- {
+ for (int i=0; i<MAX_BUFF_TIME; i++) {
+ if (m_FFBufferTime[i].nBuffPos == INT_MIN) {
m_FFBufferTime[i].nBuffPos = nPos;
m_FFBufferTime[i].rtStart = rtStart;
m_FFBufferTime[i].rtStop = rtStop;
@@ -1730,10 +1719,8 @@ void CMPCVideoDecFilter::PopBufferTime(int nPos)
int i = 0;
// Shift buffer time list
- while (i<MAX_BUFF_TIME && m_FFBufferTime[i].nBuffPos!=INT_MIN)
- {
- if (m_FFBufferTime[i].nBuffPos >= nPos)
- {
+ while (i<MAX_BUFF_TIME && m_FFBufferTime[i].nBuffPos!=INT_MIN) {
+ if (m_FFBufferTime[i].nBuffPos >= nPos) {
m_FFBufferTime[nDestPos].nBuffPos = m_FFBufferTime[i].nBuffPos - nPos;
m_FFBufferTime[nDestPos].rtStart = m_FFBufferTime[i].rtStart;
m_FFBufferTime[nDestPos].rtStop = m_FFBufferTime[i].rtStop;
@@ -1743,8 +1730,7 @@ void CMPCVideoDecFilter::PopBufferTime(int nPos)
}
// Free unused slots
- for (i=nDestPos; i<MAX_BUFF_TIME; i++)
- {
+ for (i=nDestPos; i<MAX_BUFF_TIME; i++) {
m_FFBufferTime[i].nBuffPos = INT_MIN;
m_FFBufferTime[i].rtStart = _I64_MIN;
m_FFBufferTime[i].rtStop = _I64_MIN;
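
Note: the m_FFBufferTime table pairs each timestamped input with its byte offset in the accumulation buffer; when the consumed bytes are discarded, PopBufferTime rebases the surviving entries and clears the rest. A compact sketch of that bookkeeping with plain types:

    #include <climits>

    struct BuffTime { int nBuffPos; long long rtStart, rtStop; };

    void PopBufferTimeSketch(BuffTime (&t)[20], int nPos)   // 20 plays the role of MAX_BUFF_TIME
    {
        int nDest = 0;
        for (int i = 0; i < 20 && t[i].nBuffPos != INT_MIN; i++) {
            if (t[i].nBuffPos >= nPos) {            // entry survives the shrink
                t[nDest] = t[i];
                t[nDest].nBuffPos -= nPos;          // rebase against the new buffer start
                nDest++;
            }
        }
        for (int i = nDest; i < 20; i++) {          // mark the remaining slots as free
            t[i].nBuffPos = INT_MIN;
            t[i].rtStart = t[i].rtStop = LLONG_MIN;
        }
    }
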
@@ -1753,11 +1739,11 @@ void CMPCVideoDecFilter::PopBufferTime(int nPos)
bool CMPCVideoDecFilter::AppendBuffer (BYTE* pDataIn, int nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
- if (rtStart != _I64_MIN)
+ if (rtStart != _I64_MIN) {
PushBufferTime (m_nFFBufferPos, rtStart, rtStop);
+ }
- if (m_nFFBufferPos+nSize+FF_INPUT_BUFFER_PADDING_SIZE > m_nFFBufferSize)
- {
+ if (m_nFFBufferPos+nSize+FF_INPUT_BUFFER_PADDING_SIZE > m_nFFBufferSize) {
m_nFFBufferSize = m_nFFBufferPos+nSize+FF_INPUT_BUFFER_PADDING_SIZE;
m_pFFBuffer = (BYTE*)realloc(m_pFFBuffer, m_nFFBufferSize);
}
@@ -1792,8 +1778,9 @@ HRESULT CMPCVideoDecFilter::Transform(IMediaSample* pIn)
REFERENCE_TIME rtStart = _I64_MIN;
REFERENCE_TIME rtStop = _I64_MIN;
- if(FAILED(hr = pIn->GetPointer(&pDataIn)))
+ if(FAILED(hr = pIn->GetPointer(&pDataIn))) {
return hr;
+ }
nSize = pIn->GetActualDataLength();
pIn->GetTime(&rtStart, &rtStop);
@@ -1818,23 +1805,23 @@ HRESULT CMPCVideoDecFilter::Transform(IMediaSample* pIn)
// m_nCountEstimated++;
// rtStart = rtStop = m_rtLastStart + m_nCountEstimated*m_rtAvrTimePerFrame;
//}
- if (rtStop <= rtStart && rtStop != _I64_MIN)
+ if (rtStop <= rtStart && rtStop != _I64_MIN) {
rtStop = rtStart + m_rtAvrTimePerFrame;
+ }
m_pAVCtx->reordered_opaque = rtStart;
m_pAVCtx->reordered_opaque2 = rtStop;
- if (m_pAVCtx->has_b_frames)
- {
+ if (m_pAVCtx->has_b_frames) {
m_BFrames[m_nPosB].rtStart = rtStart;
m_BFrames[m_nPosB].rtStop = rtStop;
m_nPosB = 1-m_nPosB;
}
-// m_rtStart = rtStart;
+ // m_rtStart = rtStart;
-// DumpBuffer (pDataIn, nSize);
-// TRACE ("Receive : %10I64d - %10I64d (%10I64d) Size=%d\n", rtStart, rtStop, rtStop - rtStart, nSize);
+ // DumpBuffer (pDataIn, nSize);
+ // TRACE ("Receive : %10I64d - %10I64d (%10I64d) Size=%d\n", rtStart, rtStop, rtStop - rtStart, nSize);
//char strMsg[300];
//FILE* hFile = fopen ("d:\\receive.txt", "at");
@@ -1847,49 +1834,42 @@ HRESULT CMPCVideoDecFilter::Transform(IMediaSample* pIn)
//fwrite (pDataIn, nSize, 1, hFile);
//fclose (hFile);
- switch (m_nDXVAMode)
- {
- case MODE_SOFTWARE :
- hr = SoftwareDecode (pIn, pDataIn, nSize, rtStart, rtStop);
- break;
- case MODE_DXVA1 :
- case MODE_DXVA2 :
- CheckPointer (m_pDXVADecoder, E_UNEXPECTED);
- UpdateAspectRatio();
+ switch (m_nDXVAMode) {
+ case MODE_SOFTWARE :
+ hr = SoftwareDecode (pIn, pDataIn, nSize, rtStart, rtStop);
+ break;
+ case MODE_DXVA1 :
+ case MODE_DXVA2 :
+ CheckPointer (m_pDXVADecoder, E_UNEXPECTED);
+ UpdateAspectRatio();
+
+ // Change aspect ratio for DXVA1
+ if ((m_nDXVAMode == MODE_DXVA1) &&
+ ReconnectOutput(PictWidthRounded(), PictHeightRounded(), true, PictWidth(), PictHeight()) == S_OK) {
+ m_pDXVADecoder->ConfigureDXVA1();
+ }
- // Change aspect ratio for DXVA1
- if ((m_nDXVAMode == MODE_DXVA1) &&
- ReconnectOutput(PictWidthRounded(), PictHeightRounded(), true, PictWidth(), PictHeight()) == S_OK)
- {
- m_pDXVADecoder->ConfigureDXVA1();
- }
+ if (m_pAVCtx->codec_id == CODEC_ID_MPEG2VIDEO) {
+ AppendBuffer (pDataIn, nSize, rtStart, rtStop);
+ hr = S_OK;
- if (m_pAVCtx->codec_id == CODEC_ID_MPEG2VIDEO)
- {
- AppendBuffer (pDataIn, nSize, rtStart, rtStop);
- hr = S_OK;
-
- while (FindPicture (max (m_nFFBufferPos-nSize-4, 0), 0x00))
- {
- if (m_FFBufferTime[0].nBuffPos != INT_MIN && m_FFBufferTime[0].nBuffPos < m_nFFPicEnd)
- {
- rtStart = m_FFBufferTime[0].rtStart;
- rtStop = m_FFBufferTime[0].rtStop;
+ while (FindPicture (max (m_nFFBufferPos-nSize-4, 0), 0x00)) {
+ if (m_FFBufferTime[0].nBuffPos != INT_MIN && m_FFBufferTime[0].nBuffPos < m_nFFPicEnd) {
+ rtStart = m_FFBufferTime[0].rtStart;
+ rtStop = m_FFBufferTime[0].rtStop;
+ } else {
+ rtStart = rtStop = _I64_MIN;
+ }
+ hr = m_pDXVADecoder->DecodeFrame (m_pFFBuffer, m_nFFPicEnd, rtStart, rtStop);
+ ShrinkBuffer();
}
- else
- rtStart = rtStop = _I64_MIN;
- hr = m_pDXVADecoder->DecodeFrame (m_pFFBuffer, m_nFFPicEnd, rtStart, rtStop);
- ShrinkBuffer();
+ } else {
+ hr = m_pDXVADecoder->DecodeFrame (pDataIn, nSize, rtStart, rtStop);
}
- }
- else
- {
- hr = m_pDXVADecoder->DecodeFrame (pDataIn, nSize, rtStart, rtStop);
- }
- break;
- default :
- ASSERT (FALSE);
- hr = E_UNEXPECTED;
+ break;
+ default :
+ ASSERT (FALSE);
+ hr = E_UNEXPECTED;
}
return hr;
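
Note: in the DXVA MPEG-2 path above, input samples are only appended to the internal buffer; whenever FindPicture locates a complete picture it is submitted to the hardware decoder with the timestamps recorded for its first byte, then the consumed bytes are shrunk away. A rough outline of that loop (Decoder and Buffer are illustrative stand-ins, not the filter's classes):

    struct Picture { const unsigned char* data; int size; long long rtStart, rtStop; };

    template <typename Decoder, typename Buffer>
    long SubmitCompletePictures(Decoder& dec, Buffer& buf)
    {
        long hr = 0;                            // S_OK
        while (buf.FindPictureEnd()) {          // a full picture is buffered
            Picture pic = buf.FrontPicture();   // bytes plus the timestamps of its first byte
            hr = dec.DecodeFrame(pic.data, pic.size, pic.rtStart, pic.rtStop);
            buf.Shrink();                       // drop the consumed bytes, rebase the timestamps
        }
        return hr;
    }
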
@@ -1898,15 +1878,15 @@ HRESULT CMPCVideoDecFilter::Transform(IMediaSample* pIn)
void CMPCVideoDecFilter::UpdateAspectRatio()
{
- if(((m_nARMode) && (m_pAVCtx)) && ((m_pAVCtx->sample_aspect_ratio.num>0) && (m_pAVCtx->sample_aspect_ratio.den>0)))
- {
+ if(((m_nARMode) && (m_pAVCtx)) && ((m_pAVCtx->sample_aspect_ratio.num>0) && (m_pAVCtx->sample_aspect_ratio.den>0))) {
CSize SAR(m_pAVCtx->sample_aspect_ratio.num, m_pAVCtx->sample_aspect_ratio.den);
- if(m_sar != SAR)
- {
+ if(m_sar != SAR) {
m_sar = SAR;
CSize aspect(m_nWidth * SAR.cx, m_nHeight * SAR.cy);
int lnko = LNKO(aspect.cx, aspect.cy);
- if(lnko > 1) aspect.cx /= lnko, aspect.cy /= lnko;
+ if(lnko > 1) {
+ aspect.cx /= lnko, aspect.cy /= lnko;
+ }
SetAspect(aspect);
}
}
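
Note: UpdateAspectRatio turns the codec's sample aspect ratio into a display aspect ratio by scaling the frame size and reducing with a greatest common divisor (the role LNKO plays above). For example, 1440x1080 with SAR 4:3 gives 5760:3240, which reduces to 16:9. A standalone sketch using std::gcd:

    #include <numeric>   // std::gcd (C++17)

    void DisplayAspect(int width, int height, int sarNum, int sarDen, int& arX, int& arY)
    {
        arX = width * sarNum;        // e.g. 1440 * 4 = 5760
        arY = height * sarDen;       // e.g. 1080 * 3 = 3240
        int g = std::gcd(arX, arY);  // 360 in the example
        if (g > 1) {
            arX /= g;                // 16
            arY /= g;                // 9
        }
    }
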
@@ -1915,8 +1895,7 @@ void CMPCVideoDecFilter::UpdateAspectRatio()
void CMPCVideoDecFilter::ReorderBFrames(REFERENCE_TIME& rtStart, REFERENCE_TIME& rtStop)
{
// Re-order B-frames if needed
- if (m_pAVCtx->has_b_frames && m_bReorderBFrame)
- {
+ if (m_pAVCtx->has_b_frames && m_bReorderBFrame) {
rtStart = m_BFrames [m_nPosB].rtStart;
rtStop = m_BFrames [m_nPosB].rtStop;
}
@@ -1933,14 +1912,13 @@ void CMPCVideoDecFilter::FillInVideoDescription(DXVA2_VideoDesc *pDesc)
BOOL CMPCVideoDecFilter::IsSupportedDecoderMode(const GUID& mode)
{
- if (IsDXVASupported())
- {
- for (int i=0; i<MAX_SUPPORTED_MODE; i++)
- {
- if (*ffCodecs[m_nCodecNb].DXVAModes->Decoder[i] == GUID_NULL)
+ if (IsDXVASupported()) {
+ for (int i=0; i<MAX_SUPPORTED_MODE; i++) {
+ if (*ffCodecs[m_nCodecNb].DXVAModes->Decoder[i] == GUID_NULL) {
break;
- else if (*ffCodecs[m_nCodecNb].DXVAModes->Decoder[i] == mode)
+ } else if (*ffCodecs[m_nCodecNb].DXVAModes->Decoder[i] == mode) {
return true;
+ }
}
}
@@ -1959,9 +1937,9 @@ BOOL CMPCVideoDecFilter::IsSupportedDecoderConfig(const D3DFORMAT nD3DFormat, co
}
HRESULT CMPCVideoDecFilter::FindDXVA2DecoderConfiguration(IDirectXVideoDecoderService *pDecoderService,
- const GUID& guidDecoder,
- DXVA2_ConfigPictureDecode *pSelectedConfig,
- BOOL *pbFoundDXVA2Configuration)
+ const GUID& guidDecoder,
+ DXVA2_ConfigPictureDecode *pSelectedConfig,
+ BOOL *pbFoundDXVA2Configuration)
{
HRESULT hr = S_OK;
UINT cFormats = 0;
@@ -1975,11 +1953,9 @@ HRESULT CMPCVideoDecFilter::FindDXVA2DecoderConfiguration(IDirectXVideoDecoderSe
hr = pDecoderService->GetDecoderRenderTargets(guidDecoder, &cFormats, &pFormats);
LOG (_T("GetDecoderRenderTargets => %d"), cFormats);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Look for a format that matches our output format.
- for (UINT iFormat = 0; iFormat < cFormats; iFormat++)
- {
+ for (UINT iFormat = 0; iFormat < cFormats; iFormat++) {
LOG (_T("Try to negociate => 0x%08x"), pFormats[iFormat]);
// Fill in the video description. Set the width, height, format, and frame rate.
@@ -1989,24 +1965,22 @@ HRESULT CMPCVideoDecFilter::FindDXVA2DecoderConfiguration(IDirectXVideoDecoderSe
// Get the available configurations.
hr = pDecoderService->GetDecoderConfigurations(guidDecoder, &m_VideoDesc, NULL, &cConfigurations, &pConfig);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
continue;
}
// Find a supported configuration.
- for (UINT iConfig = 0; iConfig < cConfigurations; iConfig++)
- {
- if (IsSupportedDecoderConfig(pFormats[iFormat], pConfig[iConfig], bIsPrefered))
- {
+ for (UINT iConfig = 0; iConfig < cConfigurations; iConfig++) {
+ if (IsSupportedDecoderConfig(pFormats[iFormat], pConfig[iConfig], bIsPrefered)) {
// This configuration is good.
- if (bIsPrefered || !*pbFoundDXVA2Configuration)
- {
+ if (bIsPrefered || !*pbFoundDXVA2Configuration) {
*pbFoundDXVA2Configuration = TRUE;
*pSelectedConfig = pConfig[iConfig];
}
- if (bIsPrefered) break;
+ if (bIsPrefered) {
+ break;
+ }
}
}
@@ -2041,70 +2015,61 @@ HRESULT CMPCVideoDecFilter::ConfigureDXVA2(IPin *pPin)
hr = pPin->QueryInterface(__uuidof(IMFGetService), (void**)&pGetService);
// Get the Direct3D device manager.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pGetService->GetService(
- MR_VIDEO_ACCELERATION_SERVICE,
- __uuidof(IDirect3DDeviceManager9),
- (void**)&pDeviceManager);
+ MR_VIDEO_ACCELERATION_SERVICE,
+ __uuidof(IDirect3DDeviceManager9),
+ (void**)&pDeviceManager);
}
// Open a new device handle.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pDeviceManager->OpenDeviceHandle(&hDevice);
}
// Get the video decoder service.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pDeviceManager->GetVideoService(
- hDevice,
- __uuidof(IDirectXVideoDecoderService),
- (void**)&pDecoderService);
+ hDevice,
+ __uuidof(IDirectXVideoDecoderService),
+ (void**)&pDecoderService);
}
// Get the decoder GUIDs.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pDecoderService->GetDecoderDeviceGuids(&cDecoderGuids, &pDecoderGuids);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Look for the decoder GUIDs we want.
- for (UINT iGuid = 0; iGuid < cDecoderGuids; iGuid++)
- {
+ for (UINT iGuid = 0; iGuid < cDecoderGuids; iGuid++) {
// Do we support this mode?
- if (!IsSupportedDecoderMode(pDecoderGuids[iGuid]))
- {
+ if (!IsSupportedDecoderMode(pDecoderGuids[iGuid])) {
continue;
}
// Find a configuration that we support.
hr = FindDXVA2DecoderConfiguration(pDecoderService, pDecoderGuids[iGuid], &config, &bFoundDXVA2Configuration);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
break;
}
- if (bFoundDXVA2Configuration)
- {
+ if (bFoundDXVA2Configuration) {
// Found a good configuration. Save the GUID.
guidDecoder = pDecoderGuids[iGuid];
}
}
}
- if (pDecoderGuids) CoTaskMemFree(pDecoderGuids);
- if (!bFoundDXVA2Configuration)
- {
+ if (pDecoderGuids) {
+ CoTaskMemFree(pDecoderGuids);
+ }
+ if (!bFoundDXVA2Configuration) {
hr = E_FAIL; // Unable to find a configuration.
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Store the things we will need later.
m_pDeviceManager = pDeviceManager;
m_pDecoderService = pDecoderService;
@@ -2114,10 +2079,8 @@ HRESULT CMPCVideoDecFilter::ConfigureDXVA2(IPin *pPin)
m_hDevice = hDevice;
}
- if (FAILED(hr))
- {
- if (hDevice != INVALID_HANDLE_VALUE)
- {
+ if (FAILED(hr)) {
+ if (hDevice != INVALID_HANDLE_VALUE) {
pDeviceManager->CloseDeviceHandle(hDevice);
}
}
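
Note: ConfigureDXVA2 walks the standard DXVA2 negotiation chain — query the renderer's pin for IMFGetService, obtain the IDirect3DDeviceManager9, open a device handle, get the IDirectXVideoDecoderService, then enumerate the decoder GUIDs it exposes. A condensed sketch of that chain using the same interfaces as above (error cleanup trimmed, header names approximate):

    #include <atlbase.h>    // CComPtr
    #include <dshow.h>      // IPin
    #include <mfidl.h>      // IMFGetService
    #include <d3d9.h>
    #include <dxva2api.h>   // IDirect3DDeviceManager9, IDirectXVideoDecoderService
    #include <evr.h>        // MR_VIDEO_ACCELERATION_SERVICE

    HRESULT EnumDecoderGuids(IPin* pPin, UINT* pcGuids, GUID** ppGuids)
    {
        CComPtr<IMFGetService>               pGetService;
        CComPtr<IDirect3DDeviceManager9>     pDevManager;
        CComPtr<IDirectXVideoDecoderService> pDecService;
        HANDLE hDevice = INVALID_HANDLE_VALUE;

        HRESULT hr = pPin->QueryInterface(__uuidof(IMFGetService), (void**)&pGetService);
        if (SUCCEEDED(hr)) {
            hr = pGetService->GetService(MR_VIDEO_ACCELERATION_SERVICE,
                                         __uuidof(IDirect3DDeviceManager9), (void**)&pDevManager);
        }
        if (SUCCEEDED(hr)) {
            hr = pDevManager->OpenDeviceHandle(&hDevice);
        }
        if (SUCCEEDED(hr)) {
            hr = pDevManager->GetVideoService(hDevice, __uuidof(IDirectXVideoDecoderService),
                                              (void**)&pDecService);
        }
        if (SUCCEEDED(hr)) {
            // caller frees the returned array with CoTaskMemFree
            hr = pDecService->GetDecoderDeviceGuids(pcGuids, ppGuids);
        }
        if (pDevManager && hDevice != INVALID_HANDLE_VALUE) {
            pDevManager->CloseDeviceHandle(hDevice);
        }
        return hr;
    }
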
@@ -2138,37 +2101,32 @@ HRESULT CMPCVideoDecFilter::SetEVRForDXVA2(IPin *pPin)
hr = pPin->QueryInterface(__uuidof(IMFGetService), (void**)&pGetService);
// Get the IDirectXVideoMemoryConfiguration interface.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pGetService->GetService(
- MR_VIDEO_ACCELERATION_SERVICE,
- __uuidof(IDirectXVideoMemoryConfiguration),
- (void**)&pVideoConfig);
+ MR_VIDEO_ACCELERATION_SERVICE,
+ __uuidof(IDirectXVideoMemoryConfiguration),
+ (void**)&pVideoConfig);
- if (SUCCEEDED (pGetService->GetService(MR_VIDEO_RENDER_SERVICE, __uuidof(IMFVideoDisplayControl), (void**)&pVdc)))
- {
+ if (SUCCEEDED (pGetService->GetService(MR_VIDEO_RENDER_SERVICE, __uuidof(IMFVideoDisplayControl), (void**)&pVdc))) {
HWND hWnd;
- if (SUCCEEDED (pVdc->GetVideoWindow(&hWnd)))
- {
+ if (SUCCEEDED (pVdc->GetVideoWindow(&hWnd))) {
DetectVideoCard(hWnd);
}
}
}
// Notify the EVR.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
DXVA2_SurfaceType surfaceType;
- for (DWORD iTypeIndex = 0; ; iTypeIndex++)
- {
+ for (DWORD iTypeIndex = 0; ; iTypeIndex++) {
hr = pVideoConfig->GetAvailableSurfaceTypeByIndex(iTypeIndex, &surfaceType);
- if (FAILED(hr))
+ if (FAILED(hr)) {
break;
+ }
- if (surfaceType == DXVA2_SurfaceType_DecoderRenderTarget)
- {
+ if (surfaceType == DXVA2_SurfaceType_DecoderRenderTarget) {
hr = pVideoConfig->SetSurfaceType(DXVA2_SurfaceType_DecoderRenderTarget);
break;
}
@@ -2186,17 +2144,19 @@ HRESULT CMPCVideoDecFilter::CreateDXVA2Decoder(UINT nNumRenderTargets, IDirect3D
m_pDecoderRenderTarget = NULL;
- if (m_pDXVADecoder) m_pDXVADecoder->SetDirectXVideoDec (NULL);
+ if (m_pDXVADecoder) {
+ m_pDXVADecoder->SetDirectXVideoDec (NULL);
+ }
hr = m_pDecoderService->CreateVideoDecoder (m_DXVADecoderGUID, &m_VideoDesc, &m_DXVA2Config,
- pDecoderRenderTargets, nNumRenderTargets, &pDirectXVideoDec);
+ pDecoderRenderTargets, nNumRenderTargets, &pDirectXVideoDec);
- if (SUCCEEDED (hr))
- {
- if (!m_pDXVADecoder)
- {
+ if (SUCCEEDED (hr)) {
+ if (!m_pDXVADecoder) {
m_pDXVADecoder = CDXVADecoder::CreateDecoder (this, pDirectXVideoDec, &m_DXVADecoderGUID, GetPicEntryNumber(), &m_DXVA2Config);
- if (m_pDXVADecoder) m_pDXVADecoder->SetExtraData ((BYTE*)m_pAVCtx->extradata, m_pAVCtx->extradata_size);
+ if (m_pDXVADecoder) {
+ m_pDXVADecoder->SetExtraData ((BYTE*)m_pAVCtx->extradata, m_pAVCtx->extradata_size);
+ }
}
m_pDXVADecoder->SetDirectXVideoDec (pDirectXVideoDec);
@@ -2214,18 +2174,14 @@ HRESULT CMPCVideoDecFilter::FindDXVA1DecoderConfiguration(IAMVideoAccelerator* p
pAMVideoAccelerator->GetUncompFormatsSupported (guidDecoder, &dwFormats, NULL);
- if (dwFormats > 0)
- {
+ if (dwFormats > 0) {
// Find the valid render target formats for this decoder GUID.
pPixelFormats = DNew DDPIXELFORMAT[dwFormats];
hr = pAMVideoAccelerator->GetUncompFormatsSupported (guidDecoder, &dwFormats, pPixelFormats);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Look for a format that matches our output format.
- for (DWORD iFormat = 0; iFormat < dwFormats; iFormat++)
- {
- if (pPixelFormats[iFormat].dwFourCC == MAKEFOURCC ('N', 'V', '1', '2'))
- {
+ for (DWORD iFormat = 0; iFormat < dwFormats; iFormat++) {
+ if (pPixelFormats[iFormat].dwFourCC == MAKEFOURCC ('N', 'V', '1', '2')) {
memcpy (pPixelFormat, &pPixelFormats[iFormat], sizeof(DDPIXELFORMAT));
SAFE_DELETE_ARRAY(pPixelFormats)
return S_OK;
@@ -2242,11 +2198,11 @@ HRESULT CMPCVideoDecFilter::FindDXVA1DecoderConfiguration(IAMVideoAccelerator* p
HRESULT CMPCVideoDecFilter::CheckDXVA1Decoder(const GUID *pGuid)
{
- if (m_nCodecNb != -1)
- {
+ if (m_nCodecNb != -1) {
for (int i=0; i<MAX_SUPPORTED_MODE; i++)
- if (*ffCodecs[m_nCodecNb].DXVAModes->Decoder[i] == *pGuid)
+ if (*ffCodecs[m_nCodecNb].DXVAModes->Decoder[i] == *pGuid) {
return S_OK;
+ }
}
return E_INVALIDARG;
@@ -2260,11 +2216,11 @@ void CMPCVideoDecFilter::SetDXVA1Params(const GUID* pGuid, DDPIXELFORMAT* pPixel
WORD CMPCVideoDecFilter::GetDXVA1RestrictedMode()
{
- if (m_nCodecNb != -1)
- {
+ if (m_nCodecNb != -1) {
for (int i=0; i<MAX_SUPPORTED_MODE; i++)
- if (*ffCodecs[m_nCodecNb].DXVAModes->Decoder[i] == m_DXVADecoderGUID)
+ if (*ffCodecs[m_nCodecNb].DXVAModes->Decoder[i] == m_DXVADecoderGUID) {
return ffCodecs[m_nCodecNb].DXVAModes->RestrictedMode [i];
+ }
}
return DXVA_RESTRICTED_MODE_UNRESTRICTED;
@@ -2272,16 +2228,21 @@ WORD CMPCVideoDecFilter::GetDXVA1RestrictedMode()
HRESULT CMPCVideoDecFilter::CreateDXVA1Decoder(IAMVideoAccelerator* pAMVideoAccelerator, const GUID* pDecoderGuid, DWORD dwSurfaceCount)
{
- if (m_pDXVADecoder && m_DXVADecoderGUID == *pDecoderGuid)
+ if (m_pDXVADecoder && m_DXVADecoderGUID == *pDecoderGuid) {
return S_OK;
+ }
SAFE_DELETE (m_pDXVADecoder);
- if (!m_bUseDXVA) return E_FAIL;
+ if (!m_bUseDXVA) {
+ return E_FAIL;
+ }
m_nDXVAMode = MODE_DXVA1;
m_DXVADecoderGUID = *pDecoderGuid;
m_pDXVADecoder = CDXVADecoder::CreateDecoder (this, pAMVideoAccelerator, &m_DXVADecoderGUID, dwSurfaceCount);
- if (m_pDXVADecoder) m_pDXVADecoder->SetExtraData ((BYTE*)m_pAVCtx->extradata, m_pAVCtx->extradata_size);
+ if (m_pDXVADecoder) {
+ m_pDXVADecoder->SetExtraData ((BYTE*)m_pAVCtx->extradata, m_pAVCtx->extradata_size);
+ }
return S_OK;
}
@@ -2302,7 +2263,9 @@ STDMETHODIMP CMPCVideoDecFilter::GetPages(CAUUID* pPages)
pPages->pElems = (GUID*)CoTaskMemAlloc(sizeof(GUID) * pPages->cElems);
pPages->pElems[0] = __uuidof(CMPCVideoDecSettingsWnd);
- if (pPages->cElems>1) pPages->pElems[1] = __uuidof(CMPCVideoDecCodecWnd);
+ if (pPages->cElems>1) {
+ pPages->pElems[1] = __uuidof(CMPCVideoDecCodecWnd);
+ }
return S_OK;
}
@@ -2311,16 +2274,15 @@ STDMETHODIMP CMPCVideoDecFilter::CreatePage(const GUID& guid, IPropertyPage** pp
{
CheckPointer(ppPage, E_POINTER);
- if(*ppPage != NULL) return E_INVALIDARG;
+ if(*ppPage != NULL) {
+ return E_INVALIDARG;
+ }
HRESULT hr;
- if(guid == __uuidof(CMPCVideoDecSettingsWnd))
- {
+ if(guid == __uuidof(CMPCVideoDecSettingsWnd)) {
(*ppPage = DNew CInternalPropertyPageTempl<CMPCVideoDecSettingsWnd>(NULL, &hr))->AddRef();
- }
- else if(guid == __uuidof(CMPCVideoDecCodecWnd))
- {
+ } else if(guid == __uuidof(CMPCVideoDecCodecWnd)) {
(*ppPage = DNew CInternalPropertyPageTempl<CMPCVideoDecCodecWnd>(NULL, &hr))->AddRef();
}
@@ -2332,8 +2294,7 @@ STDMETHODIMP CMPCVideoDecFilter::CreatePage(const GUID& guid, IPropertyPage** pp
STDMETHODIMP CMPCVideoDecFilter::Apply()
{
CRegKey key;
- if(ERROR_SUCCESS == key.Create(HKEY_CURRENT_USER, _T("Software\\Gabest\\Filters\\MPC Video Decoder")))
- {
+ if(ERROR_SUCCESS == key.Create(HKEY_CURRENT_USER, _T("Software\\Gabest\\Filters\\MPC Video Decoder"))) {
key.SetDWORDValue(_T("ThreadNumber"), m_nThreadNumber);
key.SetDWORDValue(_T("DiscardMode"), m_nDiscardMode);
key.SetDWORDValue(_T("ErrorRecognition"), m_nErrorRecognition);
@@ -2392,10 +2353,11 @@ STDMETHODIMP_(int) CMPCVideoDecFilter::GetIDCTAlgo()
}
STDMETHODIMP_(GUID*) CMPCVideoDecFilter::GetDXVADecoderGuid()
{
- if (m_pGraph == NULL)
+ if (m_pGraph == NULL) {
return NULL;
- else
+ } else {
return &m_DXVADecoderGUID;
+ }
}
STDMETHODIMP CMPCVideoDecFilter::SetActiveCodecs(MPC_VIDEO_CODEC nValue)
{
diff --git a/src/filters/transform/MPCVideoDec/MPCVideoDecFilter.h b/src/filters/transform/MPCVideoDec/MPCVideoDecFilter.h
index 7800a0e16..846a8cab2 100644
--- a/src/filters/transform/MPCVideoDec/MPCVideoDecFilter.h
+++ b/src/filters/transform/MPCVideoDec/MPCVideoDecFilter.h
@@ -44,22 +44,19 @@ class CCpuId;
#define MAX_BUFF_TIME 20
-typedef enum
-{
+typedef enum {
MODE_SOFTWARE,
MODE_DXVA1,
MODE_DXVA2
} DXVA_MODE;
-typedef struct
-{
+typedef struct {
REFERENCE_TIME rtStart;
REFERENCE_TIME rtStop;
} B_FRAME;
-typedef struct
-{
+typedef struct {
REFERENCE_TIME rtStart;
REFERENCE_TIME rtStop;
int nBuffPos;
@@ -162,8 +159,8 @@ protected:
void SetTypeSpecificFlags(IMediaSample* pMS);
HRESULT SoftwareDecode(IMediaSample* pIn, BYTE* pDataIn, int nSize, REFERENCE_TIME& rtStart, REFERENCE_TIME& rtStop);
-//void FindStartCodeVC1 (BYTE** pDataIn, int& nSize);
-//void FindStartCodeH264 (BYTE** pDataIn, int& nSize);
+ //void FindStartCodeVC1 (BYTE** pDataIn, int& nSize);
+ //void FindStartCodeH264 (BYTE** pDataIn, int& nSize);
bool AppendBuffer (BYTE* pDataIn, int nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop);
bool FindPicture(int nIndex, int nStartCode);
void ShrinkBuffer();
@@ -190,7 +187,9 @@ public:
STDMETHODIMP NonDelegatingQueryInterface(REFIID riid, void** ppv);
virtual bool IsVideoInterlaced();
virtual void GetOutputSize(int& w, int& h, int& arx, int& ary, int &RealWidth, int &RealHeight);
- CTransformOutputPin* GetOutputPin() {return m_pOutput;}
+ CTransformOutputPin* GetOutputPin() {
+ return m_pOutput;
+ }
void UpdateFrameTime (REFERENCE_TIME& rtStart, REFERENCE_TIME& rtStop);
// === Overriden DirectShow functions
@@ -241,19 +240,37 @@ public:
int PictHeight();
int PictWidthRounded();
int PictHeightRounded();
- inline bool UseDXVA2() {return (m_nDXVAMode == MODE_DXVA2);};
- void FlushDXVADecoder() {if (m_pDXVADecoder) m_pDXVADecoder->Flush();}
- inline AVCodecContext* GetAVCtx() {return m_pAVCtx;};
- inline AVFrame* GetFrame() {return m_pFrame;}
+ inline bool UseDXVA2() {
+ return (m_nDXVAMode == MODE_DXVA2);
+ };
+ void FlushDXVADecoder() {
+ if (m_pDXVADecoder) {
+ m_pDXVADecoder->Flush();
+ }
+ }
+ inline AVCodecContext* GetAVCtx() {
+ return m_pAVCtx;
+ };
+ inline AVFrame* GetFrame() {
+ return m_pFrame;
+ }
bool IsDXVASupported();
- inline bool IsReorderBFrame() { return m_bReorderBFrame;};
- inline int GetPCIVendor() {return m_nPCIVendor;};
- inline REFERENCE_TIME GetAvrTimePerFrame() {return m_rtAvrTimePerFrame;};
+ inline bool IsReorderBFrame() {
+ return m_bReorderBFrame;
+ };
+ inline int GetPCIVendor() {
+ return m_nPCIVendor;
+ };
+ inline REFERENCE_TIME GetAvrTimePerFrame() {
+ return m_rtAvrTimePerFrame;
+ };
void UpdateAspectRatio();
void ReorderBFrames(REFERENCE_TIME& rtStart, REFERENCE_TIME& rtStop);
// === DXVA1 functions
- DDPIXELFORMAT* GetPixelFormat() {return &m_PixelFormat;}
+ DDPIXELFORMAT* GetPixelFormat() {
+ return &m_PixelFormat;
+ }
HRESULT FindDXVA1DecoderConfiguration(IAMVideoAccelerator* pAMVideoAccelerator, const GUID* guidDecoder, DDPIXELFORMAT* pPixelFormat);
HRESULT CheckDXVA1Decoder(const GUID *pGuid);
void SetDXVA1Params(const GUID* pGuid, DDPIXELFORMAT* pPixelFormat);
@@ -263,12 +280,14 @@ public:
// === DXVA2 functions
void FillInVideoDescription(DXVA2_VideoDesc *pDesc);
- DXVA2_ConfigPictureDecode* GetDXVA2Config() {return &m_DXVA2Config;};
+ DXVA2_ConfigPictureDecode* GetDXVA2Config() {
+ return &m_DXVA2Config;
+ };
HRESULT ConfigureDXVA2(IPin *pPin);
HRESULT SetEVRForDXVA2(IPin *pPin);
HRESULT FindDXVA2DecoderConfiguration(IDirectXVideoDecoderService *pDecoderService,
- const GUID& guidDecoder,
- DXVA2_ConfigPictureDecode *pSelectedConfig,
- BOOL *pbFoundDXVA2Configuration);
+ const GUID& guidDecoder,
+ DXVA2_ConfigPictureDecode *pSelectedConfig,
+ BOOL *pbFoundDXVA2Configuration);
HRESULT CreateDXVA2Decoder(UINT nNumRenderTargets, IDirect3DSurface9** pDecoderRenderTargets);
};
diff --git a/src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.cpp b/src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.cpp
index 81b74bbf7..11f7d613a 100644
--- a/src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.cpp
+++ b/src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.cpp
@@ -37,20 +37,21 @@
// CMPCVideoDecSettingsWnd
//
-int g_AVDiscard[] =
-{
+int g_AVDiscard[] = {
-16, ///< AVDISCARD_NONE discard nothing
- 0, ///< AVDISCARD_DEFAULT discard useless packets like 0 size packets in avi
- 8, ///< AVDISCARD_NONREF discard all non reference
- 16, ///< AVDISCARD_BIDIR discard all bidirectional frames
- 32, ///< AVDISCARD_NONKEY discard all frames except keyframes
- 48, ///< AVDISCARD_ALL discard all
+ 0, ///< AVDISCARD_DEFAULT discard useless packets like 0 size packets in avi
+ 8, ///< AVDISCARD_NONREF discard all non reference
+ 16, ///< AVDISCARD_BIDIR discard all bidirectional frames
+ 32, ///< AVDISCARD_NONKEY discard all frames except keyframes
+ 48, ///< AVDISCARD_ALL discard all
};
int FindDiscardIndex(int nValue)
{
for (int i=0; i<countof (g_AVDiscard); i++)
- if (g_AVDiscard[i] == nValue) return i;
+ if (g_AVDiscard[i] == nValue) {
+ return i;
+ }
return 1;
}
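
Note: g_AVDiscard maps the combo-box index to libavcodec's AVDiscard values (AVDISCARD_NONE=-16 up to AVDISCARD_ALL=48), and FindDiscardIndex performs the reverse lookup with AVDISCARD_DEFAULT as the fallback. The round trip the property page relies on, as a small sketch:

    static const int kAVDiscard[] = { -16, 0, 8, 16, 32, 48 };   // same table as g_AVDiscard

    int DiscardValueToIndex(int nValue)
    {
        for (int i = 0; i < (int)(sizeof(kAVDiscard) / sizeof(kAVDiscard[0])); i++) {
            if (kAVDiscard[i] == nValue) {
                return i;
            }
        }
        return 1;   // index of AVDISCARD_DEFAULT
    }

    // UI -> filter : pMDF->SetDiscardMode(kAVDiscard[comboSelection]);
    // filter -> UI : comboSelection = DiscardValueToIndex(pMDF->GetDiscardMode());
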
@@ -66,9 +67,13 @@ bool CMPCVideoDecSettingsWnd::OnConnect(const CInterfaceList<IUnknown, &IID_IUnk
m_pMDF.Release();
POSITION pos = pUnks.GetHeadPosition();
- while(pos && !(m_pMDF = pUnks.GetNext(pos)));
+ while(pos && !(m_pMDF = pUnks.GetNext(pos))) {
+ ;
+ }
- if(!m_pMDF) return false;
+ if(!m_pMDF) {
+ return false;
+ }
return true;
}
@@ -86,7 +91,7 @@ bool CMPCVideoDecSettingsWnd::OnActivate()
#if HAS_FFMPEG_VIDEO_DECODERS
m_grpFFMpeg.Create (ResStr (IDS_VDF_FFSETTINGS), WS_VISIBLE|WS_CHILD | BS_GROUPBOX, CRect (10, nPosY, 350, nPosY+150), this, (UINT)IDC_STATIC);
- #if INTERNAL_DECODER_H264
+#if INTERNAL_DECODER_H264
// Decoding threads
nPosY += VERTICAL_SPACING;
@@ -101,9 +106,9 @@ bool CMPCVideoDecSettingsWnd::OnActivate()
m_cbThreadNumber.AddString (_T("7"));
m_cbThreadNumber.AddString (_T("8"));
- #endif /* INTERNAL_DECODER_H264 */
+#endif /* INTERNAL_DECODER_H264 */
- #if INTERNAL_DECODER_H264
+#if INTERNAL_DECODER_H264
// H264 deblocking mode
nPosY += VERTICAL_SPACING;
@@ -116,7 +121,7 @@ bool CMPCVideoDecSettingsWnd::OnActivate()
m_cbDiscardMode.AddString (ResStr (IDS_VDF_DBLK_NONKFRM));
m_cbDiscardMode.AddString (ResStr (IDS_VDF_DBLK_ALL));
- #endif /* INTERNAL_DECODER_H264 */
+#endif /* INTERNAL_DECODER_H264 */
// Error recognition
nPosY += VERTICAL_SPACING;
@@ -172,27 +177,24 @@ bool CMPCVideoDecSettingsWnd::OnActivate()
DxvaGui = m_pMDF->GetDXVADecoderGuid();
- if (DxvaGui != NULL)
- {
+ if (DxvaGui != NULL) {
CString DXVAMode = GetDXVAMode (DxvaGui);
m_edtDXVAMode.SetWindowText (DXVAMode);
- }
- else
- {
+ } else {
m_txtDXVAMode.ShowWindow (SW_HIDE);
m_edtDXVAMode.ShowWindow (SW_HIDE);
}
- for(CWnd* pWnd = GetWindow(GW_CHILD); pWnd; pWnd = pWnd->GetNextWindow())
+ for(CWnd* pWnd = GetWindow(GW_CHILD); pWnd; pWnd = pWnd->GetNextWindow()) {
pWnd->SetFont(&m_font, FALSE);
+ }
- if (m_pMDF)
- {
+ if (m_pMDF) {
#if HAS_FFMPEG_VIDEO_DECODERS
- #if INTERNAL_DECODER_H264
+#if INTERNAL_DECODER_H264
m_cbThreadNumber.SetCurSel (m_pMDF->GetThreadNumber() - 1);
m_cbDiscardMode.SetCurSel (FindDiscardIndex (m_pMDF->GetDiscardMode()));
- #endif
+#endif
m_cbErrorRecognition.SetCurSel (m_pMDF->GetErrorRecognition()-1);
m_cbIDCTAlgo.SetCurSel (m_pMDF->GetIDCTAlgo());
@@ -214,13 +216,12 @@ bool CMPCVideoDecSettingsWnd::OnApply()
{
OnDeactivate();
- if(m_pMDF && m_cbDXVACompatibilityCheck.m_hWnd)
- {
+ if(m_pMDF && m_cbDXVACompatibilityCheck.m_hWnd) {
#if HAS_FFMPEG_VIDEO_DECODERS
- #if INTERNAL_DECODER_H264
+#if INTERNAL_DECODER_H264
m_pMDF->SetThreadNumber (m_cbThreadNumber.GetCurSel() + 1);
m_pMDF->SetDiscardMode (g_AVDiscard[m_cbDiscardMode.GetCurSel()]);
- #endif /* INTERNAL_DECODER_H264 */
+#endif /* INTERNAL_DECODER_H264 */
m_pMDF->SetErrorRecognition (m_cbErrorRecognition.GetCurSel()+1);
m_pMDF->SetIDCTAlgo (m_cbIDCTAlgo.GetCurSel());
@@ -257,9 +258,13 @@ bool CMPCVideoDecCodecWnd::OnConnect(const CInterfaceList<IUnknown, &IID_IUnknow
m_pMDF.Release();
POSITION pos = pUnks.GetHeadPosition();
- while(pos && !(m_pMDF = pUnks.GetNext(pos)));
+ while(pos && !(m_pMDF = pUnks.GetNext(pos))) {
+ ;
+ }
- if(!m_pMDF) return false;
+ if(!m_pMDF) {
+ return false;
+ }
return true;
}
@@ -320,8 +325,9 @@ bool CMPCVideoDecCodecWnd::OnActivate()
m_lstCodecs.SetCheck (nPos++, (nActiveCodecs & MPCVD_THEORA) != 0);
#endif
- for(CWnd* pWnd = GetWindow(GW_CHILD); pWnd; pWnd = pWnd->GetNextWindow())
+ for(CWnd* pWnd = GetWindow(GW_CHILD); pWnd; pWnd = pWnd->GetNextWindow()) {
pWnd->SetFont(&m_font, FALSE);
+ }
return true;
}
@@ -334,35 +340,64 @@ bool CMPCVideoDecCodecWnd::OnApply()
{
OnDeactivate();
- if(m_pMDF)
- {
+ if(m_pMDF) {
int nActiveCodecs = 0;
int nPos = 0;
#if INTERNAL_DECODER_H264_DXVA
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_H264_DXVA;
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_H264_DXVA;
+ }
#endif
#if INTERNAL_DECODER_H264
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_H264;
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_H264;
+ }
#endif
#if INTERNAL_DECODER_VC1_DXVA
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_VC1_DXVA;
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_VC1_DXVA;
+ }
#endif
#if INTERNAL_DECODER_VC1
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_VC1;
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_VC1;
+ }
#endif
#if HAS_FFMPEG_VIDEO_DECODERS
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_XVID;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_DIVX;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_MSMPEG4;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_FLASH;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_VP6;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_VP8;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_WMV;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_H263;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_SVQ3;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_AMVV;
- if (m_lstCodecs.GetCheck (nPos++)) nActiveCodecs |= MPCVD_THEORA;
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_XVID;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_DIVX;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_MSMPEG4;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_FLASH;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_VP6;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_VP8;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_WMV;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_H263;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_SVQ3;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_AMVV;
+ }
+ if (m_lstCodecs.GetCheck (nPos++)) {
+ nActiveCodecs |= MPCVD_THEORA;
+ }
#endif
m_pMDF->SetActiveCodecs ((MPC_VIDEO_CODEC)nActiveCodecs);
diff --git a/src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.h b/src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.h
index 289b206b9..a6b372002 100644
--- a/src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.h
+++ b/src/filters/transform/MPCVideoDec/MPCVideoDecSettingsWnd.h
@@ -54,8 +54,7 @@ class __declspec(uuid("D5AA0389-D274-48e1-BF50-ACB05A56DDE0"))
CButton m_cbDXVA_SD;
- enum
- {
+ enum {
IDC_PP_THREAD_NUMBER = 10000,
IDC_PP_ENABLE_DEBLOCKING,
IDC_PP_DISCARD_MODE,
diff --git a/src/filters/transform/MPCVideoDec/TlibavcodecExt.cpp b/src/filters/transform/MPCVideoDec/TlibavcodecExt.cpp
index af7c78397..2ea37a49b 100644
--- a/src/filters/transform/MPCVideoDec/TlibavcodecExt.cpp
+++ b/src/filters/transform/MPCVideoDec/TlibavcodecExt.cpp
@@ -40,15 +40,17 @@ void TlibavcodecExt::ConnectTo(AVCodecContext *pAVCtx)
int TlibavcodecExt::get_buffer(AVCodecContext *c, AVFrame *pic)
{
int ret=c->opaque->ff_avcodec_default_get_buffer(c,pic);
- if (ret==0)
+ if (ret==0) {
c->opaque->OnGetBuffer(pic);
+ }
return ret;
}
int TlibavcodecExt::reget_buffer(AVCodecContext *c, AVFrame *pic)
{
int ret=c->opaque->ff_avcodec_default_reget_buffer(c,pic);
- if (ret==0)
+ if (ret==0) {
c->opaque->OnRegetBuffer(pic);
+ }
return ret;
}
void TlibavcodecExt::release_buffer(AVCodecContext *c, AVFrame *pic)
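
Note: TlibavcodecExt works by remembering libavcodec's default buffer callbacks, installing thin static wrappers, and using the codec context's opaque pointer to find its way back to the owning object once the default call succeeds. A self-contained sketch of that hook pattern with stand-in types:

    struct Frame {};
    struct Codec;
    typedef int (*GetBufferFn)(Codec*, Frame*);

    struct Codec {
        void*       opaque;        // back-pointer to the hook owner (AVCodecContext::opaque)
        GetBufferFn get_buffer;    // currently installed callback
    };

    struct Hook {
        GetBufferFn defaultGetBuffer;

        void OnGetBuffer(Frame*) { /* e.g. stamp the picture with timestamps */ }

        static int get_buffer(Codec* c, Frame* pic)
        {
            Hook* self = static_cast<Hook*>(c->opaque);
            int ret = self->defaultGetBuffer(c, pic);   // let the library allocate
            if (ret == 0) {
                self->OnGetBuffer(pic);                 // notify only on success
            }
            return ret;
        }

        void ConnectTo(Codec* c)
        {
            defaultGetBuffer = c->get_buffer;           // remember the default
            c->opaque = this;
            c->get_buffer = &Hook::get_buffer;          // install the wrapper
        }
    };
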
diff --git a/src/filters/transform/MPCVideoDec/TlibavcodecExt.h b/src/filters/transform/MPCVideoDec/TlibavcodecExt.h
index 7e7d5c051..37b7ccf8e 100644
--- a/src/filters/transform/MPCVideoDec/TlibavcodecExt.h
+++ b/src/filters/transform/MPCVideoDec/TlibavcodecExt.h
@@ -36,8 +36,7 @@ typedef void (*FUNC_AV_DEFAULT_RELEASE_BUFFER)(AVCodecContext *s, AVFrame *pic
typedef int (*FUNC_AV_DEFAULT_REGET_BUFFER)(AVCodecContext *s, AVFrame *pic);
-struct TlibavcodecExt
-{
+struct TlibavcodecExt {
protected:
static int get_buffer(AVCodecContext *s, AVFrame *pic);
static void release_buffer(AVCodecContext *s, AVFrame *pic);
diff --git a/src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.cpp b/src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.cpp
index 4cfcc495a..1d8579612 100644
--- a/src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.cpp
+++ b/src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.cpp
@@ -40,16 +40,12 @@ STDMETHODIMP CDXVA2Sample::QueryInterface(REFIID riid, __deref_out void **ppv)
CheckPointer(ppv,E_POINTER);
ValidateReadWritePtr(ppv,sizeof(PVOID));
- if (riid == __uuidof(IMFGetService))
- {
+ if (riid == __uuidof(IMFGetService)) {
return GetInterface((IMFGetService*) this, ppv);
}
- if (riid == __uuidof(IMPCDXVA2Sample))
- {
+ if (riid == __uuidof(IMPCDXVA2Sample)) {
return GetInterface((IMPCDXVA2Sample*) this, ppv);
- }
- else
- {
+ } else {
return CMediaSample::QueryInterface(riid, ppv);
}
}
@@ -70,16 +66,11 @@ STDMETHODIMP_(ULONG) CDXVA2Sample::Release()
// IMFGetService::GetService
STDMETHODIMP CDXVA2Sample::GetService(REFGUID guidService, REFIID riid, LPVOID *ppv)
{
- if (guidService != MR_BUFFER_SERVICE)
- {
+ if (guidService != MR_BUFFER_SERVICE) {
return MF_E_UNSUPPORTED_SERVICE;
- }
- else if (m_pSurface == NULL)
- {
+ } else if (m_pSurface == NULL) {
return E_NOINTERFACE;
- }
- else
- {
+ } else {
return m_pSurface->QueryInterface(riid, ppv);
}
}
@@ -131,8 +122,7 @@ HRESULT CVideoDecDXVAAllocator::Alloc()
hr = __super::Alloc();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Free the old resources.
Free();
@@ -140,20 +130,16 @@ HRESULT CVideoDecDXVAAllocator::Alloc()
// Allocate a new array of pointers.
m_ppRTSurfaceArray = DNew IDirect3DSurface9*[m_lCount];
- if (m_ppRTSurfaceArray == NULL)
- {
+ if (m_ppRTSurfaceArray == NULL) {
hr = E_OUTOFMEMORY;
- }
- else
- {
+ } else {
ZeroMemory(m_ppRTSurfaceArray, sizeof(IDirect3DSurface9*) * m_lCount);
}
}
// Allocate the surfaces.
D3DFORMAT m_dwFormat = m_pVideoDecFilter->m_VideoDesc.Format;
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pDXVA2Service->CreateSurface(
m_pVideoDecFilter->PictWidthRounded(),
m_pVideoDecFilter->PictHeightRounded(),
@@ -167,19 +153,15 @@ HRESULT CVideoDecDXVAAllocator::Alloc()
);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Important : create samples in reverse order !
- for (m_lAllocated = m_lCount-1; m_lAllocated >= 0; m_lAllocated--)
- {
+ for (m_lAllocated = m_lCount-1; m_lAllocated >= 0; m_lAllocated--) {
CDXVA2Sample *pSample = DNew CDXVA2Sample(this, &hr);
- if (pSample == NULL)
- {
+ if (pSample == NULL) {
hr = E_OUTOFMEMORY;
break;
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
break;
}
// Assign the Direct3D surface pointer and the index.
@@ -190,11 +172,12 @@ HRESULT CVideoDecDXVAAllocator::Alloc()
}
hr = m_pVideoDecFilter->CreateDXVA2Decoder (m_lCount, m_ppRTSurfaceArray);
- if (FAILED (hr)) Free();
+ if (FAILED (hr)) {
+ Free();
+ }
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
m_bChanged = FALSE;
}
return hr;
@@ -205,22 +188,19 @@ void CVideoDecDXVAAllocator::Free()
CMediaSample *pSample = NULL;
m_pVideoDecFilter->FlushDXVADecoder();
-// m_FreeSurface.RemoveAll();
- do
- {
+ // m_FreeSurface.RemoveAll();
+ do {
pSample = m_lFree.RemoveHead();
- if (pSample)
- {
+ if (pSample) {
delete pSample;
}
} while (pSample);
- if (m_ppRTSurfaceArray)
- {
- for (long i = 0; i < m_nSurfaceArrayCount; i++)
- {
- if (m_ppRTSurfaceArray[i] != NULL)
+ if (m_ppRTSurfaceArray) {
+ for (long i = 0; i < m_nSurfaceArrayCount; i++) {
+ if (m_ppRTSurfaceArray[i] != NULL) {
m_ppRTSurfaceArray[i]->Release();
+ }
}
delete [] m_ppRTSurfaceArray;
diff --git a/src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.h b/src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.h
index 0f20d4383..7b3206112 100644
--- a/src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.h
+++ b/src/filters/transform/MPCVideoDec/VideoDecDXVAAllocator.h
@@ -33,8 +33,7 @@ class CVideoDecDXVAAllocator;
interface __declspec(uuid("AE7EC2A2-1913-4a80-8DD6-DF1497ABA494"))
IMPCDXVA2Sample :
-public IUnknown
-{
+public IUnknown {
STDMETHOD_(int, GetDXSurfaceId()) = 0;
};
@@ -80,13 +79,13 @@ public:
CVideoDecDXVAAllocator(CMPCVideoDecFilter* pVideoDecFilter, HRESULT* phr);
virtual ~CVideoDecDXVAAllocator();
-// STDMETHODIMP GetBuffer(__deref_out IMediaSample **ppBuffer, // Try for a circular buffer!
-// __in_opt REFERENCE_TIME * pStartTime,
-// __in_opt REFERENCE_TIME * pEndTime,
-// DWORD dwFlags);
-//
-// STDMETHODIMP ReleaseBuffer(IMediaSample *pBuffer);
-// CAtlList<int> m_FreeSurface;
+ // STDMETHODIMP GetBuffer(__deref_out IMediaSample **ppBuffer, // Try for a circular buffer!
+ // __in_opt REFERENCE_TIME * pStartTime,
+ // __in_opt REFERENCE_TIME * pEndTime,
+ // DWORD dwFlags);
+ //
+ // STDMETHODIMP ReleaseBuffer(IMediaSample *pBuffer);
+ // CAtlList<int> m_FreeSurface;
protected:
diff --git a/src/filters/transform/MPCVideoDec/VideoDecOutputPin.cpp b/src/filters/transform/MPCVideoDec/VideoDecOutputPin.cpp
index 06dbbbd4e..4fcc513dd 100644
--- a/src/filters/transform/MPCVideoDec/VideoDecOutputPin.cpp
+++ b/src/filters/transform/MPCVideoDec/VideoDecOutputPin.cpp
@@ -44,24 +44,21 @@ CVideoDecOutputPin::~CVideoDecOutputPin(void)
HRESULT CVideoDecOutputPin::InitAllocator(IMemAllocator **ppAlloc)
{
TRACE("CVideoDecOutputPin::InitAllocator");
- if (m_pVideoDecFilter->UseDXVA2())
- {
+ if (m_pVideoDecFilter->UseDXVA2()) {
HRESULT hr = S_FALSE;
m_pDXVA2Allocator = DNew CVideoDecDXVAAllocator(m_pVideoDecFilter, &hr);
- if (!m_pDXVA2Allocator)
- {
+ if (!m_pDXVA2Allocator) {
return E_OUTOFMEMORY;
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
delete m_pDXVA2Allocator;
return hr;
}
// Return the IMemAllocator interface.
return m_pDXVA2Allocator->QueryInterface(__uuidof(IMemAllocator), (void **)ppAlloc);
- }
- else
+ } else {
return __super::InitAllocator(ppAlloc);
+ }
}
STDMETHODIMP CVideoDecOutputPin::NonDelegatingQueryInterface(REFIID riid, void** ppv)
@@ -77,18 +74,15 @@ STDMETHODIMP CVideoDecOutputPin::GetUncompSurfacesInfo(const GUID *pGuid, LPAMVA
{
HRESULT hr = E_INVALIDARG;
- if (SUCCEEDED (m_pVideoDecFilter->CheckDXVA1Decoder (pGuid)))
- {
+ if (SUCCEEDED (m_pVideoDecFilter->CheckDXVA1Decoder (pGuid))) {
CComQIPtr<IAMVideoAccelerator> pAMVideoAccelerator = GetConnected();
- if (pAMVideoAccelerator)
- {
+ if (pAMVideoAccelerator) {
pUncompBufferInfo->dwMaxNumSurfaces = m_pVideoDecFilter->GetPicEntryNumber();
pUncompBufferInfo->dwMinNumSurfaces = m_pVideoDecFilter->GetPicEntryNumber();
hr = m_pVideoDecFilter->FindDXVA1DecoderConfiguration (pAMVideoAccelerator, pGuid, &pUncompBufferInfo->ddUncompPixelFormat);
- if (SUCCEEDED (hr))
- {
+ if (SUCCEEDED (hr)) {
memcpy (&m_ddUncompPixelFormat, &pUncompBufferInfo->ddUncompPixelFormat, sizeof(DDPIXELFORMAT));
m_GuidDecoderDXVA1 = *pGuid;
}
@@ -112,19 +106,16 @@ STDMETHODIMP CVideoDecOutputPin::GetCreateVideoAcceleratorData(const GUID *pGuid
CComQIPtr<IAMVideoAccelerator> pAMVideoAccelerator = GetConnected();
DXVA_ConnectMode* pConnectMode;
- if (pAMVideoAccelerator)
- {
+ if (pAMVideoAccelerator) {
memcpy (&UncompInfo.ddUncompPixelFormat, &m_ddUncompPixelFormat, sizeof (DDPIXELFORMAT));
UncompInfo.dwUncompWidth = m_pVideoDecFilter->PictWidthRounded();
UncompInfo.dwUncompHeight = m_pVideoDecFilter->PictHeightRounded();
hr = pAMVideoAccelerator->GetCompBufferInfo(&m_GuidDecoderDXVA1, &UncompInfo, &dwNumTypesCompBuffers, CompInfo);
- if (SUCCEEDED (hr))
- {
+ if (SUCCEEDED (hr)) {
hr = m_pVideoDecFilter->CreateDXVA1Decoder (pAMVideoAccelerator, pGuid, m_dwDXVA1SurfaceCount);
- if (SUCCEEDED (hr))
- {
+ if (SUCCEEDED (hr)) {
m_pVideoDecFilter->SetDXVA1Params (&m_GuidDecoderDXVA1, &m_ddUncompPixelFormat);
pConnectMode = (DXVA_ConnectMode*)CoTaskMemAlloc (sizeof(DXVA_ConnectMode));
diff --git a/src/filters/transform/MPCVideoDec/stdafx.cpp b/src/filters/transform/MPCVideoDec/stdafx.cpp
index 1734bc03d..5008ff8b5 100644
--- a/src/filters/transform/MPCVideoDec/stdafx.cpp
+++ b/src/filters/transform/MPCVideoDec/stdafx.cpp
@@ -31,11 +31,9 @@ void LOG(LPCTSTR fmt, ...)
{
va_list args;
va_start(args, fmt);
- if(TCHAR* buff = new TCHAR[_vsctprintf(fmt, args) + 1])
- {
+ if(TCHAR* buff = new TCHAR[_vsctprintf(fmt, args) + 1]) {
_vstprintf(buff, fmt, args);
- if(FILE* f = _tfopen(LOG_FILE, _T("at")))
- {
+ if(FILE* f = _tfopen(LOG_FILE, _T("at"))) {
fseek(f, 0, 2);
_ftprintf(f, _T("%s\n"), buff);
fclose(f);