Diffstat (limited to 'plugins/pluginWinMF')
-rwxr-xr-x  plugins/pluginWinMF/dllmain_mf.cxx                        235
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_codec.cxx               1161
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_codec.h                  163
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_codec_topology.cxx       619
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_codec_topology.h          66
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_custom_src.cxx           821
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_custom_src.h             126
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_devices.cxx               95
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_devices.h                 12
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_display_watcher.cxx      181
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_display_watcher.h         32
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_sample_grabber.cxx        99
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_sample_grabber.h          18
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_sample_queue.cxx          75
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_sample_queue.h            37
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_utils.cxx               2792
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_utils.h                  383
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx         1163
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_config.h                 12
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx      175
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx     2151
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_converter_video.cxx     752
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx      425
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_producer_video.cxx     1131
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_tdav.cxx                  6
25 files changed, 6057 insertions(+), 6673 deletions(-)
diff --git a/plugins/pluginWinMF/dllmain_mf.cxx b/plugins/pluginWinMF/dllmain_mf.cxx
index aeeb863..99cace8 100755
--- a/plugins/pluginWinMF/dllmain_mf.cxx
+++ b/plugins/pluginWinMF/dllmain_mf.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -71,174 +71,151 @@ PLUGIN_WIN_MF_END_DECLS /* END */
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
- )
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- break;
- case DLL_THREAD_ATTACH:
- break;
- case DLL_THREAD_DETACH:
- break;
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
-typedef enum PLUGIN_INDEX_E
-{
+typedef enum PLUGIN_INDEX_E {
#if PLUGIN_MF_ENABLE_AUDIO_IO
- PLUGIN_INDEX_AUDIO_CONSUMER,
- PLUGIN_INDEX_AUDIO_PRODUCER,
+ PLUGIN_INDEX_AUDIO_CONSUMER,
+ PLUGIN_INDEX_AUDIO_PRODUCER,
#endif
#if PLUGIN_MF_ENABLE_VIDEO_IO
- PLUGIN_INDEX_VIDEO_PRODUCER,
- PLUGIN_INDEX_VIDEO_CONSUMER,
+ PLUGIN_INDEX_VIDEO_PRODUCER,
+ PLUGIN_INDEX_VIDEO_CONSUMER,
#endif
#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
- PLUGIN_INDEX_VIDEO_CONVERTER,
+ PLUGIN_INDEX_VIDEO_CONVERTER,
#endif
- PLUGIN_INDEX_CODEC_H264_MAIN,
- PLUGIN_INDEX_CODEC_H264_BASE,
-
- PLUGIN_INDEX_COUNT
+ PLUGIN_INDEX_CODEC_H264_MAIN,
+ PLUGIN_INDEX_CODEC_H264_BASE,
+
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- int count = PLUGIN_INDEX_COUNT;
- if(!MFUtils::IsLowLatencyH264Supported())
- {
- count -= 2;
- }
- return count;
+ int count = PLUGIN_INDEX_COUNT;
+ if(!MFUtils::IsLowLatencyH264Supported()) {
+ count -= 2;
+ }
+ return count;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
- switch(index){
+ switch(index) {
#if PLUGIN_MF_ENABLE_AUDIO_IO
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return (index == PLUGIN_INDEX_AUDIO_CONSUMER) ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_producer;
- }
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return (index == PLUGIN_INDEX_AUDIO_CONSUMER) ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_producer;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_IO
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- {
- return MFUtils::IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
- }
- case PLUGIN_INDEX_VIDEO_PRODUCER:
- {
- return tsk_plugin_def_type_producer;
- }
+ case PLUGIN_INDEX_VIDEO_CONSUMER: {
+ return MFUtils::IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
+ }
+ case PLUGIN_INDEX_VIDEO_PRODUCER: {
+ return tsk_plugin_def_type_producer;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
- case PLUGIN_INDEX_VIDEO_CONVERTER:
- {
- return tsk_plugin_def_type_converter;
- }
-#endif
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
- }
- }
+ case PLUGIN_INDEX_VIDEO_CONVERTER: {
+ return tsk_plugin_def_type_converter;
+ }
+#endif
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+ }
+ }
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
- switch(index){
+ switch(index) {
#if PLUGIN_MF_ENABLE_AUDIO_IO
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return tsk_plugin_def_media_type_audio;
- }
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return tsk_plugin_def_media_type_audio;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_IO
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- {
- return MFUtils::IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
- }
- case PLUGIN_INDEX_VIDEO_PRODUCER:
- {
- return tsk_plugin_def_media_type_video;
- }
+ case PLUGIN_INDEX_VIDEO_CONSUMER: {
+ return MFUtils::IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+ }
+ case PLUGIN_INDEX_VIDEO_PRODUCER: {
+ return tsk_plugin_def_media_type_video;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
- case PLUGIN_INDEX_VIDEO_CONVERTER:
- {
- return tsk_plugin_def_media_type_video;
- }
-#endif
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
- }
- }
+ case PLUGIN_INDEX_VIDEO_CONVERTER: {
+ return tsk_plugin_def_media_type_video;
+ }
+#endif
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+ }
+ }
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
- switch(index){
+ switch(index) {
#if PLUGIN_MF_ENABLE_VIDEO_IO
- case PLUGIN_INDEX_VIDEO_PRODUCER:
- {
- return plugin_win_mf_producer_video_plugin_def_t;
- }
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- {
- return MFUtils::IsD3D9Supported() ? plugin_win_mf_consumer_video_plugin_def_t : tsk_null;
- }
+ case PLUGIN_INDEX_VIDEO_PRODUCER: {
+ return plugin_win_mf_producer_video_plugin_def_t;
+ }
+ case PLUGIN_INDEX_VIDEO_CONSUMER: {
+ return MFUtils::IsD3D9Supported() ? plugin_win_mf_consumer_video_plugin_def_t : tsk_null;
+ }
#endif
#if PLUGIN_MF_ENABLE_AUDIO_IO
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return plugin_win_mf_producer_audio_plugin_def_t;
- }
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- {
- return plugin_win_mf_consumer_audio_plugin_def_t;
- }
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return plugin_win_mf_producer_audio_plugin_def_t;
+ }
+ case PLUGIN_INDEX_AUDIO_CONSUMER: {
+ return plugin_win_mf_consumer_audio_plugin_def_t;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
- case PLUGIN_INDEX_VIDEO_CONVERTER:
- {
- return plugin_win_mf_converter_video_ms_plugin_def_t;
- }
-#endif
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- {
- return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_main_plugin_def_t : tsk_null;
- }
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_base_plugin_def_t : tsk_null;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
- }
- }
+ case PLUGIN_INDEX_VIDEO_CONVERTER: {
+ return plugin_win_mf_converter_video_ms_plugin_def_t;
+ }
+#endif
+ case PLUGIN_INDEX_CODEC_H264_MAIN: {
+ return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_main_plugin_def_t : tsk_null;
+ }
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_base_plugin_def_t : tsk_null;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+ }
+ }
}
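The dllmain_mf.cxx changes above are whitespace/brace-style only: the file keeps exporting the plugin-definition table queried by the Doubango plugin loader (__plugin_get_def_count, __plugin_get_def_type_at, __plugin_get_def_media_type_at, __plugin_get_def_at). Below is a minimal sketch of how a host could walk that table, assuming plain Win32 dynamic loading and the export names shown above; the module name and loader are illustrative, not Doubango's actual tsk_plugin code.

// Illustrative host-side enumeration of the exports above (hypothetical
// DLL name; the real loader is Doubango's tsk_plugin implementation).
#include <windows.h>
#include <cstdio>

typedef int (*plugin_get_def_count_t)(void);
typedef int (*plugin_get_def_type_at_t)(int); // returns tsk_plugin_def_type_t (printed as int here)

int main()
{
    HMODULE hPlugin = LoadLibraryA("pluginWinMF.dll"); // assumed module name
    if (!hPlugin) {
        std::printf("plugin not found\n");
        return 1;
    }
    plugin_get_def_count_t getCount =
        (plugin_get_def_count_t)GetProcAddress(hPlugin, "__plugin_get_def_count");
    plugin_get_def_type_at_t getTypeAt =
        (plugin_get_def_type_at_t)GetProcAddress(hPlugin, "__plugin_get_def_type_at");
    if (getCount && getTypeAt) {
        // The count shrinks by two when no low-latency H.264 MFT is available,
        // because the two H.264 codec entries are then skipped.
        const int count = getCount();
        for (int i = 0; i < count; ++i) {
            std::printf("plugin def %d -> type %d\n", i, getTypeAt(i));
        }
    }
    FreeLibrary(hPlugin);
    return 0;
}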
diff --git a/plugins/pluginWinMF/internals/mf_codec.cxx b/plugins/pluginWinMF/internals/mf_codec.cxx
index e2968f4..27fb903 100755
--- a/plugins/pluginWinMF/internals/mf_codec.cxx
+++ b/plugins/pluginWinMF/internals/mf_codec.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -40,13 +40,13 @@
#endif
// Make sure usable on Win7 SDK targeting Win8 OS
-#if !defined(CODECAPI_AVLowLatencyMode)
+#if !defined(CODECAPI_AVLowLatencyMode)
DEFINE_GUID(CODECAPI_AVLowLatencyMode,
- 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
#endif
#if !defined(CODECAPI_AVDecVideoH264ErrorConcealment)
DEFINE_GUID(CODECAPI_AVDecVideoH264ErrorConcealment,
-0xececace8, 0x3436, 0x462c, 0x92, 0x94, 0xcd, 0x7b, 0xac, 0xd7, 0x58, 0xa9);
+ 0xececace8, 0x3436, 0x462c, 0x92, 0x94, 0xcd, 0x7b, 0xac, 0xd7, 0x58, 0xa9);
#endif
//
@@ -54,117 +54,105 @@ DEFINE_GUID(CODECAPI_AVDecVideoH264ErrorConcealment,
//
MFCodec::MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
-: m_nRefCount(1)
-, m_eId(eId)
-, m_eType(eType)
-, m_pMFT(NULL)
-, m_pCodecAPI(NULL)
-, m_pOutputType(NULL)
-, m_pInputType(NULL)
-, m_dwInputID(0)
-, m_dwOutputID(0)
-, m_rtStart(0)
-, m_rtDuration(0)
-, m_pSampleIn(NULL)
-, m_pSampleOut(NULL)
-, m_pEventGenerator(NULL)
-, m_bIsAsync(FALSE)
-, m_bIsFirstFrame(TRUE)
-, m_bIsBundled(FALSE)
-, m_nMETransformNeedInputCount(0)
-, m_nMETransformHaveOutputCount(0)
-, m_pSampleQueueAsyncInput(NULL)
+ : m_nRefCount(1)
+ , m_eId(eId)
+ , m_eType(eType)
+ , m_pMFT(NULL)
+ , m_pCodecAPI(NULL)
+ , m_pOutputType(NULL)
+ , m_pInputType(NULL)
+ , m_dwInputID(0)
+ , m_dwOutputID(0)
+ , m_rtStart(0)
+ , m_rtDuration(0)
+ , m_pSampleIn(NULL)
+ , m_pSampleOut(NULL)
+ , m_pEventGenerator(NULL)
+ , m_bIsAsync(FALSE)
+ , m_bIsFirstFrame(TRUE)
+ , m_bIsBundled(FALSE)
+ , m_nMETransformNeedInputCount(0)
+ , m_nMETransformHaveOutputCount(0)
+ , m_pSampleQueueAsyncInput(NULL)
{
- MFUtils::Startup();
-
- HRESULT hr = S_OK;
-
- switch(eId)
- {
- case MFCodecId_H264Base:
- case MFCodecId_H264Main:
- {
- m_eMediaType = MFCodecMediaType_Video;
- m_guidCompressedFormat = MFVideoFormat_H264;
- break;
- }
- case MFCodecId_AAC:
- {
- m_eMediaType = MFCodecMediaType_Audio;
- m_guidCompressedFormat = MFAudioFormat_AAC;
- break;
- }
- default:
- {
- assert(false);
- break;
- }
- }
- CHECK_HR(hr = MFCreateMediaType(&m_pOutputType));
- CHECK_HR(hr = MFCreateMediaType(&m_pInputType));
- if(pMFT) // up to the caller to make sure all parameters are corrrect
- {
- m_pMFT = pMFT;
- m_pMFT->AddRef();
- }
- else
- {
- CHECK_HR(hr = MFUtils::GetBestCodec(
- (m_eType == MFCodecType_Encoder) ? TRUE : FALSE, // Encoder ?
- (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio, // Media Type
- (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat/*GUID_NULL*/, // Input
- (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat, // Output
- &m_pMFT));
- }
- hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pCodecAPI));
- if(FAILED(hr) && m_eType == MFCodecType_Encoder) // Required only for Encoders
- {
- CHECK_HR(hr);
- }
-
-
- CHECK_HR(hr = MFUtils::IsAsyncMFT(m_pMFT, &m_bIsAsync));
- if(m_bIsAsync)
- {
- m_pSampleQueueAsyncInput = new MFSampleQueue();
- if(!m_pSampleQueueAsyncInput)
- {
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- CHECK_HR(hr = MFUtils::UnlockAsyncMFT(m_pMFT));
- CHECK_HR(hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pEventGenerator)));
- }
+ MFUtils::Startup();
+
+ HRESULT hr = S_OK;
+
+ switch(eId) {
+ case MFCodecId_H264Base:
+ case MFCodecId_H264Main: {
+ m_eMediaType = MFCodecMediaType_Video;
+ m_guidCompressedFormat = MFVideoFormat_H264;
+ break;
+ }
+ case MFCodecId_AAC: {
+ m_eMediaType = MFCodecMediaType_Audio;
+ m_guidCompressedFormat = MFAudioFormat_AAC;
+ break;
+ }
+ default: {
+ assert(false);
+ break;
+ }
+ }
+ CHECK_HR(hr = MFCreateMediaType(&m_pOutputType));
+ CHECK_HR(hr = MFCreateMediaType(&m_pInputType));
+ if(pMFT) { // up to the caller to make sure all parameters are corrrect
+ m_pMFT = pMFT;
+ m_pMFT->AddRef();
+ }
+ else {
+ CHECK_HR(hr = MFUtils::GetBestCodec(
+ (m_eType == MFCodecType_Encoder) ? TRUE : FALSE, // Encoder ?
+ (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio, // Media Type
+ (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat/*GUID_NULL*/, // Input
+ (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat, // Output
+ &m_pMFT));
+ }
+ hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pCodecAPI));
+ if(FAILED(hr) && m_eType == MFCodecType_Encoder) { // Required only for Encoders
+ CHECK_HR(hr);
+ }
+
+
+ CHECK_HR(hr = MFUtils::IsAsyncMFT(m_pMFT, &m_bIsAsync));
+ if(m_bIsAsync) {
+ m_pSampleQueueAsyncInput = new MFSampleQueue();
+ if(!m_pSampleQueueAsyncInput) {
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ CHECK_HR(hr = MFUtils::UnlockAsyncMFT(m_pMFT));
+ CHECK_HR(hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pEventGenerator)));
+ }
bail:
- if(FAILED(hr))
- {
- SafeRelease(&m_pMFT);
- SafeRelease(&m_pCodecAPI);
- }
- if(!IsValid())
- {
- TSK_DEBUG_ERROR("Failed to create codec with id = %d", m_eId);
- }
-}
+ if(FAILED(hr)) {
+ SafeRelease(&m_pMFT);
+ SafeRelease(&m_pCodecAPI);
+ }
+ if(!IsValid()) {
+ TSK_DEBUG_ERROR("Failed to create codec with id = %d", m_eId);
+ }
+}
MFCodec::~MFCodec()
{
- assert(m_nRefCount == 0);
+ assert(m_nRefCount == 0);
- if(m_bIsAsync && m_pMFT)
- {
- m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
- m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
- }
+ if(m_bIsAsync && m_pMFT) {
+ m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
+ m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
+ }
- SafeRelease(&m_pMFT);
- SafeRelease(&m_pCodecAPI);
+ SafeRelease(&m_pMFT);
+ SafeRelease(&m_pCodecAPI);
SafeRelease(&m_pOutputType);
- SafeRelease(&m_pInputType);
- SafeRelease(&m_pSampleIn);
- SafeRelease(&m_pSampleOut);
- SafeRelease(&m_pEventGenerator);
- SafeRelease(&m_pSampleQueueAsyncInput);
+ SafeRelease(&m_pInputType);
+ SafeRelease(&m_pSampleIn);
+ SafeRelease(&m_pSampleOut);
+ SafeRelease(&m_pEventGenerator);
+ SafeRelease(&m_pSampleQueueAsyncInput);
}
ULONG MFCodec::AddRef()
@@ -175,8 +163,7 @@ ULONG MFCodec::AddRef()
ULONG MFCodec::Release()
{
ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -185,242 +172,210 @@ ULONG MFCodec::Release()
HRESULT MFCodec::QueryInterface(REFIID iid, void** ppv)
{
- if(!IsValid())
- {
- return E_FAIL;
- }
- return m_pMFT->QueryInterface(iid, ppv);
+ if(!IsValid()) {
+ return E_FAIL;
+ }
+ return m_pMFT->QueryInterface(iid, ppv);
}
// IMFAsyncCallback
STDMETHODIMP MFCodec::GetParameters(DWORD *pdwFlags, DWORD *pdwQueue)
{
- return E_NOTIMPL;
+ return E_NOTIMPL;
}
STDMETHODIMP MFCodec::Invoke(IMFAsyncResult *pAsyncResult)
{
- HRESULT hr = S_OK, hrStatus = S_OK;
+ HRESULT hr = S_OK, hrStatus = S_OK;
IMFMediaEvent* pEvent = NULL;
MediaEventType meType = MEUnknown;
-
+
CHECK_HR(hr = m_pEventGenerator->EndGetEvent(pAsyncResult, &pEvent));
- CHECK_HR(hr = pEvent->GetType(&meType));
+ CHECK_HR(hr = pEvent->GetType(&meType));
CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- if (SUCCEEDED(hrStatus))
- {
- switch(meType)
- {
- case METransformNeedInput:
- {
- InterlockedIncrement(&m_nMETransformNeedInputCount);
- break;
- }
-
- case METransformHaveOutput:
- {
- InterlockedIncrement(&m_nMETransformHaveOutputCount);
- break;
- }
- }
- }
-
+ if (SUCCEEDED(hrStatus)) {
+ switch(meType) {
+ case METransformNeedInput: {
+ InterlockedIncrement(&m_nMETransformNeedInputCount);
+ break;
+ }
+
+ case METransformHaveOutput: {
+ InterlockedIncrement(&m_nMETransformHaveOutputCount);
+ break;
+ }
+ }
+ }
+
CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
bail:
- SafeRelease(&pEvent);
+ SafeRelease(&pEvent);
return hr;
}
HRESULT MFCodec::ProcessInput(IMFSample* pSample)
{
- assert(IsReady());
-
- HRESULT hr = S_OK;
-
- if(m_bIsFirstFrame)
- {
- if(m_bIsAsync && !m_bIsBundled)
- {
- CHECK_HR(hr = m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL));
- CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
- }
- m_bIsFirstFrame = FALSE;
- }
-
- if(m_bIsAsync)
- {
- if(m_nMETransformNeedInputCount == 1 && m_pSampleQueueAsyncInput->IsEmpty())
- {
- InterlockedDecrement(&m_nMETransformNeedInputCount);
- return m_pMFT->ProcessInput(m_dwInputID, pSample, 0);
- }
-
- if(m_pSampleQueueAsyncInput->Count() > kMFCodecQueuedFramesMax)
- {
- m_pSampleQueueAsyncInput->Clear();
- CHECK_HR(hr = E_UNEXPECTED);
- }
-
- // Input sample holds shared memory (also used by other samples)
- IMFSample *pSampleCopy = NULL;
- IMFMediaBuffer *pMediaBuffer = NULL, *pMediaBufferCopy = NULL;
- BYTE *pBufferPtr = NULL, *pBufferPtrCopy = NULL;
- DWORD dwDataLength = 0;
- BOOL bMediaBufferLocked = FALSE, bMediaBufferLockedCopy = FALSE;
-
- CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
- hr = pMediaBuffer->GetCurrentLength(&dwDataLength);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- hr = pMediaBuffer->Lock(&pBufferPtr, NULL, NULL);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- bMediaBufferLocked = TRUE;
-
- hr = MFUtils::CreateMediaSample(dwDataLength, &pSampleCopy);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- hr = pSampleCopy->GetBufferByIndex(0, &pMediaBufferCopy);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- hr = pMediaBufferCopy->Lock(&pBufferPtrCopy, NULL, NULL);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- bMediaBufferLockedCopy = TRUE;
-
- memcpy(pBufferPtrCopy, pBufferPtr, dwDataLength);
- hr = pMediaBufferCopy->SetCurrentLength(dwDataLength);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
-
- LONGLONG hnsSampleTime = 0;
- LONGLONG hnsSampleDuration = 0;
- hr = pSample->GetSampleTime(&hnsSampleTime);
- if(SUCCEEDED(hr))
- {
- hr = pSampleCopy->SetSampleTime(hnsSampleTime);
- }
- hr = pSample->GetSampleDuration(&hnsSampleDuration);
- if(SUCCEEDED(hr))
- {
- hr = pSampleCopy->SetSampleDuration(hnsSampleDuration);
- }
-
- // EnQueue
- hr = m_pSampleQueueAsyncInput->Queue(pSampleCopy);
+ assert(IsReady());
+
+ HRESULT hr = S_OK;
+
+ if(m_bIsFirstFrame) {
+ if(m_bIsAsync && !m_bIsBundled) {
+ CHECK_HR(hr = m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL));
+ CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
+ }
+ m_bIsFirstFrame = FALSE;
+ }
+
+ if(m_bIsAsync) {
+ if(m_nMETransformNeedInputCount == 1 && m_pSampleQueueAsyncInput->IsEmpty()) {
+ InterlockedDecrement(&m_nMETransformNeedInputCount);
+ return m_pMFT->ProcessInput(m_dwInputID, pSample, 0);
+ }
+
+ if(m_pSampleQueueAsyncInput->Count() > kMFCodecQueuedFramesMax) {
+ m_pSampleQueueAsyncInput->Clear();
+ CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ // Input sample holds shared memory (also used by other samples)
+ IMFSample *pSampleCopy = NULL;
+ IMFMediaBuffer *pMediaBuffer = NULL, *pMediaBufferCopy = NULL;
+ BYTE *pBufferPtr = NULL, *pBufferPtrCopy = NULL;
+ DWORD dwDataLength = 0;
+ BOOL bMediaBufferLocked = FALSE, bMediaBufferLockedCopy = FALSE;
+
+ CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+ hr = pMediaBuffer->GetCurrentLength(&dwDataLength);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ hr = pMediaBuffer->Lock(&pBufferPtr, NULL, NULL);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ bMediaBufferLocked = TRUE;
+
+ hr = MFUtils::CreateMediaSample(dwDataLength, &pSampleCopy);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ hr = pSampleCopy->GetBufferByIndex(0, &pMediaBufferCopy);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ hr = pMediaBufferCopy->Lock(&pBufferPtrCopy, NULL, NULL);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ bMediaBufferLockedCopy = TRUE;
+
+ memcpy(pBufferPtrCopy, pBufferPtr, dwDataLength);
+ hr = pMediaBufferCopy->SetCurrentLength(dwDataLength);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+
+ LONGLONG hnsSampleTime = 0;
+ LONGLONG hnsSampleDuration = 0;
+ hr = pSample->GetSampleTime(&hnsSampleTime);
+ if(SUCCEEDED(hr)) {
+ hr = pSampleCopy->SetSampleTime(hnsSampleTime);
+ }
+ hr = pSample->GetSampleDuration(&hnsSampleDuration);
+ if(SUCCEEDED(hr)) {
+ hr = pSampleCopy->SetSampleDuration(hnsSampleDuration);
+ }
+
+ // EnQueue
+ hr = m_pSampleQueueAsyncInput->Queue(pSampleCopy);
endofcopy:
- if(pMediaBuffer && bMediaBufferLocked)
- {
- pMediaBuffer->Unlock();
- }
- if(pMediaBufferCopy && bMediaBufferLockedCopy)
- {
- pMediaBufferCopy->Unlock();
- }
- SafeRelease(&pSampleCopy);
- SafeRelease(&pMediaBuffer);
- CHECK_HR(hr);
-
- while(m_nMETransformNeedInputCount > 0)
- {
- if(m_pSampleQueueAsyncInput->IsEmpty())
- {
- break;
- }
- IMFSample *_pSample = NULL;
- hr = m_pSampleQueueAsyncInput->Dequeue(&_pSample);
- if(SUCCEEDED(hr))
- {
- InterlockedDecrement(&m_nMETransformNeedInputCount);
- hr = m_pMFT->ProcessInput(m_dwInputID, _pSample, 0);
- }
- SafeRelease(&_pSample);
- CHECK_HR(hr);
- }
- }
- else
- {
- CHECK_HR(hr = m_pMFT->ProcessInput(m_dwInputID, pSample, 0));
- }
+ if(pMediaBuffer && bMediaBufferLocked) {
+ pMediaBuffer->Unlock();
+ }
+ if(pMediaBufferCopy && bMediaBufferLockedCopy) {
+ pMediaBufferCopy->Unlock();
+ }
+ SafeRelease(&pSampleCopy);
+ SafeRelease(&pMediaBuffer);
+ CHECK_HR(hr);
+
+ while(m_nMETransformNeedInputCount > 0) {
+ if(m_pSampleQueueAsyncInput->IsEmpty()) {
+ break;
+ }
+ IMFSample *_pSample = NULL;
+ hr = m_pSampleQueueAsyncInput->Dequeue(&_pSample);
+ if(SUCCEEDED(hr)) {
+ InterlockedDecrement(&m_nMETransformNeedInputCount);
+ hr = m_pMFT->ProcessInput(m_dwInputID, _pSample, 0);
+ }
+ SafeRelease(&_pSample);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ CHECK_HR(hr = m_pMFT->ProcessInput(m_dwInputID, pSample, 0));
+ }
bail:
- return hr;
+ return hr;
}
HRESULT MFCodec::ProcessOutput(IMFSample **ppSample)
{
- assert(IsReady());
+ assert(IsReady());
- if(m_bIsAsync)
- {
- if(m_nMETransformHaveOutputCount == 0)
- {
- return S_OK;
- }
- InterlockedDecrement(&m_nMETransformHaveOutputCount);
- }
+ if(m_bIsAsync) {
+ if(m_nMETransformHaveOutputCount == 0) {
+ return S_OK;
+ }
+ InterlockedDecrement(&m_nMETransformHaveOutputCount);
+ }
- *ppSample = NULL;
+ *ppSample = NULL;
IMFMediaBuffer* pBufferOut = NULL;
DWORD dwStatus;
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
MFT_OUTPUT_STREAM_INFO mftStreamInfo = { 0 };
MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
- CHECK_HR(hr = m_pMFT->GetOutputStreamInfo(m_dwOutputID, &mftStreamInfo));
-
- BOOL bOutputStreamProvidesSamples = (mftStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES;
-
- if(!bOutputStreamProvidesSamples)
- {
- if(!m_pSampleOut)
- {
- CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &m_pSampleOut));
- hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut);
- if(FAILED(hr))
- {
- SafeRelease(&m_pSampleOut);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut));
- CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < mftStreamInfo.cbSize)
- {
- CHECK_HR(hr = m_pSampleOut->RemoveAllBuffers());
- SafeRelease(&pBufferOut);
- CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
- CHECK_HR(hr = m_pSampleOut->AddBuffer(pBufferOut));
- }
- }
- }
-
- if(pBufferOut)
- {
- CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
- }
-
+ CHECK_HR(hr = m_pMFT->GetOutputStreamInfo(m_dwOutputID, &mftStreamInfo));
+
+ BOOL bOutputStreamProvidesSamples = (mftStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES;
+
+ if(!bOutputStreamProvidesSamples) {
+ if(!m_pSampleOut) {
+ CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &m_pSampleOut));
+ hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut);
+ if(FAILED(hr)) {
+ SafeRelease(&m_pSampleOut);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut));
+ CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < mftStreamInfo.cbSize) {
+ CHECK_HR(hr = m_pSampleOut->RemoveAllBuffers());
+ SafeRelease(&pBufferOut);
+ CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
+ CHECK_HR(hr = m_pSampleOut->AddBuffer(pBufferOut));
+ }
+ }
+ }
+
+ if(pBufferOut) {
+ CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
+ }
+
//Set the output sample
mftOutputData.pSample = bOutputStreamProvidesSamples ? NULL : m_pSampleOut;
//Set the output id
@@ -428,167 +383,145 @@ HRESULT MFCodec::ProcessOutput(IMFSample **ppSample)
//Generate the output sample
hr = m_pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus);
- if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
- {
+ if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
hr = S_OK;
goto bail;
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
goto bail;
}
*ppSample = mftOutputData.pSample;
- if(*ppSample)
- {
- (*ppSample)->AddRef();
- }
+ if(*ppSample) {
+ (*ppSample)->AddRef();
+ }
bail:
- if(bOutputStreamProvidesSamples)
- {
- SafeRelease(&mftOutputData.pSample);
- }
+ if(bOutputStreamProvidesSamples) {
+ SafeRelease(&mftOutputData.pSample);
+ }
SafeRelease(&pBufferOut);
return hr;
}
bool MFCodec::IsValid()
{
- return (m_pMFT && (m_eType == MFCodecType_Decoder || m_pCodecAPI));
+ return (m_pMFT && (m_eType == MFCodecType_Decoder || m_pCodecAPI));
}
bool MFCodec::IsReady()
{
- return (IsValid() && m_pOutputType && m_pInputType);
+ return (IsValid() && m_pOutputType && m_pInputType);
}
HRESULT MFCodec::Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut)
{
- if(!pcInputPtr || !nInputSize || !ppSampleOut)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return E_INVALIDARG;
- }
-
- *ppSampleOut = NULL;
-
- HRESULT hr = S_OK;
-
- IMFMediaBuffer* pBufferIn = NULL;
- BYTE* pBufferPtr = NULL;
- BOOL bMediaChangeHandled = FALSE; // Endless loop guard
-
- if(!m_pSampleIn)
- {
- CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &m_pSampleIn));
- hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn);
- if(FAILED(hr))
- {
- SafeRelease(&m_pSampleIn);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn));
- CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < nInputSize)
- {
- CHECK_HR(hr = m_pSampleIn->RemoveAllBuffers());
- SafeRelease(&pBufferIn);
- CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
- CHECK_HR(hr = m_pSampleIn->AddBuffer(pBufferIn));
- }
- }
-
- CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
- memcpy(pBufferPtr, pcInputPtr, nInputSize);
- CHECK_HR(hr = pBufferIn->Unlock());
- CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
-
- if(m_eType == MFCodecType_Encoder)
- {
- CHECK_HR(hr = m_pSampleIn->SetSampleDuration(m_rtDuration));
- CHECK_HR(hr = m_pSampleIn->SetSampleTime(m_rtStart)); // FIXME: use clock(), Same for custom source
- }
+ if(!pcInputPtr || !nInputSize || !ppSampleOut) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return E_INVALIDARG;
+ }
+
+ *ppSampleOut = NULL;
+
+ HRESULT hr = S_OK;
+
+ IMFMediaBuffer* pBufferIn = NULL;
+ BYTE* pBufferPtr = NULL;
+ BOOL bMediaChangeHandled = FALSE; // Endless loop guard
+
+ if(!m_pSampleIn) {
+ CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &m_pSampleIn));
+ hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn);
+ if(FAILED(hr)) {
+ SafeRelease(&m_pSampleIn);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn));
+ CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < nInputSize) {
+ CHECK_HR(hr = m_pSampleIn->RemoveAllBuffers());
+ SafeRelease(&pBufferIn);
+ CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
+ CHECK_HR(hr = m_pSampleIn->AddBuffer(pBufferIn));
+ }
+ }
+
+ CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
+ memcpy(pBufferPtr, pcInputPtr, nInputSize);
+ CHECK_HR(hr = pBufferIn->Unlock());
+ CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
+
+ if(m_eType == MFCodecType_Encoder) {
+ CHECK_HR(hr = m_pSampleIn->SetSampleDuration(m_rtDuration));
+ CHECK_HR(hr = m_pSampleIn->SetSampleTime(m_rtStart)); // FIXME: use clock(), Same for custom source
+ }
Label_ProcessInput:
- hr = ProcessInput(m_pSampleIn);
- while(hr == MF_E_NOTACCEPTING)
- {
- TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
- IMFSample* pSample = NULL;
- hr = ProcessOutput(&pSample);
- if(SUCCEEDED(hr) && pSample)
- {
- SafeRelease(ppSampleOut);
- *ppSampleOut = pSample, pSample = NULL;
-
- hr = m_pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
- hr = ProcessInput(m_pSampleIn);
- }
- }
- if(!*ppSampleOut)
- {
- hr = ProcessOutput(ppSampleOut);
- if(hr == MF_E_TRANSFORM_STREAM_CHANGE) /* Handling Stream Changes: http://msdn.microsoft.com/en-us/library/windows/desktop/ee663587(v=vs.85).aspx */
- {
- TSK_DEBUG_INFO("[MF Codec] Stream changed");
- if(m_eType == MFCodecType_Decoder)
- {
- IMFMediaType *pTypeOut = NULL;
- hr = m_pMFT->GetOutputAvailableType(m_dwOutputID, 0, &pTypeOut);
- if(SUCCEEDED(hr))
- {
- UINT32 uWidth = 0, uHeight = 0;
- hr = MFGetAttributeSize(pTypeOut, MF_MT_FRAME_SIZE, &uWidth, &uHeight);
- if(SUCCEEDED(hr))
- {
- TSK_DEBUG_INFO("[MF Decoder] New size: width=%u, height=%u", uWidth, uHeight);
- hr = m_pMFT->SetOutputType(m_dwOutputID, pTypeOut, 0);
- if(SUCCEEDED(hr))
- {
- SafeRelease(&m_pOutputType);
- pTypeOut->AddRef();
- m_pOutputType = pTypeOut;
- if(m_eMediaType == MFCodecMediaType_Video)
- {
- dynamic_cast<MFCodecVideo*>(this)->m_nWidth = uWidth;
- dynamic_cast<MFCodecVideo*>(this)->m_nHeight = uHeight;
- }
- }
- }
- }
- SafeRelease(&pTypeOut);
- if(SUCCEEDED(hr))
- {
- if(!bMediaChangeHandled)
- {
- bMediaChangeHandled = TRUE;
- goto Label_ProcessInput;
- }
- }
- }
- }
- }
-
- m_rtStart += m_rtDuration;
-
+ hr = ProcessInput(m_pSampleIn);
+ while(hr == MF_E_NOTACCEPTING) {
+ TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
+ IMFSample* pSample = NULL;
+ hr = ProcessOutput(&pSample);
+ if(SUCCEEDED(hr) && pSample) {
+ SafeRelease(ppSampleOut);
+ *ppSampleOut = pSample, pSample = NULL;
+
+ hr = m_pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
+ hr = ProcessInput(m_pSampleIn);
+ }
+ }
+ if(!*ppSampleOut) {
+ hr = ProcessOutput(ppSampleOut);
+ if(hr == MF_E_TRANSFORM_STREAM_CHANGE) { /* Handling Stream Changes: http://msdn.microsoft.com/en-us/library/windows/desktop/ee663587(v=vs.85).aspx */
+ TSK_DEBUG_INFO("[MF Codec] Stream changed");
+ if(m_eType == MFCodecType_Decoder) {
+ IMFMediaType *pTypeOut = NULL;
+ hr = m_pMFT->GetOutputAvailableType(m_dwOutputID, 0, &pTypeOut);
+ if(SUCCEEDED(hr)) {
+ UINT32 uWidth = 0, uHeight = 0;
+ hr = MFGetAttributeSize(pTypeOut, MF_MT_FRAME_SIZE, &uWidth, &uHeight);
+ if(SUCCEEDED(hr)) {
+ TSK_DEBUG_INFO("[MF Decoder] New size: width=%u, height=%u", uWidth, uHeight);
+ hr = m_pMFT->SetOutputType(m_dwOutputID, pTypeOut, 0);
+ if(SUCCEEDED(hr)) {
+ SafeRelease(&m_pOutputType);
+ pTypeOut->AddRef();
+ m_pOutputType = pTypeOut;
+ if(m_eMediaType == MFCodecMediaType_Video) {
+ dynamic_cast<MFCodecVideo*>(this)->m_nWidth = uWidth;
+ dynamic_cast<MFCodecVideo*>(this)->m_nHeight = uHeight;
+ }
+ }
+ }
+ }
+ SafeRelease(&pTypeOut);
+ if(SUCCEEDED(hr)) {
+ if(!bMediaChangeHandled) {
+ bMediaChangeHandled = TRUE;
+ goto Label_ProcessInput;
+ }
+ }
+ }
+ }
+ }
+
+ m_rtStart += m_rtDuration;
+
bail:
- SafeRelease(&pBufferIn);
- return hr;
+ SafeRelease(&pBufferIn);
+ return hr;
}
enum tmedia_chroma_e MFCodec::GetUncompressedChroma()
{
- if(kMFCodecUncompressedFormat == MFVideoFormat_NV12)
- {
- return tmedia_chroma_nv12;
- }
- assert(false);
- return tmedia_chroma_none;
+ if(kMFCodecUncompressedFormat == MFVideoFormat_NV12) {
+ return tmedia_chroma_nv12;
+ }
+ assert(false);
+ return tmedia_chroma_none;
}
//
@@ -596,12 +529,12 @@ enum tmedia_chroma_e MFCodec::GetUncompressedChroma()
//
MFCodecVideo::MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
-: MFCodec(eId, eType, pMFT)
-, m_nFrameRate(0)
-, m_nWidth(0)
-, m_nHeight(0)
+ : MFCodec(eId, eType, pMFT)
+ , m_nFrameRate(0)
+ , m_nWidth(0)
+ , m_nHeight(0)
{
- assert(m_eMediaType == MFCodecMediaType_Video);
+ assert(m_eMediaType == MFCodecMediaType_Video);
}
MFCodecVideo::~MFCodecVideo()
@@ -610,279 +543,261 @@ MFCodecVideo::~MFCodecVideo()
}
HRESULT MFCodecVideo::Initialize(
- UINT32 nFrameRate,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nOutputBitRateInBps /*= 0*/
- )
+ UINT32 nFrameRate,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nOutputBitRateInBps /*= 0*/
+)
{
- assert(IsValid());
-
- HRESULT hr = S_OK;
+ assert(IsValid());
- VARIANT var = {0};
+ HRESULT hr = S_OK;
- // make sure identifiers are zero-based (other layouts not supported yet)
- hr = m_pMFT->GetStreamIDs(1, &m_dwInputID, 1, &m_dwOutputID);
- if (hr == E_NOTIMPL)
- {
+ VARIANT var = {0};
+
+ // make sure identifiers are zero-based (other layouts not supported yet)
+ hr = m_pMFT->GetStreamIDs(1, &m_dwInputID, 1, &m_dwOutputID);
+ if (hr == E_NOTIMPL) {
m_dwInputID = 0;
m_dwOutputID = 0;
hr = S_OK;
}
- else if (FAILED(hr))
- {
- TSK_DEBUG_ERROR("The stream identifiers are not zero-based");
+ else if (FAILED(hr)) {
+ TSK_DEBUG_ERROR("The stream identifiers are not zero-based");
return hr;
}
- m_rtStart = 0;
- CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(nFrameRate, 1, &m_rtDuration));
+ m_rtStart = 0;
+ CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(nFrameRate, 1, &m_rtDuration));
+
+ CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
+ CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
- CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
- CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
+ CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat));
+ CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat));
- CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat));
- CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat));
-
- CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
- CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+ CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
+ CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+
+ CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
+ CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+
+ // Set bitrate
+ // Set (MF_MT_AVG_BITRATE) for MediaType
+ // Set (CODECAPI_AVEncCommonMeanBitRate) for H.264
+ hr = SetBitRate(nOutputBitRateInBps);
- CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
- CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
-
- // Set bitrate
- // Set (MF_MT_AVG_BITRATE) for MediaType
- // Set (CODECAPI_AVEncCommonMeanBitRate) for H.264
- hr = SetBitRate(nOutputBitRateInBps);
-
CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
- CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+ CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
CHECK_HR(hr = MFSetAttributeSize(m_pOutputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
- CHECK_HR(hr = MFSetAttributeSize(m_pInputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
+ CHECK_HR(hr = MFSetAttributeSize(m_pInputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
CHECK_HR(hr = MFSetAttributeRatio(m_pOutputType, MF_MT_FRAME_RATE, nFrameRate, 1));
- CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_FRAME_RATE, nFrameRate, 1));
+ CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_FRAME_RATE, nFrameRate, 1));
CHECK_HR(hr = MFSetAttributeRatio(m_pOutputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
- CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
-
- // Encoder: Output format must be set before input
- // Decoder: Input format must be set before output
- if(m_eType == MFCodecType_Encoder)
- {
- CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
- CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
- }
- else
- {
- CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
- CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
- }
-
- if(m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)
- {
- if(m_eType == MFCodecType_Decoder)
- {
- // Only decoder support GetAttributes()
- IMFAttributes* pAttributes = NULL;
- hr = m_pMFT->GetAttributes(&pAttributes);
- if(SUCCEEDED(hr))
- {
- // FIXME: Very strange that "CODECAPI_AVLowLatencyMode" only works with "IMFAttributes->" and not "ICodecAPI->SetValue()"
- hr = pAttributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
- }
- SafeRelease(&pAttributes);
- }
- else
- {
- var.vt = VT_BOOL;
- var.boolVal = VARIANT_TRUE;
- hr = m_pCodecAPI->SetValue(&CODECAPI_AVLowLatencyMode, &var);
-
- var.vt = VT_BOOL;
- var.boolVal = VARIANT_TRUE;
- hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonLowLatency, &var); // Correct for the decoder
-
- // Disable B-Frames
- var.vt = VT_UI4;
- var.ulVal = 0;
- hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVDefaultBPictureCount, &var);
-
- // Constant bitrate (updated using RTCP)
- var.vt = VT_UI4;
- var.ulVal = eAVEncCommonRateControlMode_CBR;
- hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
- }
-
- hr = S_OK; // Not mandatory features
- }
+ CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+
+ // Encoder: Output format must be set before input
+ // Decoder: Input format must be set before output
+ if(m_eType == MFCodecType_Encoder) {
+ CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
+ CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
+ }
+ else {
+ CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
+ CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
+ }
+
+ if(m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main) {
+ if(m_eType == MFCodecType_Decoder) {
+ // Only decoder support GetAttributes()
+ IMFAttributes* pAttributes = NULL;
+ hr = m_pMFT->GetAttributes(&pAttributes);
+ if(SUCCEEDED(hr)) {
+ // FIXME: Very strange that "CODECAPI_AVLowLatencyMode" only works with "IMFAttributes->" and not "ICodecAPI->SetValue()"
+ hr = pAttributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
+ }
+ SafeRelease(&pAttributes);
+ }
+ else {
+ var.vt = VT_BOOL;
+ var.boolVal = VARIANT_TRUE;
+ hr = m_pCodecAPI->SetValue(&CODECAPI_AVLowLatencyMode, &var);
+
+ var.vt = VT_BOOL;
+ var.boolVal = VARIANT_TRUE;
+ hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonLowLatency, &var); // Correct for the decoder
+
+ // Disable B-Frames
+ var.vt = VT_UI4;
+ var.ulVal = 0;
+ hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVDefaultBPictureCount, &var);
+
+ // Constant bitrate (updated using RTCP)
+ var.vt = VT_UI4;
+ var.ulVal = eAVEncCommonRateControlMode_CBR;
+ hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
+ }
+
+ hr = S_OK; // Not mandatory features
+ }
bail:
- if(SUCCEEDED(hr))
- {
- m_nFrameRate = nFrameRate;
- m_nWidth = nWidth;
- m_nHeight = nHeight;
- }
+ if(SUCCEEDED(hr)) {
+ m_nFrameRate = nFrameRate;
+ m_nWidth = nWidth;
+ m_nHeight = nHeight;
+ }
- return hr;
+ return hr;
}
HRESULT MFCodecVideo::SetGOPSize(UINT32 nFramesCount)
{
- assert(IsValid());
+ assert(IsValid());
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(m_eType == MFCodecType_Encoder && (m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
- VARIANT var = {0};
- var.vt = VT_UI4;
- var.ullVal = nFramesCount;
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVGOPSize, &var));
- }
+ if(m_eType == MFCodecType_Encoder && (m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
+ VARIANT var = {0};
+ var.vt = VT_UI4;
+ var.ullVal = nFramesCount;
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVGOPSize, &var));
+ }
bail:
- return hr;
+ return hr;
}
HRESULT MFCodecVideo::SetBitRate(UINT32 nBitRateInBps)
{
- assert(IsValid());
+ assert(IsValid());
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(nBitRateInBps > 0 && m_eType == MFCodecType_Encoder)
- {
- CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_AVG_BITRATE, nBitRateInBps));
+ if(nBitRateInBps > 0 && m_eType == MFCodecType_Encoder) {
+ CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_AVG_BITRATE, nBitRateInBps));
- if((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
- VARIANT var = {0};
+ if((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
+ VARIANT var = {0};
- // Set BitRate
- var.vt = VT_UI4;
- var.ullVal = nBitRateInBps;
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var));
- }
- }
+ // Set BitRate
+ var.vt = VT_UI4;
+ var.ullVal = nBitRateInBps;
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var));
+ }
+ }
bail:
- return hr;
+ return hr;
}
HRESULT MFCodecVideo::IsSetSliceMaxSizeInBytesSupported(BOOL &supported)
{
- HRESULT hr = S_OK;
- supported = FALSE;
+ HRESULT hr = S_OK;
+ supported = FALSE;
- if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
+ if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
#if defined(CODECAPI_AVEncSliceControlMode) && defined(CODECAPI_AVEncSliceControlSize)
- if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
- supported = TRUE;
- }
+ if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
+ supported = TRUE;
+ }
#endif
- }
- return hr;
+ }
+ return hr;
}
HRESULT MFCodecVideo::SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes)
{
- assert(IsValid() && nSliceMaxSizeInBytes > 0);
+ assert(IsValid() && nSliceMaxSizeInBytes > 0);
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
+ if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
#if defined(CODECAPI_AVEncSliceControlMode) && defined(CODECAPI_AVEncSliceControlSize)
- if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
- VARIANT var = { 0 };
- var.vt = VT_UI4;
+ if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
+ VARIANT var = { 0 };
+ var.vt = VT_UI4;
- var.ulVal = 1; // Bits
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlMode, &var));
+ var.ulVal = 1; // Bits
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlMode, &var));
- var.ulVal = (nSliceMaxSizeInBytes << 3); // From Bytes to Bits
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlSize, &var));
- }
+ var.ulVal = (nSliceMaxSizeInBytes << 3); // From Bytes to Bits
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlSize, &var));
+ }
#else
- CHECK_HR(hr = S_OK);
+ CHECK_HR(hr = S_OK);
#endif
- }
+ }
bail:
- return hr;
+ return hr;
}
HRESULT MFCodecVideo::RequestKeyFrame()
{
- assert(IsValid());
-
- HRESULT hr = S_OK;
+ assert(IsValid());
+
+ HRESULT hr = S_OK;
- if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
+ if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
#if defined(CODECAPI_AVEncVideoForceKeyFrame)
- if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncVideoForceKeyFrame) == S_OK) {
- VARIANT var = { 0 };
+ if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncVideoForceKeyFrame) == S_OK) {
+ VARIANT var = { 0 };
- var.vt = VT_UI4;
- var.ulVal = 1;
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var));
- }
+ var.vt = VT_UI4;
+ var.ulVal = 1;
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var));
+ }
#else
- CHECK_HR(hr = S_OK);
+ CHECK_HR(hr = S_OK);
#endif
- }
+ }
bail:
- return hr;
+ return hr;
}
//
// MFCodecVideo
//
MFCodecVideoH264::MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
-: MFCodecVideo(eId, eType, pMFT)
+ : MFCodecVideo(eId, eType, pMFT)
{
- assert(eId == MFCodecId_H264Base || eId == MFCodecId_H264Main);
-
- HRESULT hr = S_OK;
+ assert(eId == MFCodecId_H264Base || eId == MFCodecId_H264Main);
- if(m_pOutputType)
- {
- CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_MPEG2_PROFILE, (m_eId == MFCodecId_H264Base) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
- }
+ HRESULT hr = S_OK;
+
+ if(m_pOutputType) {
+ CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_MPEG2_PROFILE, (m_eId == MFCodecId_H264Base) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
+ }
bail:
- assert(SUCCEEDED(hr));
+ assert(SUCCEEDED(hr));
}
MFCodecVideoH264::~MFCodecVideoH264()
{
-
+
}
MFCodecVideoH264* MFCodecVideoH264::CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
{
- MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Base, eType, pMFT);
- if(pCodec && !pCodec->IsValid())
- {
- SafeRelease(&pCodec);
- }
- return pCodec;
+ MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Base, eType, pMFT);
+ if(pCodec && !pCodec->IsValid()) {
+ SafeRelease(&pCodec);
+ }
+ return pCodec;
}
MFCodecVideoH264* MFCodecVideoH264::CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
{
- MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Main, eType, pMFT);
- if(pCodec && !pCodec->IsValid())
- {
- SafeRelease(&pCodec);
- }
- return pCodec;
+ MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Main, eType, pMFT);
+ if(pCodec && !pCodec->IsValid()) {
+ SafeRelease(&pCodec);
+ }
+ return pCodec;
}
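Like the rest of the patch, the mf_codec.cxx hunks above are an indentation/brace-style pass: the encoder/decoder flow (ProcessInput with its MF_E_NOTACCEPTING retry loop, ProcessOutput with MF_E_TRANSFORM_STREAM_CHANGE handling, and the async-MFT input queue) is unchanged. The following is a usage sketch of this class as declared in the diff, not part of the patch; frame size, rate, and bitrate are arbitrary example values, and error handling is reduced to SUCCEEDED() checks.

// Sketch only: create a baseline H.264 encoder, push one uncompressed
// frame (kMFCodecUncompressedFormat, i.e. NV12) and fetch the result.
#include "internals/mf_codec.h"

HRESULT EncodeOneFrame(const void* pcNv12Frame, UINT32 nFrameSize)
{
    IMFSample* pSampleOut = NULL;
    MFCodecVideoH264* pEncoder = MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder);
    if (!pEncoder) {
        return E_FAIL; // no usable (low-latency) H.264 MFT on this machine
    }
    // 30 fps, VGA, ~500 kb/s target bitrate (example values).
    HRESULT hr = pEncoder->Initialize(30, 640, 480, 500000);
    if (SUCCEEDED(hr)) {
        // Process() copies the input into an internal sample, runs the
        // ProcessInput/ProcessOutput loop shown above and hands back the
        // compressed sample; a NULL output while the MFT is still
        // buffering is not an error.
        hr = pEncoder->Process(pcNv12Frame, nFrameSize, &pSampleOut);
    }
    SafeRelease(&pSampleOut);
    SafeRelease(&pEncoder);
    return hr;
}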
diff --git a/plugins/pluginWinMF/internals/mf_codec.h b/plugins/pluginWinMF/internals/mf_codec.h
index 51b06dc..d4f00c7 100755
--- a/plugins/pluginWinMF/internals/mf_codec.h
+++ b/plugins/pluginWinMF/internals/mf_codec.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -30,125 +30,136 @@
class MFSampleQueue;
-typedef enum MFCodecId_e
-{
- MFCodecId_H264Base,
- MFCodecId_H264Main,
- MFCodecId_AAC
+typedef enum MFCodecId_e {
+ MFCodecId_H264Base,
+ MFCodecId_H264Main,
+ MFCodecId_AAC
}
MFCodecId_t;
-typedef enum MFCodecType_e
-{
- MFCodecType_Encoder,
- MFCodecType_Decoder
+typedef enum MFCodecType_e {
+ MFCodecType_Encoder,
+ MFCodecType_Decoder
}
MFCodecType_t;
-typedef enum MFCodecMediaType_e
-{
- MFCodecMediaType_Audio,
- MFCodecMediaType_Video
+typedef enum MFCodecMediaType_e {
+ MFCodecMediaType_Audio,
+ MFCodecMediaType_Video
}
MFCodecMediaType_t;
-class MFCodec : IMFAsyncCallback
+class MFCodec : IMFAsyncCallback
{
protected:
- MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
- virtual ~MFCodec();
- HRESULT ProcessInput(IMFSample* pSample);
- HRESULT ProcessOutput(IMFSample **ppSample);
+ MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ virtual ~MFCodec();
+ HRESULT ProcessInput(IMFSample* pSample);
+ HRESULT ProcessOutput(IMFSample **ppSample);
public:
- virtual bool IsValid();
- virtual bool IsReady();
- virtual HRESULT Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
- static enum tmedia_chroma_e GetUncompressedChroma();
- inline IMFTransform* GetMFT(){ return m_pMFT; }
- inline MFCodecId_t GetId() { return m_eId; }
- inline MFCodecType_t GetType() { return m_eType; }
- inline void setBundled(BOOL bBundled) { m_bIsBundled = bBundled; }
-
- // IUnknown
+ virtual bool IsValid();
+ virtual bool IsReady();
+ virtual HRESULT Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
+ static enum tmedia_chroma_e GetUncompressedChroma();
+ inline IMFTransform* GetMFT() {
+ return m_pMFT;
+ }
+ inline MFCodecId_t GetId() {
+ return m_eId;
+ }
+ inline MFCodecType_t GetType() {
+ return m_eType;
+ }
+ inline void setBundled(BOOL bBundled) {
+ m_bIsBundled = bBundled;
+ }
+
+ // IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
STDMETHODIMP_(ULONG) AddRef();
STDMETHODIMP_(ULONG) Release();
- // IMFAsyncCallback
- STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue);
- STDMETHODIMP Invoke(IMFAsyncResult *pAsyncResult);
+ // IMFAsyncCallback
+ STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue);
+ STDMETHODIMP Invoke(IMFAsyncResult *pAsyncResult);
private:
- long m_nRefCount;
+ long m_nRefCount;
protected:
- MFCodecId_t m_eId; // Codec Id
- MFCodecType_t m_eType; // Codec type.
- MFCodecMediaType_t m_eMediaType; // Codec Media type.
- DWORD m_dwInputID; // Input stream ID.
+ MFCodecId_t m_eId; // Codec Id
+ MFCodecType_t m_eType; // Codec type.
+ MFCodecMediaType_t m_eMediaType; // Codec Media type.
+ DWORD m_dwInputID; // Input stream ID.
DWORD m_dwOutputID; // Output stream ID.
- GUID m_guidCompressedFormat; // Compressed Media format (e.g. MFVideoFormat_H264)
+ GUID m_guidCompressedFormat; // Compressed Media format (e.g. MFVideoFormat_H264)
IMFTransform *m_pMFT; // Pointer to the encoder MFT.
- ICodecAPI *m_pCodecAPI; // Pointer to CodecAPI.
+ ICodecAPI *m_pCodecAPI; // Pointer to CodecAPI.
IMFMediaType *m_pOutputType; // Output media type of the codec.
- IMFMediaType *m_pInputType; // Input media type of the codec.
+ IMFMediaType *m_pInputType; // Input media type of the codec.
- LONGLONG m_rtStart;
+ LONGLONG m_rtStart;
UINT64 m_rtDuration;
- IMFSample *m_pSampleIn;
- IMFSample *m_pSampleOut;
+ IMFSample *m_pSampleIn;
+ IMFSample *m_pSampleOut;
- MFSampleQueue *m_pSampleQueueAsyncInput;
- BOOL m_bIsBundled; // Bundled with a producer or cosumer -> do not monitor events
- BOOL m_bIsAsync;
- IMFMediaEventGenerator *m_pEventGenerator;
- BOOL m_bIsFirstFrame;
- long m_nMETransformNeedInputCount, m_nMETransformHaveOutputCount;
+ MFSampleQueue *m_pSampleQueueAsyncInput;
+ BOOL m_bIsBundled; // Bundled with a producer or cosumer -> do not monitor events
+ BOOL m_bIsAsync;
+ IMFMediaEventGenerator *m_pEventGenerator;
+ BOOL m_bIsFirstFrame;
+ long m_nMETransformNeedInputCount, m_nMETransformHaveOutputCount;
};
class MFCodecVideo : public MFCodec
{
- friend class MFCodec;
+ friend class MFCodec;
protected:
- MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
- virtual ~MFCodecVideo();
+ MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ virtual ~MFCodecVideo();
public:
- virtual HRESULT Initialize(
- UINT32 nFrameRate,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nOutputBitRateInBps = 0 // Only for encoders
- );
- virtual HRESULT SetGOPSize(UINT32 nFramesCount);
- virtual HRESULT SetBitRate(UINT32 nBitRateInBps);
- virtual HRESULT SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes);
- virtual HRESULT RequestKeyFrame();
-
- virtual HRESULT IsSetSliceMaxSizeInBytesSupported(BOOL &supported);
- virtual inline UINT32 GetFrameRate() { return m_nFrameRate; }
- virtual inline UINT32 GetWidth() { return m_nWidth; }
- virtual inline UINT32 GetHeight() { return m_nHeight; }
+ virtual HRESULT Initialize(
+ UINT32 nFrameRate,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nOutputBitRateInBps = 0 // Only for encoders
+ );
+ virtual HRESULT SetGOPSize(UINT32 nFramesCount);
+ virtual HRESULT SetBitRate(UINT32 nBitRateInBps);
+ virtual HRESULT SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes);
+ virtual HRESULT RequestKeyFrame();
+
+ virtual HRESULT IsSetSliceMaxSizeInBytesSupported(BOOL &supported);
+ virtual inline UINT32 GetFrameRate() {
+ return m_nFrameRate;
+ }
+ virtual inline UINT32 GetWidth() {
+ return m_nWidth;
+ }
+ virtual inline UINT32 GetHeight() {
+ return m_nHeight;
+ }
protected:
- UINT32 m_nFrameRate;
- UINT32 m_nWidth;
- UINT32 m_nHeight;
+ UINT32 m_nFrameRate;
+ UINT32 m_nWidth;
+ UINT32 m_nHeight;
};
class MFCodecVideoH264 : public MFCodecVideo
{
protected:
- MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
public:
- virtual ~MFCodecVideoH264();
- static MFCodecVideoH264* CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT = NULL);
- static MFCodecVideoH264* CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ virtual ~MFCodecVideoH264();
+ static MFCodecVideoH264* CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ static MFCodecVideoH264* CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT = NULL);
protected:
diff --git a/plugins/pluginWinMF/internals/mf_codec_topology.cxx b/plugins/pluginWinMF/internals/mf_codec_topology.cxx
index 1ee2a16..1f63466 100755
--- a/plugins/pluginWinMF/internals/mf_codec_topology.cxx
+++ b/plugins/pluginWinMF/internals/mf_codec_topology.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -25,134 +25,117 @@
// MFCodecTopologySampleGrabberCB
//
-class MFCodecTopologySampleGrabberCB : public IMFSampleGrabberSinkCallback
+class MFCodecTopologySampleGrabberCB : public IMFSampleGrabberSinkCallback
{
long m_cRef;
MFCodecTopology *m_pCodecTopology;
MFCodecTopologySampleGrabberCB(MFCodecTopology *pCodecTopology)
- : m_cRef(1)
- {
- m_pCodecTopology = pCodecTopology;
- m_pCodecTopology->AddRef();
- }
- virtual ~MFCodecTopologySampleGrabberCB()
- {
- SafeRelease(&m_pCodecTopology);
- }
+ : m_cRef(1) {
+ m_pCodecTopology = pCodecTopology;
+ m_pCodecTopology->AddRef();
+ }
+ virtual ~MFCodecTopologySampleGrabberCB() {
+ SafeRelease(&m_pCodecTopology);
+ }
public:
// Create a new instance of the object.
- static HRESULT MFCodecTopologySampleGrabberCB::CreateInstance(MFCodecTopology *pCodecTopology, MFCodecTopologySampleGrabberCB **ppCB)
- {
- *ppCB = new (std::nothrow) MFCodecTopologySampleGrabberCB(pCodecTopology);
-
- if (ppCB == NULL)
- {
- return E_OUTOFMEMORY;
- }
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::QueryInterface(REFIID riid, void** ppv)
- {
- static const QITAB qit[] =
- {
- QITABENT(MFCodecTopologySampleGrabberCB, IMFSampleGrabberSinkCallback),
- QITABENT(MFCodecTopologySampleGrabberCB, IMFClockStateSink),
- { 0 }
- };
- return QISearch(this, qit, riid, ppv);
- }
-
- STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::AddRef()
- {
- return InterlockedIncrement(&m_cRef);
- }
-
- STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::Release()
- {
- ULONG cRef = InterlockedDecrement(&m_cRef);
- if (cRef == 0)
- {
- delete this;
- }
- return cRef;
-
- }
-
- // IMFClockStateSink methods
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStop(MFTIME hnsSystemTime)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockPause(MFTIME hnsSystemTime)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
- return S_OK;
- }
-
- // IMFSampleGrabberSink methods.
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnSetPresentationClock");
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnProcessSample(
- REFGUID guidMajorMediaType, DWORD dwSampleFlags,
- LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
- DWORD dwSampleSize)
- {
- HRESULT hr = S_OK;
- IMFSample *pSample = NULL;
- IMFMediaBuffer* pMediaBuffer = NULL;
- BYTE* _pcBufferPtr = NULL;
-
- CHECK_HR(hr = MFUtils::CreateMediaSample(dwSampleSize, &pSample));
- CHECK_HR(hr = pSample->SetSampleTime(llSampleTime));
- CHECK_HR(hr = pSample->SetSampleDuration(llSampleDuration));
- CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
- CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
- memcpy(_pcBufferPtr, pSampleBuffer, dwSampleSize);
- CHECK_HR(hr = pMediaBuffer->SetCurrentLength(dwSampleSize));
- CHECK_HR(hr = pMediaBuffer->Unlock());
-
- m_pCodecTopology->m_SampleQueue.Queue(pSample); // thread-safe
-
+ static HRESULT MFCodecTopologySampleGrabberCB::CreateInstance(MFCodecTopology *pCodecTopology, MFCodecTopologySampleGrabberCB **ppCB) {
+ *ppCB = new (std::nothrow) MFCodecTopologySampleGrabberCB(pCodecTopology);
+
+ if (*ppCB == NULL) { // check the allocation, not the out-parameter pointer
+ return E_OUTOFMEMORY;
+ }
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::QueryInterface(REFIID riid, void** ppv) {
+ static const QITAB qit[] = {
+ QITABENT(MFCodecTopologySampleGrabberCB, IMFSampleGrabberSinkCallback),
+ QITABENT(MFCodecTopologySampleGrabberCB, IMFClockStateSink),
+ { 0 }
+ };
+ return QISearch(this, qit, riid, ppv);
+ }
+
+ STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::AddRef() {
+ return InterlockedIncrement(&m_cRef);
+ }
+
+ STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::Release() {
+ ULONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ delete this;
+ }
+ return cRef;
+
+ }
+
+ // IMFClockStateSink methods
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStop(MFTIME hnsSystemTime) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockPause(MFTIME hnsSystemTime) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
+ return S_OK;
+ }
+
+ // IMFSampleGrabberSink methods.
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnSetPresentationClock");
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnProcessSample(
+ REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+ LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+ DWORD dwSampleSize) {
+ HRESULT hr = S_OK;
+ IMFSample *pSample = NULL;
+ IMFMediaBuffer* pMediaBuffer = NULL;
+ BYTE* _pcBufferPtr = NULL;
+
+ CHECK_HR(hr = MFUtils::CreateMediaSample(dwSampleSize, &pSample));
+ CHECK_HR(hr = pSample->SetSampleTime(llSampleTime));
+ CHECK_HR(hr = pSample->SetSampleDuration(llSampleDuration));
+ CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+ CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
+ memcpy(_pcBufferPtr, pSampleBuffer, dwSampleSize);
+ CHECK_HR(hr = pMediaBuffer->SetCurrentLength(dwSampleSize));
+ CHECK_HR(hr = pMediaBuffer->Unlock());
+
+ m_pCodecTopology->m_SampleQueue.Queue(pSample); // thread-safe
+
bail:
- SafeRelease(&pSample);
- SafeRelease(&pMediaBuffer);
- return hr;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnShutdown()
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnShutdown");
- return S_OK;
- }
+ SafeRelease(&pSample);
+ SafeRelease(&pMediaBuffer);
+ return hr;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnShutdown() {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnShutdown");
+ return S_OK;
+ }
};
//
@@ -161,36 +144,36 @@ bail:
MFCodecTopology::MFCodecTopology(MFCodec* pCodec, HRESULT &hr)
-: m_nRefCount(1)
-, m_bInitialized(FALSE)
-, m_bStarted(FALSE)
-, m_pCodec(NULL)
-, m_pSource(NULL)
-, m_pSession(NULL)
-, m_pTopologyFull(NULL)
-, m_pTopologyPartial(NULL)
-, m_pOutputType(NULL)
-, m_pInputType(NULL)
-, m_pGrabberCallback(NULL)
-, m_pGrabberActivate(NULL)
-, m_pTread(NULL)
+ : m_nRefCount(1)
+ , m_bInitialized(FALSE)
+ , m_bStarted(FALSE)
+ , m_pCodec(NULL)
+ , m_pSource(NULL)
+ , m_pSession(NULL)
+ , m_pTopologyFull(NULL)
+ , m_pTopologyPartial(NULL)
+ , m_pOutputType(NULL)
+ , m_pInputType(NULL)
+ , m_pGrabberCallback(NULL)
+ , m_pGrabberActivate(NULL)
+ , m_pTread(NULL)
{
- hr = S_OK;
+ hr = S_OK;
- if(!pCodec)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!pCodec) {
+ CHECK_HR(hr = E_POINTER);
+ }
- m_pCodec = pCodec;
- m_pCodec->AddRef();
+ m_pCodec = pCodec;
+ m_pCodec->AddRef();
-bail: ;
+bail:
+ ;
}
MFCodecTopology::~MFCodecTopology()
{
- DeInitialize();
+ DeInitialize();
}
ULONG MFCodecTopology::AddRef()
@@ -201,8 +184,7 @@ ULONG MFCodecTopology::AddRef()
ULONG MFCodecTopology::Release()
{
ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -211,245 +193,226 @@ ULONG MFCodecTopology::Release()
HRESULT MFCodecTopology::QueryInterface(REFIID iid, void** ppv)
{
- return E_NOTIMPL;
+ return E_NOTIMPL;
}
HRESULT MFCodecTopology::Start()
{
- HRESULT hr = S_OK;
-
- if(m_bStarted)
- {
- return S_OK;
- }
-
- if(!m_bInitialized)
- {
- CHECK_HR(hr = E_FAIL);
- }
-
- CHECK_HR(hr = MFUtils::RunSession(m_pSession, m_pTopologyFull));
-
- // Start asynchronous watcher thread
- m_bStarted = TRUE;
- int ret = tsk_thread_create(&m_pTread, MFCodecTopology::RunSessionThread, this);
- if(ret != 0)
- {
- TSK_DEBUG_ERROR("Failed to create thread");
- m_bStarted = FALSE;
- if(m_pTread)
- {
- tsk_thread_join(&m_pTread);
- }
- MFUtils::ShutdownSession(m_pSession, m_pSource);
- CHECK_HR(hr = E_FAIL);
- }
-
- // FIXME
- Sleep(2000);
+ HRESULT hr = S_OK;
+
+ if(m_bStarted) {
+ return S_OK;
+ }
+
+ if(!m_bInitialized) {
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ CHECK_HR(hr = MFUtils::RunSession(m_pSession, m_pTopologyFull));
+
+ // Start asynchronous watcher thread
+ m_bStarted = TRUE;
+ int ret = tsk_thread_create(&m_pTread, MFCodecTopology::RunSessionThread, this);
+ if(ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ m_bStarted = FALSE;
+ if(m_pTread) {
+ tsk_thread_join(&m_pTread);
+ }
+ MFUtils::ShutdownSession(m_pSession, m_pSource);
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ // FIXME
+ Sleep(2000);
bail:
- return hr;
+ return hr;
}
HRESULT MFCodecTopology::Stop()
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(!m_bStarted)
- {
- return S_OK;
- }
+ if(!m_bStarted) {
+ return S_OK;
+ }
- m_bStarted = FALSE;
+ m_bStarted = FALSE;
hr = MFUtils::ShutdownSession(m_pSession, NULL); // stop session to wakeup the asynchronous thread
- if(m_pTread)
- {
+ if(m_pTread) {
tsk_thread_join(&m_pTread);
}
hr = MFUtils::ShutdownSession(NULL, m_pSource);
-
- return hr;
+
+ return hr;
}
HRESULT MFCodecTopology::Initialize()
{
- HRESULT hr = S_OK;
- IMFAttributes* pSessionAttributes = NULL;
+ HRESULT hr = S_OK;
+ IMFAttributes* pSessionAttributes = NULL;
- if(m_bInitialized)
- {
- CHECK_HR(hr = E_FAIL);
- }
+ if(m_bInitialized) {
+ CHECK_HR(hr = E_FAIL);
+ }
- // Set session attributes
- CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
- CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+ // Set session attributes
+ CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+ CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
- // Get input and output type
- CHECK_HR(hr = m_pCodec->GetInputType(&m_pInputType));
- CHECK_HR(hr = m_pCodec->GetOutputType(&m_pOutputType));
+ // Get input and output type
+ CHECK_HR(hr = m_pCodec->GetInputType(&m_pInputType));
+ CHECK_HR(hr = m_pCodec->GetOutputType(&m_pOutputType));
- // Create custom source
- CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&m_pSource, m_pInputType));
+ // Create custom source
+ CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&m_pSource, m_pInputType));
- // Create the sample grabber sink.
- CHECK_HR(hr = MFCodecTopologySampleGrabberCB::CreateInstance(this, &m_pGrabberCallback));
- CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(m_pOutputType, m_pGrabberCallback, &m_pGrabberActivate));
+ // Create the sample grabber sink.
+ CHECK_HR(hr = MFCodecTopologySampleGrabberCB::CreateInstance(this, &m_pGrabberCallback));
+ CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(m_pOutputType, m_pGrabberCallback, &m_pGrabberActivate));
- // To run as fast as possible, set this attribute (requires Windows 7 or later):
- CHECK_HR(hr = m_pGrabberActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+ // To run as fast as possible, set this attribute (requires Windows 7 or later):
+ CHECK_HR(hr = m_pGrabberActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &m_pSession));
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &m_pSession));
- // Create the topology.
- CHECK_HR(hr = MFUtils::CreateTopology(
- m_pSource,
- m_pCodec->GetMFT(),
- m_pGrabberActivate,
- NULL, // no preview
- m_pOutputType,
- &m_pTopologyPartial));
- // Resolve topology (adds video processors if needed).
- CHECK_HR(hr = MFUtils::ResolveTopology(m_pTopologyPartial, &m_pTopologyFull));
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(
+ m_pSource,
+ m_pCodec->GetMFT(),
+ m_pGrabberActivate,
+ NULL, // no preview
+ m_pOutputType,
+ &m_pTopologyPartial));
+ // Resolve topology (adds video processors if needed).
+ CHECK_HR(hr = MFUtils::ResolveTopology(m_pTopologyPartial, &m_pTopologyFull));
- m_bInitialized = TRUE;
+ m_bInitialized = TRUE;
bail:
- SafeRelease(&pSessionAttributes);
+ SafeRelease(&pSessionAttributes);
- if(FAILED(hr))
- {
- DeInitialize();
- }
+ if(FAILED(hr)) {
+ DeInitialize();
+ }
- return hr;
+ return hr;
}
void* TSK_STDCALL MFCodecTopology::RunSessionThread(void *pArg)
{
- MFCodecTopology *pSelf = (MFCodecTopology *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
- MediaEventType met;
-
- TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - ENTER");
-
- while(pSelf->isStarted())
- {
- CHECK_HR(hr = pSelf->m_pSession->GetEvent(0, &pEvent));
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- CHECK_HR(hr = pEvent->GetType(&met));
-
- if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
- if (met == MESessionEnded)
- {
- break;
- }
- SafeRelease(&pEvent);
- }
+ MFCodecTopology *pSelf = (MFCodecTopology *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - ENTER");
+
+ while(pSelf->isStarted()) {
+ CHECK_HR(hr = pSelf->m_pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded) {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - EXIT");
- return NULL;
+ return NULL;
}
HRESULT MFCodecTopology::DeInitialize()
{
- Stop();
-
- SafeRelease(&m_pCodec);
- SafeRelease(&m_pSource);
- SafeRelease(&m_pCodec);
- SafeRelease(&m_pSession);
- SafeRelease(&m_pTopologyFull);
- SafeRelease(&m_pTopologyPartial);
- SafeRelease(&m_pOutputType);
- SafeRelease(&m_pInputType);
- SafeRelease(&m_pGrabberCallback);
- SafeRelease(&m_pGrabberActivate);
-
- if(m_pTread)
- {
- tsk_thread_join(&m_pTread);
- }
+ Stop();
+
+ SafeRelease(&m_pCodec);
+ SafeRelease(&m_pSource);
+ SafeRelease(&m_pCodec);
+ SafeRelease(&m_pSession);
+ SafeRelease(&m_pTopologyFull);
+ SafeRelease(&m_pTopologyPartial);
+ SafeRelease(&m_pOutputType);
+ SafeRelease(&m_pInputType);
+ SafeRelease(&m_pGrabberCallback);
+ SafeRelease(&m_pGrabberActivate);
+
+ if(m_pTread) {
+ tsk_thread_join(&m_pTread);
+ }
- m_SampleQueue.Clear();
+ m_SampleQueue.Clear();
- m_bInitialized = FALSE;
+ m_bInitialized = FALSE;
- return S_OK;
+ return S_OK;
}
HRESULT MFCodecTopology::ProcessInput(IMFSample* pSample)
{
- HRESULT hr = S_OK;
- IMFMediaBuffer* pMediaBuffer = NULL;
- BYTE* _pcBufferPtr = NULL;
-
- if(!pSample)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(m_pCodec->GetMediaType() != MFCodecMediaType_Video)
- {
- CHECK_HR(hr = E_NOTIMPL);
- }
-
- if(!m_bStarted)
- {
- CHECK_HR(hr = Start());
- }
-
- CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
-
- DWORD dwDataLength = 0;
- BOOL bLocked = FALSE;
- CHECK_HR(hr = pMediaBuffer->GetCurrentLength(&dwDataLength));
- bLocked = TRUE;
- if(dwDataLength > 0)
- {
- CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
- CHECK_HR(hr = m_pSource->CopyVideoBuffer(
- dynamic_cast<MFCodecVideo*>(m_pCodec)->GetWidth(),
- dynamic_cast<MFCodecVideo*>(m_pCodec)->GetHeight(),
- _pcBufferPtr, dwDataLength));
- }
+ HRESULT hr = S_OK;
+ IMFMediaBuffer* pMediaBuffer = NULL;
+ BYTE* _pcBufferPtr = NULL;
+
+ if(!pSample) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(m_pCodec->GetMediaType() != MFCodecMediaType_Video) {
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+
+ if(!m_bStarted) {
+ CHECK_HR(hr = Start());
+ }
+
+ CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+
+ DWORD dwDataLength = 0;
+ BOOL bLocked = FALSE;
+ CHECK_HR(hr = pMediaBuffer->GetCurrentLength(&dwDataLength));
+ if(dwDataLength > 0) {
+ CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
+ bLocked = TRUE; // set only after Lock() succeeds so bail never calls Unlock() on an unlocked buffer
+ CHECK_HR(hr = m_pSource->CopyVideoBuffer(
+ dynamic_cast<MFCodecVideo*>(m_pCodec)->GetWidth(),
+ dynamic_cast<MFCodecVideo*>(m_pCodec)->GetHeight(),
+ _pcBufferPtr, dwDataLength));
+ }
bail:
- if(bLocked)
- {
- pMediaBuffer->Unlock();
- }
- SafeRelease(&pMediaBuffer);
- return hr;
+ if(bLocked) {
+ pMediaBuffer->Unlock();
+ }
+ SafeRelease(&pMediaBuffer);
+ return hr;
}
HRESULT MFCodecTopology::ProcessOutput(IMFSample **ppSample)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(!ppSample)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!ppSample) {
+ CHECK_HR(hr = E_POINTER);
+ }
- if(!m_SampleQueue.IsEmpty())
- {
- CHECK_HR(hr = m_SampleQueue.Dequeue(ppSample)); // thread-safe
- }
+ if(!m_SampleQueue.IsEmpty()) {
+ CHECK_HR(hr = m_SampleQueue.Dequeue(ppSample)); // thread-safe
+ }
bail:
- return hr;
+ return hr;
}
//
@@ -458,11 +421,11 @@ bail:
MFCodecVideoTopology::MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr)
-: MFCodecTopology(pCodec, hr)
-, m_nWidth(0)
-, m_nHeight(0)
+ : MFCodecTopology(pCodec, hr)
+ , m_nWidth(0)
+ , m_nHeight(0)
{
- assert(pCodec->GetMediaType() == MFCodecMediaType_Video);
+ assert(pCodec->GetMediaType() == MFCodecMediaType_Video);
}
MFCodecVideoTopology::~MFCodecVideoTopology()
diff --git a/plugins/pluginWinMF/internals/mf_codec_topology.h b/plugins/pluginWinMF/internals/mf_codec_topology.h
index c5d2f34..6a4bb94 100755
--- a/plugins/pluginWinMF/internals/mf_codec_topology.h
+++ b/plugins/pluginWinMF/internals/mf_codec_topology.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -28,59 +28,63 @@ class MFCodecTopologySampleGrabberCB;
class MFCodecTopology : IUnknown
{
- friend class MFCodecTopologySampleGrabberCB;
+ friend class MFCodecTopologySampleGrabberCB;
public:
- MFCodecTopology(MFCodec* pCodec, HRESULT &hr);
- virtual ~MFCodecTopology();
+ MFCodecTopology(MFCodec* pCodec, HRESULT &hr);
+ virtual ~MFCodecTopology();
- virtual HRESULT Initialize();
- virtual HRESULT DeInitialize();
+ virtual HRESULT Initialize();
+ virtual HRESULT DeInitialize();
- virtual HRESULT ProcessInput(IMFSample* pSample);
- virtual HRESULT ProcessOutput(IMFSample **ppSample);
+ virtual HRESULT ProcessInput(IMFSample* pSample);
+ virtual HRESULT ProcessOutput(IMFSample **ppSample);
- // IUnknown
+ // IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
STDMETHODIMP_(ULONG) AddRef();
STDMETHODIMP_(ULONG) Release();
- inline BOOL isStarted() { return m_bStarted; }
- inline BOOL isInitialized() { return m_bInitialized; }
+ inline BOOL isStarted() {
+ return m_bStarted;
+ }
+ inline BOOL isInitialized() {
+ return m_bInitialized;
+ }
private:
- static void* TSK_STDCALL RunSessionThread(void *pArg);
+ static void* TSK_STDCALL RunSessionThread(void *pArg);
protected:
- HRESULT Start();
- HRESULT Stop();
+ HRESULT Start();
+ HRESULT Stop();
private:
- long m_nRefCount;
+ long m_nRefCount;
protected:
- BOOL m_bInitialized;
- BOOL m_bStarted;
- MFCodec* m_pCodec;
- CMFSource *m_pSource;
- IMFMediaSession *m_pSession;
+ BOOL m_bInitialized;
+ BOOL m_bStarted;
+ MFCodec* m_pCodec;
+ CMFSource *m_pSource;
+ IMFMediaSession *m_pSession;
IMFTopology *m_pTopologyFull;
- IMFTopology *m_pTopologyPartial;
- IMFMediaType *m_pOutputType;
- IMFMediaType *m_pInputType;
- MFCodecTopologySampleGrabberCB *m_pGrabberCallback;
+ IMFTopology *m_pTopologyPartial;
+ IMFMediaType *m_pOutputType;
+ IMFMediaType *m_pInputType;
+ MFCodecTopologySampleGrabberCB *m_pGrabberCallback;
IMFActivate *m_pGrabberActivate;
- tsk_thread_handle_t* m_pTread;
- SampleQueue m_SampleQueue;
+ tsk_thread_handle_t* m_pTread;
+ SampleQueue m_SampleQueue;
};
class MFCodecVideoTopology : public MFCodecTopology
{
public:
- MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr);
- virtual ~MFCodecVideoTopology();
+ MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr);
+ virtual ~MFCodecVideoTopology();
private:
- UINT32 m_nWidth, m_nHeight;
+ UINT32 m_nWidth, m_nHeight;
};
diff --git a/plugins/pluginWinMF/internals/mf_custom_src.cxx b/plugins/pluginWinMF/internals/mf_custom_src.cxx
index 1de9904..186887e 100755
--- a/plugins/pluginWinMF/internals/mf_custom_src.cxx
+++ b/plugins/pluginWinMF/internals/mf_custom_src.cxx
@@ -1,20 +1,20 @@
-/*
+/*
* Copyright (C) Microsoft Corporation. All rights reserved.
* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -63,7 +63,7 @@ LONGLONG BufferSizeFromAudioDuration(const WAVEFORMATEX *pWav, LONGLONG duration
HRESULT CMFSource_CreateInstance(REFIID iid, void **ppMFT)
{
- return CMFSource::CreateInstance(iid, ppMFT);
+ return CMFSource::CreateInstance(iid, ppMFT);
}
@@ -77,30 +77,26 @@ HRESULT CMFSource_CreateInstance(REFIID iid, void **ppMFT)
HRESULT CMFSource::CreateInstance(REFIID iid, void **ppSource) // Called when source used as plugin
{
- return CreateInstanceEx(iid, ppSource, NULL);
+ return CreateInstanceEx(iid, ppSource, NULL);
}
HRESULT CMFSource::CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType) // Called when source directly called
{
- if (ppSource == NULL)
- {
+ if (ppSource == NULL) {
return E_POINTER;
}
HRESULT hr = S_OK;
CMFSource *pSource = new (std::nothrow) CMFSource(hr, pMediaType); // Created with ref count = 1.
- if (pSource == NULL)
- {
+ if (pSource == NULL) {
return E_OUTOFMEMORY;
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pSource->QueryInterface(iid, ppSource);
- if(SUCCEEDED(hr))
- {
- ((CMFSource*)(*ppSource))->AddRef();
- }
+ if(SUCCEEDED(hr)) {
+ ((CMFSource*)(*ppSource))->AddRef();
+ }
}
SafeRelease(&pSource);
@@ -115,22 +111,21 @@ HRESULT CMFSource::CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *p
//-------------------------------------------------------------------
CMFSource::CMFSource(HRESULT& hr, IMFMediaType *pMediaType)
- : m_nRefCount(1),
- m_pEventQueue(NULL),
- m_pPresentationDescriptor(NULL),
- m_IsShutdown(FALSE),
- m_state(STATE_STOPPED),
- m_pStream(NULL),
- m_pMediaType(NULL)
+ : m_nRefCount(1),
+ m_pEventQueue(NULL),
+ m_pPresentationDescriptor(NULL),
+ m_IsShutdown(FALSE),
+ m_state(STATE_STOPPED),
+ m_pStream(NULL),
+ m_pMediaType(NULL)
{
// Create the media event queue.
hr = MFCreateEventQueue(&m_pEventQueue);
- if(pMediaType)
- {
- m_pMediaType = pMediaType;
- pMediaType->AddRef();
- }
+ if(pMediaType) {
+ m_pMediaType = pMediaType;
+ pMediaType->AddRef();
+ }
InitializeCriticalSection(&m_critSec);
}
@@ -145,7 +140,7 @@ CMFSource::~CMFSource()
{
assert(m_IsShutdown);
assert(m_nRefCount == 0);
- SafeRelease(&m_pMediaType);
+ SafeRelease(&m_pMediaType);
DeleteCriticalSection(&m_critSec);
}
@@ -154,26 +149,22 @@ CMFSource::~CMFSource()
HRESULT CMFSource::CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize)
{
- if(!pBufferPtr)
- {
- TSK_DEBUG_ERROR("Invalid buffer pointer");
- return E_POINTER;
- }
-
- if(!nWidth || !nHeight || !nBufferSize)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return E_INVALIDARG;
- }
- if(m_pStream)
- {
- return m_pStream->CopyVideoBuffer(nWidth, nHeight, pBufferPtr, nBufferSize);
- }
- else
- {
- TSK_DEBUG_ERROR("No stream associated to this source");
- return E_NOT_VALID_STATE;
- }
+ if(!pBufferPtr) {
+ TSK_DEBUG_ERROR("Invalid buffer pointer");
+ return E_POINTER;
+ }
+
+ if(!nWidth || !nHeight || !nBufferSize) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return E_INVALIDARG;
+ }
+ if(m_pStream) {
+ return m_pStream->CopyVideoBuffer(nWidth, nHeight, pBufferPtr, nBufferSize);
+ }
+ else {
+ TSK_DEBUG_ERROR("No stream associated to this source");
+ return E_NOT_VALID_STATE;
+ }
}
// IUnknown methods
@@ -186,8 +177,7 @@ ULONG CMFSource::AddRef()
ULONG CMFSource::Release()
{
ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -196,8 +186,7 @@ ULONG CMFSource::Release()
HRESULT CMFSource::QueryInterface(REFIID iid, void** ppv)
{
- static const QITAB qit[] =
- {
+ static const QITAB qit[] = {
QITABENT(CMFSource, IMFMediaEventGenerator),
QITABENT(CMFSource, IMFMediaSource),
{ 0 }
@@ -220,8 +209,7 @@ HRESULT CMFSource::BeginGetEvent(IMFAsyncCallback* pCallback, IUnknown* punkStat
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->BeginGetEvent(pCallback, punkState);
}
@@ -238,8 +226,7 @@ HRESULT CMFSource::EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent)
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->EndGetEvent(pResult, ppEvent);
}
@@ -263,16 +250,14 @@ HRESULT CMFSource::GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent)
// Check shutdown
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
pQueue = m_pEventQueue;
pQueue->AddRef();
}
LeaveCriticalSection(&m_critSec);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pQueue->GetEvent(dwFlags, ppEvent);
}
@@ -288,8 +273,7 @@ HRESULT CMFSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRES
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
}
@@ -309,8 +293,7 @@ HRESULT CMFSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRES
HRESULT CMFSource::CreatePresentationDescriptor(IMFPresentationDescriptor** ppPresentationDescriptor)
{
- if (ppPresentationDescriptor == NULL)
- {
+ if (ppPresentationDescriptor == NULL) {
return E_POINTER;
}
@@ -320,17 +303,14 @@ HRESULT CMFSource::CreatePresentationDescriptor(IMFPresentationDescriptor** ppPr
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
- if (m_pPresentationDescriptor == NULL)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pPresentationDescriptor == NULL) {
hr = CreatePresentationDescriptor();
}
}
// Clone our default presentation descriptor.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pPresentationDescriptor->Clone(ppPresentationDescriptor);
}
@@ -347,8 +327,7 @@ HRESULT CMFSource::CreatePresentationDescriptor(IMFPresentationDescriptor** ppPr
HRESULT CMFSource::GetCharacteristics(DWORD* pdwCharacteristics)
{
- if (pdwCharacteristics == NULL)
- {
+ if (pdwCharacteristics == NULL) {
return E_POINTER;
}
@@ -358,8 +337,7 @@ HRESULT CMFSource::GetCharacteristics(DWORD* pdwCharacteristics)
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
*pdwCharacteristics = MFMEDIASOURCE_CAN_PAUSE | MFMEDIASOURCE_IS_LIVE;
}
@@ -377,7 +355,7 @@ HRESULT CMFSource::Start(
IMFPresentationDescriptor* pPresentationDescriptor,
const GUID* pguidTimeFormat,
const PROPVARIANT* pvarStartPosition
- )
+)
{
HRESULT hr = S_OK;
LONGLONG llStartOffset = 0;
@@ -392,14 +370,12 @@ HRESULT CMFSource::Start(
// Check parameters.
// Start position and presentation descriptor cannot be NULL.
- if (pvarStartPosition == NULL || pPresentationDescriptor == NULL)
- {
+ if (pvarStartPosition == NULL || pPresentationDescriptor == NULL) {
return E_INVALIDARG;
}
// Check the time format. Must be "reference time" units.
- if ((pguidTimeFormat != NULL) && (*pguidTimeFormat != GUID_NULL))
- {
+ if ((pguidTimeFormat != NULL) && (*pguidTimeFormat != GUID_NULL)) {
// Unrecognized time format GUID.
return MF_E_UNSUPPORTED_TIME_FORMAT;
}
@@ -410,33 +386,27 @@ HRESULT CMFSource::Start(
CHECK_HR(hr = CheckShutdown());
// Check the start position.
- if (pvarStartPosition->vt == VT_I8)
- {
+ if (pvarStartPosition->vt == VT_I8) {
// Start position is given in pvarStartPosition in 100-ns units.
llStartOffset = pvarStartPosition->hVal.QuadPart;
- if (m_state != STATE_STOPPED)
- {
+ if (m_state != STATE_STOPPED) {
// Source is running or paused, so this is a seek.
bIsSeek = TRUE;
}
}
- else if (pvarStartPosition->vt == VT_EMPTY)
- {
+ else if (pvarStartPosition->vt == VT_EMPTY) {
// Start position is "current position".
// For stopped, that means 0. Otherwise, use the current position.
- if (m_state == STATE_STOPPED)
- {
+ if (m_state == STATE_STOPPED) {
llStartOffset = 0;
}
- else
- {
+ else {
llStartOffset = GetCurrentPosition();
bIsRestartFromCurrentPosition = TRUE;
}
}
- else
- {
+ else {
// We don't support this time format.
hr = MF_E_UNSUPPORTED_TIME_FORMAT;
goto bail;
@@ -457,12 +427,10 @@ HRESULT CMFSource::Start(
var.hVal.QuadPart = llStartOffset;
// Send the source event.
- if (bIsSeek)
- {
+ if (bIsSeek) {
CHECK_HR(hr = QueueEvent(MESourceSeeked, GUID_NULL, hr, &var));
}
- else
- {
+ else {
// For starting, if we are RESTARTING from the current position and our
// previous state was running/paused, then we need to add the
// MF_EVENT_SOURCE_ACTUAL_START attribute to the event. This requires
@@ -472,8 +440,7 @@ HRESULT CMFSource::Start(
CHECK_HR(hr = MFCreateMediaEvent(MESourceStarted, GUID_NULL, hr, &var, &pEvent));
// For restarts, set the actual start time as an attribute.
- if (bIsRestartFromCurrentPosition)
- {
+ if (bIsRestartFromCurrentPosition) {
CHECK_HR(hr = pEvent->SetUINT64(MF_EVENT_SOURCE_ACTUAL_START, llStartOffset));
}
@@ -484,31 +451,26 @@ HRESULT CMFSource::Start(
bQueuedStartEvent = TRUE;
// Send the stream event.
- if (m_pStream)
- {
- if (bIsSeek)
- {
+ if (m_pStream) {
+ if (bIsSeek) {
CHECK_HR(hr = m_pStream->QueueEvent(MEStreamSeeked, GUID_NULL, hr, &var));
}
- else
- {
+ else {
CHECK_HR(hr = m_pStream->QueueEvent(MEStreamStarted, GUID_NULL, hr, &var));
}
}
- if (bIsSeek)
- {
+ if (bIsSeek) {
// For seek requests, flush any queued samples.
CHECK_HR(hr = m_pStream->Flush());
}
- else
- {
+ else {
// Otherwise, deliver any queued samples.
CHECK_HR(hr = m_pStream->DeliverQueuedSamples());
}
- // Initialize Stream parameters
- CHECK_HR(hr = m_pStream->InitializeParams());
+ // Initialize Stream parameters
+ CHECK_HR(hr = m_pStream->InitializeParams());
m_state = STATE_STARTED;
@@ -522,8 +484,7 @@ bail:
// event (with a success code), then we need to raise an
// MEError event.
- if (FAILED(hr) && bQueuedStartEvent)
- {
+ if (FAILED(hr) && bQueuedStartEvent) {
hr = QueueEvent(MEError, GUID_NULL, hr, &var);
}
@@ -531,7 +492,7 @@ bail:
SafeRelease(&pEvent);
LeaveCriticalSection(&m_critSec);
-
+
return hr;
}
@@ -550,31 +511,25 @@ HRESULT CMFSource::Pause()
hr = CheckShutdown();
// Pause is only allowed from started state.
- if (SUCCEEDED(hr))
- {
- if (m_state != STATE_STARTED)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_state != STATE_STARTED) {
hr = MF_E_INVALID_STATE_TRANSITION;
}
}
// Send the appropriate events.
- if (SUCCEEDED(hr))
- {
- if (m_pStream)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pStream) {
hr = m_pStream->QueueEvent(MEStreamPaused, GUID_NULL, S_OK, NULL);
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = QueueEvent(MESourcePaused, GUID_NULL, S_OK, NULL);
}
// Update our state.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
m_state = STATE_PAUSED;
}
@@ -597,8 +552,7 @@ HRESULT CMFSource::Stop()
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Update our state.
m_state = STATE_STOPPED;
@@ -610,15 +564,12 @@ HRESULT CMFSource::Stop()
// Queue events.
//
- if (SUCCEEDED(hr))
- {
- if (m_pStream)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pStream) {
hr = m_pStream->QueueEvent(MEStreamStopped, GUID_NULL, S_OK, NULL);
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = QueueEvent(MESourceStopped, GUID_NULL, S_OK, NULL);
}
@@ -645,17 +596,14 @@ HRESULT CMFSource::Shutdown()
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Shut down the stream object.
- if (m_pStream)
- {
+ if (m_pStream) {
(void)m_pStream->Shutdown();
}
// Shut down the event queue.
- if (m_pEventQueue)
- {
+ if (m_pEventQueue) {
(void)m_pEventQueue->Shutdown();
}
@@ -694,44 +642,39 @@ HRESULT CMFSource::CreatePresentationDescriptor()
// Create the stream descriptor.
hr = MFCreateStreamDescriptor(
- 0, // stream identifier
- 1, // Number of media types.
- &m_pMediaType, // Array of media types
- &pStreamDescriptor
- );
+ 0, // stream identifier
+ 1, // Number of media types.
+ &m_pMediaType, // Array of media types
+ &pStreamDescriptor
+ );
// Set the default media type on the media type handler.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pStreamDescriptor->GetMediaTypeHandler(&pHandler);
}
-
- if (SUCCEEDED(hr))
- {
- hr = pHandler->SetCurrentMediaType(m_pMediaType);
+
+ if (SUCCEEDED(hr)) {
+ hr = pHandler->SetCurrentMediaType(m_pMediaType);
}
// Create the presentation descriptor.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFCreatePresentationDescriptor(
- 1, // Number of stream descriptors
- &pStreamDescriptor, // Array of stream descriptors
- &m_pPresentationDescriptor
- );
+ 1, // Number of stream descriptors
+ &pStreamDescriptor, // Array of stream descriptors
+ &m_pPresentationDescriptor
+ );
}
// Select the first stream
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pPresentationDescriptor->SelectStream(0);
}
// Set the file/stream duration as an attribute on the presentation descriptor.
- if (SUCCEEDED(hr))
- {
- hr = m_pPresentationDescriptor->SetUINT64(MF_PD_DURATION, (UINT64)ULLONG_MAX);
+ if (SUCCEEDED(hr)) {
+ hr = m_pPresentationDescriptor->SetUINT64(MF_PD_DURATION, (UINT64)ULLONG_MAX);
}
-
+
SafeRelease(&pStreamDescriptor);
SafeRelease(&pHandler);
return hr;
@@ -764,7 +707,7 @@ HRESULT CMFSource::ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD
IMFStreamDescriptor *pStreamDescriptor = NULL;
IMFMediaTypeHandler *pHandler = NULL;
IMFMediaType *pMediaType = NULL;
- GUID majorType;
+ GUID majorType;
DWORD cStreamDescriptors = 0;
BOOL fSelected = FALSE;
@@ -772,85 +715,71 @@ HRESULT CMFSource::ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD
// Make sure there is only one stream.
hr = pPD->GetStreamDescriptorCount(&cStreamDescriptors);
- if (SUCCEEDED(hr))
- {
- if (cStreamDescriptors != 1)
- {
+ if (SUCCEEDED(hr)) {
+ if (cStreamDescriptors != 1) {
hr = MF_E_UNSUPPORTED_REPRESENTATION;
}
}
// Get the stream descriptor.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pStreamDescriptor);
}
// Make sure it's selected. (This media source has only one stream, so it
// is not useful to deselect the only stream.)
- if (SUCCEEDED(hr))
- {
- if (!fSelected)
- {
+ if (SUCCEEDED(hr)) {
+ if (!fSelected) {
hr = MF_E_UNSUPPORTED_REPRESENTATION;
}
}
// Get the media type handler, so that we can get the media type.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pStreamDescriptor->GetMediaTypeHandler(&pHandler);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pHandler->GetCurrentMediaType(&pMediaType);
}
- hr = pMediaType->GetMajorType(&majorType);
-
- if (SUCCEEDED(hr))
- {
- if(majorType == MFMediaType_Video)
- {
- if (SUCCEEDED(hr))
- {
- hr = MFUtils::ValidateVideoFormat(pMediaType);
- }
- }
- else
- {
- WAVEFORMATEX *pFormat = NULL;
- UINT32 cbWaveFormat = 0;
-
- if (SUCCEEDED(hr))
- {
- hr = MFCreateWaveFormatExFromMFMediaType(
- pMediaType,
- &pFormat,
- &cbWaveFormat);
- }
- if (SUCCEEDED(hr))
- {
- /*assert(this->WaveFormat() != NULL);
-
- if (cbWaveFormat < this->WaveFormatSize())
- {
- hr = MF_E_INVALIDMEDIATYPE;
- }*/
- }
-
- if (SUCCEEDED(hr))
- {
- /*if (memcmp(pFormat, WaveFormat(), WaveFormatSize()) != 0)
- {
- hr = MF_E_INVALIDMEDIATYPE;
- }*/
- }
-
- CoTaskMemFree(pFormat);
- }
- }
+ if (SUCCEEDED(hr)) hr = pMediaType->GetMajorType(&majorType); // pMediaType may be NULL if an earlier step failed
+
+ if (SUCCEEDED(hr)) {
+ if(majorType == MFMediaType_Video) {
+ if (SUCCEEDED(hr)) {
+ hr = MFUtils::ValidateVideoFormat(pMediaType);
+ }
+ }
+ else {
+ WAVEFORMATEX *pFormat = NULL;
+ UINT32 cbWaveFormat = 0;
+
+ if (SUCCEEDED(hr)) {
+ hr = MFCreateWaveFormatExFromMFMediaType(
+ pMediaType,
+ &pFormat,
+ &cbWaveFormat);
+ }
+ if (SUCCEEDED(hr)) {
+ /*assert(this->WaveFormat() != NULL);
+
+ if (cbWaveFormat < this->WaveFormatSize())
+ {
+ hr = MF_E_INVALIDMEDIATYPE;
+ }*/
+ }
+
+ if (SUCCEEDED(hr)) {
+ /*if (memcmp(pFormat, WaveFormat(), WaveFormatSize()) != 0)
+ {
+ hr = MF_E_INVALIDMEDIATYPE;
+ }*/
+ }
+
+ CoTaskMemFree(pFormat);
+ }
+ }
SafeRelease(&pStreamDescriptor);
SafeRelease(&pHandler);
@@ -882,27 +811,23 @@ HRESULT CMFSource::QueueNewStreamEvent(IMFPresentationDescriptor *pPD)
hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// The stream must be selected, because we don't allow the app
// to de-select the stream. See ValidatePresentationDescriptor.
assert(fSelected);
- if (m_pStream)
- {
+ if (m_pStream) {
// The stream already exists, and is still selected.
// Send the MEUpdatedStream event.
hr = QueueEventWithIUnknown(this, MEUpdatedStream, S_OK, m_pStream);
}
- else
- {
+ else {
// The stream does not exist, and is now selected.
// Create a new stream.
hr = CreateCMFStreamSource(pSD);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// CreateCMFStreamSource creates the stream, so m_pStream is no longer NULL.
assert(m_pStream != NULL);
@@ -926,13 +851,11 @@ HRESULT CMFSource::CreateCMFStreamSource(IMFStreamDescriptor *pSD)
HRESULT hr = S_OK;
m_pStream = new (std::nothrow) CMFStreamSource(this, pSD, hr);
- if (m_pStream == NULL)
- {
+ if (m_pStream == NULL) {
hr = E_OUTOFMEMORY;
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
SafeRelease(&m_pStream);
}
@@ -948,12 +871,10 @@ HRESULT CMFSource::CreateCMFStreamSource(IMFStreamDescriptor *pSD)
LONGLONG CMFSource::GetCurrentPosition() const
{
- if (m_pStream)
- {
+ if (m_pStream) {
return m_pStream->GetCurrentPosition();
}
- else
- {
+ else {
// If no stream is selected, we are at time 0 by definition.
return 0;
}
@@ -977,11 +898,11 @@ CMFStreamSource::CMFStreamSource(CMFSource *pSource, IMFStreamDescriptor *pSD,
m_pEventQueue(NULL),
m_IsShutdown(FALSE),
m_rtCurrentPosition(0),
- m_rtDuration(0),
+ m_rtDuration(0),
m_discontinuity(FALSE),
m_EOS(FALSE),
- m_pMediaBuffer(NULL),
- m_nBufferSize(0)
+ m_pMediaBuffer(NULL),
+ m_nBufferSize(0)
{
m_pSource = pSource;
m_pSource->AddRef();
@@ -992,12 +913,12 @@ CMFStreamSource::CMFStreamSource(CMFSource *pSource, IMFStreamDescriptor *pSD,
// Create the media event queue.
CHECK_HR(hr = MFCreateEventQueue(&m_pEventQueue));
- //CHECK_HR(hr = InitializeParams());
-
+ //CHECK_HR(hr = InitializeParams());
+
InitializeCriticalSection(&m_critSec);
bail:
- return;
+ return;
}
@@ -1010,7 +931,7 @@ CMFStreamSource::~CMFStreamSource()
assert(m_IsShutdown);
assert(m_nRefCount == 0);
- SafeRelease(&m_pMediaBuffer);
+ SafeRelease(&m_pMediaBuffer);
DeleteCriticalSection(&m_critSec);
}
@@ -1020,38 +941,36 @@ CMFStreamSource::~CMFStreamSource()
HRESULT CMFStreamSource::CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize)
{
- // Buffer pointer and size validity already checked by source (caller)
- if(m_guidMajorType != MFMediaType_Video)
- {
- TSK_DEBUG_ERROR("Calling CopyVideoBuffer on no-video stream");
+ // Buffer pointer and size validity already checked by source (caller)
+ if(m_guidMajorType != MFMediaType_Video) {
+ TSK_DEBUG_ERROR("Calling CopyVideoBuffer on no-video stream");
#if defined(E_ILLEGAL_METHOD_CALL)
- return E_ILLEGAL_METHOD_CALL;
+ return E_ILLEGAL_METHOD_CALL;
#else
- return _HRESULT_TYPEDEF_(0x8000000EL);
+ return _HRESULT_TYPEDEF_(0x8000000EL);
#endif
- }
- if(nWidth != m_structVideoParams.nWidth || nHeight != m_structVideoParams.nHeigh || nBufferSize != m_nBufferSize)
- {
- TSK_DEBUG_ERROR("Invalid argument %u#%u or %u#%u or %u#%u. If the call is from a video consumer then, you can safely ignore this message.", nWidth, m_structVideoParams.nWidth, nHeight, m_structVideoParams.nHeigh, nBufferSize, m_nBufferSize);
+ }
+ if(nWidth != m_structVideoParams.nWidth || nHeight != m_structVideoParams.nHeigh || nBufferSize != m_nBufferSize) {
+ TSK_DEBUG_ERROR("Invalid argument %u#%u or %u#%u or %u#%u. If the call is from a video consumer then, you can safely ignore this message.", nWidth, m_structVideoParams.nWidth, nHeight, m_structVideoParams.nHeigh, nBufferSize, m_nBufferSize);
#if defined(E_BOUNDS)
- return E_BOUNDS;
+ return E_BOUNDS;
#else
- return _HRESULT_TYPEDEF_(0x8000000BL);
+ return _HRESULT_TYPEDEF_(0x8000000BL);
#endif
- }
-
- HRESULT hr = S_OK;
-
- BYTE* pMediaBufferPtr = NULL;
- DWORD cbMaxLength = nBufferSize, cbCurrentLength = nBufferSize;
- CHECK_HR(hr = m_pMediaBuffer->Lock(&pMediaBufferPtr, &cbMaxLength, &cbCurrentLength));
-
- memcpy(pMediaBufferPtr, pBufferPtr, nBufferSize);
- CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
- CHECK_HR(hr = m_pMediaBuffer->Unlock());
-
+ }
+
+ HRESULT hr = S_OK;
+
+ BYTE* pMediaBufferPtr = NULL;
+ DWORD cbMaxLength = nBufferSize, cbCurrentLength = nBufferSize;
+ CHECK_HR(hr = m_pMediaBuffer->Lock(&pMediaBufferPtr, &cbMaxLength, &cbCurrentLength));
+
+ memcpy(pMediaBufferPtr, pBufferPtr, nBufferSize);
+ CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
+ CHECK_HR(hr = m_pMediaBuffer->Unlock());
+
bail:
- return hr;
+ return hr;
}
// IUnknown methods
@@ -1064,8 +983,7 @@ ULONG CMFStreamSource::AddRef()
ULONG CMFStreamSource::Release()
{
ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -1074,8 +992,7 @@ ULONG CMFStreamSource::Release()
HRESULT CMFStreamSource::QueryInterface(REFIID iid, void** ppv)
{
- static const QITAB qit[] =
- {
+ static const QITAB qit[] = {
QITABENT(CMFStreamSource, IMFMediaEventGenerator),
QITABENT(CMFStreamSource, IMFMediaStream),
{ 0 }
@@ -1094,8 +1011,7 @@ HRESULT CMFStreamSource::BeginGetEvent(IMFAsyncCallback* pCallback, IUnknown* pu
EnterCriticalSection(&m_critSec);
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->BeginGetEvent(pCallback, punkState);
}
@@ -1110,8 +1026,7 @@ HRESULT CMFStreamSource::EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** pp
EnterCriticalSection(&m_critSec);
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->EndGetEvent(pResult, ppEvent);
}
@@ -1129,16 +1044,14 @@ HRESULT CMFStreamSource::GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent)
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
pQueue = m_pEventQueue;
pQueue->AddRef();
}
LeaveCriticalSection(&m_critSec);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pQueue->GetEvent(dwFlags, ppEvent);
}
@@ -1153,8 +1066,7 @@ HRESULT CMFStreamSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType
EnterCriticalSection(&m_critSec);
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
}
@@ -1173,8 +1085,7 @@ HRESULT CMFStreamSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType
HRESULT CMFStreamSource::GetMediaSource(IMFMediaSource** ppMediaSource)
{
- if (ppMediaSource == NULL)
- {
+ if (ppMediaSource == NULL) {
return E_POINTER;
}
@@ -1187,16 +1098,13 @@ HRESULT CMFStreamSource::GetMediaSource(IMFMediaSource** ppMediaSource)
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
- if (m_pSource == NULL)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pSource == NULL) {
hr = E_UNEXPECTED;
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pSource->QueryInterface(IID_PPV_ARGS(ppMediaSource));
}
@@ -1212,13 +1120,11 @@ HRESULT CMFStreamSource::GetMediaSource(IMFMediaSource** ppMediaSource)
HRESULT CMFStreamSource::GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescriptor)
{
- if (ppStreamDescriptor == NULL)
- {
+ if (ppStreamDescriptor == NULL) {
return E_POINTER;
}
- if (m_pStreamDescriptor == NULL)
- {
+ if (m_pStreamDescriptor == NULL) {
return E_UNEXPECTED;
}
@@ -1228,8 +1134,7 @@ HRESULT CMFStreamSource::GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescr
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
*ppStreamDescriptor = m_pStreamDescriptor;
(*ppStreamDescriptor)->AddRef();
}
@@ -1250,8 +1155,7 @@ HRESULT CMFStreamSource::GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescr
HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
{
- if (m_pSource == NULL)
- {
+ if (m_pSource == NULL) {
return E_UNEXPECTED;
}
@@ -1266,60 +1170,49 @@ HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
hr = CheckShutdown();
// Check if we already reached the end of the stream.
- if (SUCCEEDED(hr))
- {
- if (m_EOS)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_EOS) {
hr = MF_E_END_OF_STREAM;
}
}
// Check the source is stopped.
// GetState does not hold the source's critical section. Safe to call.
- if (SUCCEEDED(hr))
- {
- if (m_pSource->GetState() == CMFSource::STATE_STOPPED)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pSource->GetState() == CMFSource::STATE_STOPPED) {
hr = MF_E_INVALIDREQUEST;
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Create a new audio sample.
hr = CreateSample(&pSample);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// If the caller provided a token, attach it to the sample as
// an attribute.
// NOTE: If we processed sample requests asynchronously, we would
// need to call AddRef on the token and put the token onto a FIFO
// queue. See documentation for IMFMediaStream::RequestSample.
- if (pToken && pSample)
- {
+ if (pToken && pSample) {
hr = pSample->SetUnknown(MFSampleExtension_Token, pToken);
}
}
// If paused, queue the sample for later delivery. Otherwise, deliver the sample now.
- if (SUCCEEDED(hr) && pSample)
- {
- if (m_pSource->GetState() == CMFSource::STATE_PAUSED)
- {
+ if (SUCCEEDED(hr) && pSample) {
+ if (m_pSource->GetState() == CMFSource::STATE_PAUSED) {
hr = m_sampleQueue.Queue(pSample);
}
- else
- {
+ else {
hr = DeliverSample(pSample);
}
}
// Cache a pointer to the source, prior to leaving the critical section.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
pSource = m_pSource;
pSource->AddRef();
}
@@ -1334,10 +1227,8 @@ HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
// source's critical section while holding the stream's critical section, at
// risk of deadlock.
- if (SUCCEEDED(hr))
- {
- if (m_EOS)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_EOS) {
hr = pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
}
}
@@ -1352,78 +1243,72 @@ HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
HRESULT CMFStreamSource::InitializeParams()
{
- HRESULT hr = S_OK;
-
- IMFMediaTypeHandler *pMediaTypeHandler = NULL;
- IMFMediaType* pMediaType = NULL;
-
- CHECK_HR(hr = m_pStreamDescriptor->GetMediaTypeHandler(&pMediaTypeHandler));
- CHECK_HR(hr = pMediaTypeHandler->GetCurrentMediaType(&pMediaType));
-
- GUID majorType, subType;
- pMediaType->GetMajorType(&majorType);
- if(majorType == MFMediaType_Video)
- {
- memset(&m_structVideoParams, 0, sizeof(m_structVideoParams));
- CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &m_structVideoParams.nWidth, &m_structVideoParams.nHeigh));
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
-
- m_guidMajorType = MFMediaType_Video;
- m_guidSubType = subType;
-
- // Guess video size
- UINT32 nBufferSize;
- if(subType == MFVideoFormat_RGB32)
- {
- nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
- }
- else if(subType == MFVideoFormat_RGB24)
- {
- nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
- }
- else if(subType == MFVideoFormat_NV12 || subType == MFVideoFormat_I420)
- {
- nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh * 3) >> 1;
- }
- else
- {
- TSK_DEBUG_ERROR("Video subType not supported");
- CHECK_HR(hr = E_NOTIMPL);
- }
-
- // Allocate media buffer
- SafeRelease(&m_pMediaBuffer);
- CHECK_HR(hr = MFCreateMemoryBuffer(nBufferSize, &m_pMediaBuffer));
- m_nBufferSize = nBufferSize;
- {
- //FIXME: DeliverSample() stops if no data
- BYTE* pBuffer = NULL;
- CHECK_HR(hr = m_pMediaBuffer->Lock(&pBuffer, NULL, NULL));
- memset(pBuffer, 0, nBufferSize);
- CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
- CHECK_HR(hr = m_pMediaBuffer->Unlock());
- }
-
- // Retrieve video Frame rate
- UINT32 unNumerator, unDenominator;
- CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &unNumerator, &unDenominator));
- m_structVideoParams.nFps = (unNumerator / unDenominator);
-
- // Retrieve sample duration based on framerate
- m_rtCurrentPosition = 0;
- CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(m_structVideoParams.nFps, 1, &m_rtDuration));
- }
- else
- {
- TSK_DEBUG_ERROR("Only video media type is supported");
- CHECK_HR(hr = E_NOTIMPL);
- }
+ HRESULT hr = S_OK;
+
+ IMFMediaTypeHandler *pMediaTypeHandler = NULL;
+ IMFMediaType* pMediaType = NULL;
+
+ CHECK_HR(hr = m_pStreamDescriptor->GetMediaTypeHandler(&pMediaTypeHandler));
+ CHECK_HR(hr = pMediaTypeHandler->GetCurrentMediaType(&pMediaType));
+
+ GUID majorType, subType;
+ pMediaType->GetMajorType(&majorType);
+ if(majorType == MFMediaType_Video) {
+ memset(&m_structVideoParams, 0, sizeof(m_structVideoParams));
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &m_structVideoParams.nWidth, &m_structVideoParams.nHeigh));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+
+ m_guidMajorType = MFMediaType_Video;
+ m_guidSubType = subType;
+
+ // Guess video size
+ UINT32 nBufferSize;
+ if(subType == MFVideoFormat_RGB32) {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
+ }
+ else if(subType == MFVideoFormat_RGB24) {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
+ }
+ else if(subType == MFVideoFormat_NV12 || subType == MFVideoFormat_I420) {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh * 3) >> 1;
+ }
+ else {
+ TSK_DEBUG_ERROR("Video subType not supported");
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+
+ // Allocate media buffer
+ SafeRelease(&m_pMediaBuffer);
+ CHECK_HR(hr = MFCreateMemoryBuffer(nBufferSize, &m_pMediaBuffer));
+ m_nBufferSize = nBufferSize;
+ {
+ //FIXME: DeliverSample() stops if no data
+ BYTE* pBuffer = NULL;
+ CHECK_HR(hr = m_pMediaBuffer->Lock(&pBuffer, NULL, NULL));
+ memset(pBuffer, 0, nBufferSize);
+ CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
+ CHECK_HR(hr = m_pMediaBuffer->Unlock());
+ }
+
+ // Retrieve video Frame rate
+ UINT32 unNumerator, unDenominator;
+ CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &unNumerator, &unDenominator));
+ m_structVideoParams.nFps = (unNumerator / unDenominator);
+
+ // Retrieve sample duration based on framerate
+ m_rtCurrentPosition = 0;
+ CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(m_structVideoParams.nFps, 1, &m_rtDuration));
+ }
+ else {
+ TSK_DEBUG_ERROR("Only video media type is supported");
+ CHECK_HR(hr = E_NOTIMPL);
+ }
bail:
- SafeRelease(&pMediaTypeHandler);
- SafeRelease(&pMediaType);
+ SafeRelease(&pMediaTypeHandler);
+ SafeRelease(&pMediaType);
- return hr;
+ return hr;
}
// NOTE: Some of these methods hold the stream's critical section
@@ -1436,29 +1321,27 @@ bail:
HRESULT CMFStreamSource::CreateSample(IMFSample **ppSample)
{
- *ppSample = NULL;
+ *ppSample = NULL;
HRESULT hr = S_OK;
IMFSample *pSample = NULL;
- DWORD nCurrentLength = 0;
-
- CHECK_HR(hr = m_pMediaBuffer->GetCurrentLength(&nCurrentLength));
-
- if(nCurrentLength > 0)
- {
- CHECK_HR(hr = MFCreateSample(&pSample));
- CHECK_HR(hr = pSample->SetSampleTime(m_rtCurrentPosition));
- CHECK_HR(hr = pSample->SetSampleDuration(m_rtDuration));
- m_rtCurrentPosition += m_rtDuration;
- CHECK_HR(hr = pSample->AddBuffer(m_pMediaBuffer));
-
- if((*ppSample = pSample))
- {
- (*ppSample)->AddRef();
- }
- }
-
+ DWORD nCurrentLength = 0;
+
+ CHECK_HR(hr = m_pMediaBuffer->GetCurrentLength(&nCurrentLength));
+
+ if(nCurrentLength > 0) {
+ CHECK_HR(hr = MFCreateSample(&pSample));
+ CHECK_HR(hr = pSample->SetSampleTime(m_rtCurrentPosition));
+ CHECK_HR(hr = pSample->SetSampleDuration(m_rtDuration));
+ m_rtCurrentPosition += m_rtDuration;
+ CHECK_HR(hr = pSample->AddBuffer(m_pMediaBuffer));
+
+ if((*ppSample = pSample)) {
+ (*ppSample)->AddRef();
+ }
+ }
+
bail:
SafeRelease(&pSample);
return hr;
@@ -1472,15 +1355,13 @@ HRESULT CMFStreamSource::DeliverSample(IMFSample *pSample)
{
HRESULT hr = S_OK;
- if(pSample)
- {
- // Send the MEMediaSample event with the new sample.
- hr = QueueEventWithIUnknown(this, MEMediaSample, hr, pSample);
- }
+ if(pSample) {
+ // Send the MEMediaSample event with the new sample.
+ hr = QueueEventWithIUnknown(this, MEMediaSample, hr, pSample);
+ }
// See if we reached the end of the stream.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = CheckEndOfStream(); // This method sends MEEndOfStream if needed.
}
@@ -1506,25 +1387,20 @@ HRESULT CMFStreamSource::DeliverQueuedSamples()
// If we already reached the end of the stream, send the MEEndStream
// event again.
- if (m_EOS)
- {
+ if (m_EOS) {
hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Deliver any queued samples.
- while (!m_sampleQueue.IsEmpty())
- {
+ while (!m_sampleQueue.IsEmpty()) {
hr = m_sampleQueue.Dequeue(&pSample);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
break;
}
hr = DeliverSample(pSample);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
break;
}
@@ -1536,10 +1412,8 @@ HRESULT CMFStreamSource::DeliverQueuedSamples()
// If we reached the end of the stream, send the end-of-presentation event from
// the media source.
- if (SUCCEEDED(hr))
- {
- if (m_EOS)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_EOS) {
hr = m_pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
}
}
@@ -1578,8 +1452,7 @@ HRESULT CMFStreamSource::Shutdown()
Flush();
// Shut down the event queue.
- if (m_pEventQueue)
- {
+ if (m_pEventQueue) {
m_pEventQueue->Shutdown();
}
@@ -1602,37 +1475,37 @@ HRESULT CMFStreamSource::SetPosition(LONGLONG rtNewPosition)
{
EnterCriticalSection(&m_critSec);
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
-/*
- // Check if the requested position is beyond the end of the stream.
- LONGLONG duration = AudioDurationFromBufferSize(m_pRiff->Format(), m_pRiff->Chunk().DataSize());
+ /*
+ // Check if the requested position is beyond the end of the stream.
+ LONGLONG duration = AudioDurationFromBufferSize(m_pRiff->Format(), m_pRiff->Chunk().DataSize());
- if (rtNewPosition > duration)
- {
- LeaveCriticalSection(&m_critSec);
+ if (rtNewPosition > duration)
+ {
+ LeaveCriticalSection(&m_critSec);
- return MF_E_INVALIDREQUEST; // Start position is past the end of the presentation.
- }
+ return MF_E_INVALIDREQUEST; // Start position is past the end of the presentation.
+ }
- if (m_rtCurrentPosition != rtNewPosition)
- {
- LONGLONG offset = BufferSizeFromAudioDuration(m_pRiff->Format(), rtNewPosition);
+ if (m_rtCurrentPosition != rtNewPosition)
+ {
+ LONGLONG offset = BufferSizeFromAudioDuration(m_pRiff->Format(), rtNewPosition);
- // The chunk size is a DWORD. So if our calculations are correct, there is no
- // way that the maximum valid seek position can be larger than a DWORD.
- assert(offset <= MAXDWORD);
+ // The chunk size is a DWORD. So if our calculations are correct, there is no
+ // way that the maximum valid seek position can be larger than a DWORD.
+ assert(offset <= MAXDWORD);
- hr = m_pRiff->MoveToChunkOffset((DWORD)offset);
+ hr = m_pRiff->MoveToChunkOffset((DWORD)offset);
- if (SUCCEEDED(hr))
- {
- m_rtCurrentPosition = rtNewPosition;
- m_discontinuity = TRUE;
- m_EOS = FALSE;
+ if (SUCCEEDED(hr))
+ {
+ m_rtCurrentPosition = rtNewPosition;
+ m_discontinuity = TRUE;
+ m_EOS = FALSE;
+ }
}
- }
-*/
+ */
LeaveCriticalSection(&m_critSec);
return hr;
}
@@ -1640,18 +1513,18 @@ HRESULT CMFStreamSource::SetPosition(LONGLONG rtNewPosition)
HRESULT CMFStreamSource::CheckEndOfStream()
{
HRESULT hr = S_OK;
-/*
- if (m_pRiff->BytesRemainingInChunk() < m_pRiff->Format()->nBlockAlign)
- {
- // The remaining data is smaller than the audio block size. (In theory there shouldn't be
- // partial bits of data at the end, so we should reach an even zero bytes, but the file
- // might not be authored correctly.)
- m_EOS = TRUE;
-
- // Send the end-of-stream event,
- hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
- }
- */
+ /*
+ if (m_pRiff->BytesRemainingInChunk() < m_pRiff->Format()->nBlockAlign)
+ {
+ // The remaining data is smaller than the audio block size. (In theory there shouldn't be
+ // partial bits of data at the end, so we should reach an even zero bytes, but the file
+ // might not be authored correctly.)
+ m_EOS = TRUE;
+
+ // Send the end-of-stream event,
+ hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
+ }
+ */
return hr;
}
@@ -1697,8 +1570,7 @@ LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioData
{
assert(pWav != NULL);
- if (pWav->nAvgBytesPerSec == 0)
- {
+ if (pWav->nAvgBytesPerSec == 0) {
return 0;
}
return (LONGLONG)cbAudioDataSize * 10000000 / pWav->nAvgBytesPerSec;
@@ -1711,8 +1583,7 @@ LONGLONG BufferSizeFromAudioDuration(const WAVEFORMATEX *pWav, LONGLONG duration
ULONG ulRemainder = (ULONG)(cbSize % pWav->nBlockAlign);
// Round up to the next block.
- if(ulRemainder)
- {
+ if(ulRemainder) {
cbSize += pWav->nBlockAlign - ulRemainder;
}
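A worked example of the two conversions above, assuming a hypothetical 16 kHz, 16-bit mono PCM format (nBlockAlign = 2, nAvgBytesPerSec = 32000):

    // AudioDurationFromBufferSize: 640 bytes -> 640 * 10,000,000 / 32,000 = 200,000 (100-ns units) = 20 ms
    // BufferSizeFromAudioDuration: 20 ms     -> 200,000 * 32,000 / 10,000,000 = 640 bytes
    //                              (already a multiple of nBlockAlign = 2, so no rounding up is needed)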
diff --git a/plugins/pluginWinMF/internals/mf_custom_src.h b/plugins/pluginWinMF/internals/mf_custom_src.h
index f9194c9..15d8b90 100755
--- a/plugins/pluginWinMF/internals/mf_custom_src.h
+++ b/plugins/pluginWinMF/internals/mf_custom_src.h
@@ -1,20 +1,20 @@
-/*
+/*
* Copyright (C) Microsoft Corporation. All rights reserved.
* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -50,10 +50,10 @@ class CMFSource : public IMFMediaSource
public:
static HRESULT CreateInstance(REFIID iid, void **ppSource);
- static HRESULT CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType);
+ static HRESULT CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType);
- // IMFCustomSource
- HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
+ // IMFCustomSource
+ HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
// IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
@@ -80,8 +80,7 @@ public:
private:
- enum State
- {
+ enum State {
STATE_STOPPED,
STATE_PAUSED,
STATE_STARTED
@@ -92,14 +91,11 @@ private:
CMFSource(HRESULT &hr, IMFMediaType *pMediaType);
virtual ~CMFSource();
- HRESULT CheckShutdown() const
- {
- if (m_IsShutdown)
- {
+ HRESULT CheckShutdown() const {
+ if (m_IsShutdown) {
return MF_E_SHUTDOWN;
}
- else
- {
+ else {
return S_OK;
}
}
@@ -110,7 +106,9 @@ private:
HRESULT ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD);
LONGLONG GetCurrentPosition() const;
- State GetState() const { return m_state; }
+ State GetState() const {
+ return m_state;
+ }
IMFMediaEventQueue *m_pEventQueue; // Event generator helper
IMFPresentationDescriptor *m_pPresentationDescriptor; // Default presentation
@@ -122,7 +120,7 @@ private:
BOOL m_IsShutdown; // Flag to indicate if Shutdown() method was called.
State m_state; // Current state (running, stopped, paused)
- IMFMediaType *m_pMediaType; // The supported mediaType
+ IMFMediaType *m_pMediaType; // The supported mediaType
};
@@ -131,22 +129,21 @@ class SampleQueue
protected:
// Nodes in the linked list
- struct Node
- {
+ struct Node {
Node *prev;
Node *next;
IMFSample* item;
- Node() : prev(NULL), next(NULL)
- {
+ Node() : prev(NULL), next(NULL) {
}
- Node(IMFSample* item) : prev(NULL), next(NULL)
- {
+ Node(IMFSample* item) : prev(NULL), next(NULL) {
this->item = item;
}
- IMFSample* Item() const { return item; }
+ IMFSample* Item() const {
+ return item;
+ }
};
@@ -155,27 +152,22 @@ protected:
public:
- SampleQueue()
- {
+ SampleQueue() {
m_anchor.next = &m_anchor;
m_anchor.prev = &m_anchor;
}
- virtual ~SampleQueue()
- {
+ virtual ~SampleQueue() {
Clear();
}
- HRESULT Queue(IMFSample* item)
- {
- if (item == NULL)
- {
+ HRESULT Queue(IMFSample* item) {
+ if (item == NULL) {
return E_POINTER;
}
Node *pNode = new (std::nothrow) Node(item);
- if (pNode == NULL)
- {
+ if (pNode == NULL) {
return E_OUTOFMEMORY;
}
@@ -195,14 +187,11 @@ public:
}
- HRESULT Dequeue(IMFSample* *ppItem)
- {
- if (IsEmpty())
- {
+ HRESULT Dequeue(IMFSample**ppItem) {
+ if (IsEmpty()) {
return E_FAIL;
}
- if (ppItem == NULL)
- {
+ if (ppItem == NULL) {
return E_POINTER;
}
@@ -220,17 +209,16 @@ public:
return S_OK;
}
- BOOL IsEmpty() const { return m_anchor.next == &m_anchor; }
+ BOOL IsEmpty() const {
+ return m_anchor.next == &m_anchor;
+ }
- void Clear()
- {
+ void Clear() {
Node *n = m_anchor.next;
// Delete the nodes
- while (n != &m_anchor)
- {
- if (n->item)
- {
+ while (n != &m_anchor) {
+ if (n->item) {
n->item->Release();
}
@@ -260,8 +248,8 @@ class CMFStreamSource : public IMFMediaStream
public:
- // IMFCustomSource
- HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
+ // IMFCustomSource
+ HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
// IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
@@ -285,26 +273,25 @@ private:
~CMFStreamSource();
- HRESULT CheckShutdown() const
- {
- if (m_IsShutdown)
- {
+ HRESULT CheckShutdown() const {
+ if (m_IsShutdown) {
return MF_E_SHUTDOWN;
}
- else
- {
+ else {
return S_OK;
}
}
- HRESULT InitializeParams();
+ HRESULT InitializeParams();
HRESULT Shutdown();
HRESULT CreateSample(IMFSample **pSample);
HRESULT DeliverSample(IMFSample *pSample);
HRESULT DeliverQueuedSamples();
HRESULT Flush();
- LONGLONG GetCurrentPosition() const { return m_rtCurrentPosition; }
+ LONGLONG GetCurrentPosition() const {
+ return m_rtCurrentPosition;
+ }
HRESULT SetPosition(LONGLONG rtNewPosition);
HRESULT CheckEndOfStream();
@@ -313,7 +300,7 @@ private:
CRITICAL_SECTION m_critSec;
BOOL m_IsShutdown; // Flag to indicate if source's Shutdown() method was called.
LONGLONG m_rtCurrentPosition; // Current position in the stream, in 100-ns units
- UINT64 m_rtDuration; // Sample duration, in 100-ns units
+ UINT64 m_rtDuration; // Sample duration, in 100-ns units
BOOL m_discontinuity; // Is the next sample a discontinuity?
BOOL m_EOS; // Did we reach the end of the stream?
@@ -322,18 +309,17 @@ private:
IMFStreamDescriptor *m_pStreamDescriptor; // Stream descriptor for this stream.
SampleQueue m_sampleQueue; // Queue for samples while paused.
- GUID m_guidMajorType; // major media type (e.g. MFMediaType_Video or MFMediaType_Audio)
- GUID m_guidSubType; // Media subtype (e.g. MFVideoFormat_RGB32 or MFVideoFormat_H264)
- IMFMediaBuffer *m_pMediaBuffer; // Pointer to the data to deliver
- UINT32 m_nBufferSize; // Size of the data to deliver
-
- struct
- {
- UINT32 nWidth;
- UINT32 nHeigh;
- UINT32 nFps;
- }
- m_structVideoParams;
+ GUID m_guidMajorType; // major media type (e.g. MFMediaType_Video or MFMediaType_Audio)
+ GUID m_guidSubType; // Media subtype (e.g. MFVideoFormat_RGB32 or MFVideoFormat_H264)
+ IMFMediaBuffer *m_pMediaBuffer; // Pointer to the data to deliver
+ UINT32 m_nBufferSize; // Size of the data to deliver
+
+ struct {
+ UINT32 nWidth;
+ UINT32 nHeigh;
+ UINT32 nFps;
+ }
+ m_structVideoParams;
};
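SampleQueue (and MFSampleQueue in mf_sample_queue.h) keep samples in a circular doubly linked list with a sentinel node: the list is empty when m_anchor.next points back to m_anchor, so Queue()/Dequeue() never special-case an empty list. A minimal sketch of the same pattern with a plain payload (illustrative only, not part of the plugin):

    struct Node { Node* prev; Node* next; int item; };
    Node anchor;                               // sentinel: list is empty when anchor.next == &anchor
    anchor.next = anchor.prev = &anchor;
    // Queue: link the new node just before the sentinel (i.e. at the tail)
    Node* n = new Node{ anchor.prev, &anchor, 42 };
    anchor.prev->next = n;
    anchor.prev = n;
    // Dequeue: unlink the node right after the sentinel (i.e. the head)
    Node* head = anchor.next;
    anchor.next = head->next;
    head->next->prev = &anchor;
    delete head;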
diff --git a/plugins/pluginWinMF/internals/mf_devices.cxx b/plugins/pluginWinMF/internals/mf_devices.cxx
index 22b862e..49005de 100755
--- a/plugins/pluginWinMF/internals/mf_devices.cxx
+++ b/plugins/pluginWinMF/internals/mf_devices.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -20,25 +20,25 @@
#include "mf_utils.h"
DeviceList::DeviceList()
-: m_ppDevices(NULL)
-, m_cDevices(0)
+ : m_ppDevices(NULL)
+ , m_cDevices(0)
{
-
+
}
DeviceList::~DeviceList()
{
- Clear();
+ Clear();
}
UINT32 DeviceList::Count()const
{
- return m_cDevices;
+ return m_cDevices;
}
void DeviceList::Clear()
{
- for (UINT32 i = 0; i < m_cDevices; i++) {
+ for (UINT32 i = 0; i < m_cDevices; i++) {
SafeRelease(&m_ppDevices[i]);
}
CoTaskMemFree(m_ppDevices);
@@ -49,7 +49,7 @@ void DeviceList::Clear()
HRESULT DeviceList::EnumerateDevices(const GUID& sourceType)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
IMFAttributes *pAttributes = NULL;
Clear();
@@ -60,17 +60,15 @@ HRESULT DeviceList::EnumerateDevices(const GUID& sourceType)
hr = MFCreateAttributes(&pAttributes, 1);
// Ask for source type = video capture devices
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pAttributes->SetGUID(
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- sourceType
- );
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ sourceType
+ );
}
// Enumerate devices.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
}
@@ -81,8 +79,7 @@ HRESULT DeviceList::EnumerateDevices(const GUID& sourceType)
HRESULT DeviceList::GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate)
{
- if (index >= Count())
- {
+ if (index >= Count()) {
return E_INVALIDARG;
}
@@ -94,58 +91,52 @@ HRESULT DeviceList::GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate)
HRESULT DeviceList::GetDeviceBest(IMFActivate **ppActivate, WCHAR *pszName /*= NULL*/)
{
- UINT32 index = 0;
- if(pszName)
- {
- WCHAR *_pszName = NULL;
- BOOL bFound = FALSE;
- for(UINT32 i = 0; i < Count() && !bFound; ++i)
- {
- if((SUCCEEDED(GetDeviceName(i, &_pszName))))
- {
- if(wcscmp(_pszName, pszName) == 0)
- {
- index = i;
- bFound = TRUE;
- // do not break the loop because we need to free(_pszName)
- }
- }
- if(_pszName)
- {
- CoTaskMemFree(_pszName), _pszName = NULL;
- }
- }
- }
- return GetDeviceAtIndex(index, ppActivate);
+ UINT32 index = 0;
+ if(pszName) {
+ WCHAR *_pszName = NULL;
+ BOOL bFound = FALSE;
+ for(UINT32 i = 0; i < Count() && !bFound; ++i) {
+ if((SUCCEEDED(GetDeviceName(i, &_pszName)))) {
+ if(wcscmp(_pszName, pszName) == 0) {
+ index = i;
+ bFound = TRUE;
+ // do not break the loop because we need to free(_pszName)
+ }
+ }
+ if(_pszName) {
+ CoTaskMemFree(_pszName), _pszName = NULL;
+ }
+ }
+ }
+ return GetDeviceAtIndex(index, ppActivate);
}
// The caller must free the memory for the string by calling CoTaskMemFree
HRESULT DeviceList::GetDeviceName(UINT32 index, WCHAR **ppszName)
{
- if (index >= Count())
- {
+ if (index >= Count()) {
return E_INVALIDARG;
}
HRESULT hr = S_OK;
hr = m_ppDevices[index]->GetAllocatedString(
- MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
- ppszName,
- NULL
- );
+ MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
+ ppszName,
+ NULL
+ );
return hr;
}
HRESULT DeviceListAudio::EnumerateDevices()
{
- // call base class function
- return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
+ // call base class function
+ return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
}
HRESULT DeviceListVideo::EnumerateDevices()
{
- // call base class function
- return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+ // call base class function
+ return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
}
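A hedged usage sketch of the lists above: enumerate the video capture devices, pick the first one (or a specific one by friendly name), and activate it as an IMFMediaSource. Error handling is trimmed and the variable names are illustrative:

    DeviceListVideo videoList;
    IMFActivate* pActivate = NULL;
    IMFMediaSource* pSource = NULL;
    HRESULT hr = videoList.EnumerateDevices();
    if (SUCCEEDED(hr) && videoList.Count() > 0) {
        hr = videoList.GetDeviceBest(&pActivate, NULL); // NULL -> first device; pass a friendly name to select a camera
    }
    if (SUCCEEDED(hr)) {
        hr = pActivate->ActivateObject(IID_PPV_ARGS(&pSource));
    }
    SafeRelease(&pActivate);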
diff --git a/plugins/pluginWinMF/internals/mf_devices.h b/plugins/pluginWinMF/internals/mf_devices.h
index 03d010f..3a16a29 100755
--- a/plugins/pluginWinMF/internals/mf_devices.h
+++ b/plugins/pluginWinMF/internals/mf_devices.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -46,19 +46,19 @@ public:
HRESULT GetDeviceName(UINT32 index, WCHAR **ppszName);
protected:
- HRESULT EnumerateDevices(const GUID& sourceType);
+ HRESULT EnumerateDevices(const GUID& sourceType);
};
class DeviceListAudio : public DeviceList
{
public:
- HRESULT EnumerateDevices();
+ HRESULT EnumerateDevices();
};
class DeviceListVideo : public DeviceList
{
public:
- HRESULT EnumerateDevices();
+ HRESULT EnumerateDevices();
};
#endif /* PLUGIN_WIN_MF_DEVICES_H */
diff --git a/plugins/pluginWinMF/internals/mf_display_watcher.cxx b/plugins/pluginWinMF/internals/mf_display_watcher.cxx
index 62dbc5f..38f3687 100755
--- a/plugins/pluginWinMF/internals/mf_display_watcher.cxx
+++ b/plugins/pluginWinMF/internals/mf_display_watcher.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -24,137 +24,126 @@
#include <assert.h>
DisplayWatcher::DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr)
-: m_pDisplayControl(NULL)
-, m_hWnd(hWnd)
-, m_pWndProc(NULL)
-, m_bStarted(FALSE)
-, m_bFullScreen(FALSE)
+ : m_pDisplayControl(NULL)
+ , m_hWnd(hWnd)
+ , m_pWndProc(NULL)
+ , m_bStarted(FALSE)
+ , m_bFullScreen(FALSE)
{
- IMFGetService *pService = NULL;
+ IMFGetService *pService = NULL;
- CHECK_HR(hr = pMediaSink->QueryInterface(__uuidof(IMFGetService), (void**)&pService));
- CHECK_HR(hr = pService->GetService(MR_VIDEO_RENDER_SERVICE, __uuidof(IMFVideoDisplayControl), (void**)&m_pDisplayControl));
- CHECK_HR(hr = m_pDisplayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture));
+ CHECK_HR(hr = pMediaSink->QueryInterface(__uuidof(IMFGetService), (void**)&pService));
+ CHECK_HR(hr = pService->GetService(MR_VIDEO_RENDER_SERVICE, __uuidof(IMFVideoDisplayControl), (void**)&m_pDisplayControl));
+ CHECK_HR(hr = m_pDisplayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture));
bail:
- SafeRelease(&pService);
+ SafeRelease(&pService);
}
DisplayWatcher::~DisplayWatcher()
{
- Stop();
+ Stop();
- SafeRelease(&m_pDisplayControl);
+ SafeRelease(&m_pDisplayControl);
}
HRESULT DisplayWatcher::Start()
{
- HRESULT hr = S_OK;
- HWND hWnd = m_hWnd; // save()
- CHECK_HR(hr = Stop());
-
- if((m_hWnd = hWnd) && m_pDisplayControl)
- {
- CHECK_HR(hr = m_pDisplayControl->SetVideoWindow(hWnd));
-
- BOOL ret = SetPropA(m_hWnd, "This", this);
- assert(ret);
-
+ HRESULT hr = S_OK;
+    HWND hWnd = m_hWnd; // save, because Stop() below resets m_hWnd
+ CHECK_HR(hr = Stop());
+
+ if((m_hWnd = hWnd) && m_pDisplayControl) {
+ CHECK_HR(hr = m_pDisplayControl->SetVideoWindow(hWnd));
+
+ BOOL ret = SetPropA(m_hWnd, "This", this);
+ assert(ret);
+
#if _M_X64
- m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)DisplayWatcher::WndProc);
+ m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)DisplayWatcher::WndProc);
#else
- m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)DisplayWatcher::WndProc);
+ m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)DisplayWatcher::WndProc);
#endif
- UpdatePosition(); // black screen if attached later
- }
- m_bStarted = TRUE;
+        UpdatePosition(); // refresh now, otherwise the video stays black when the window is attached later
+ }
+ m_bStarted = TRUE;
bail:
- return hr;
+ return hr;
}
HRESULT DisplayWatcher::SetFullscreen(BOOL bEnabled)
{
- if(m_pDisplayControl)
- {
- HRESULT hr = m_pDisplayControl->SetFullscreen(bEnabled);
- m_bFullScreen = SUCCEEDED(hr);
- return hr;
- }
-
- return E_FAIL;
+ if(m_pDisplayControl) {
+ HRESULT hr = m_pDisplayControl->SetFullscreen(bEnabled);
+ m_bFullScreen = SUCCEEDED(hr);
+ return hr;
+ }
+
+ return E_FAIL;
}
HRESULT DisplayWatcher::SetHwnd(HWND hWnd)
{
- BOOL bWasStarted = m_bStarted;
- Stop();
- m_hWnd = hWnd;
- if(bWasStarted)
- {
- return Start();
- }
- return S_OK;
+ BOOL bWasStarted = m_bStarted;
+ Stop();
+ m_hWnd = hWnd;
+ if(bWasStarted) {
+ return Start();
+ }
+ return S_OK;
}
HRESULT DisplayWatcher::Stop()
{
- if(m_hWnd && m_pWndProc)
- {
- // Restore
-
+ if(m_hWnd && m_pWndProc) {
+ // Restore
+
#if _M_X64
- SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)m_pWndProc);
+ SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)m_pWndProc);
#else
- SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)m_pWndProc);
+ SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)m_pWndProc);
#endif
- }
- m_hWnd = NULL;
- m_pWndProc = NULL;
- m_bStarted = FALSE;
- return S_OK;
+ }
+ m_hWnd = NULL;
+ m_pWndProc = NULL;
+ m_bStarted = FALSE;
+ return S_OK;
}
void DisplayWatcher::UpdatePosition()
{
- if(m_pDisplayControl && m_hWnd)
- {
- RECT rcDst = { 0, 0, 0, 0 };
- GetClientRect(m_hWnd, &rcDst);
- m_pDisplayControl->SetVideoPosition(NULL, &rcDst);
- }
+ if(m_pDisplayControl && m_hWnd) {
+ RECT rcDst = { 0, 0, 0, 0 };
+ GetClientRect(m_hWnd, &rcDst);
+ m_pDisplayControl->SetVideoPosition(NULL, &rcDst);
+ }
}
LRESULT CALLBACK DisplayWatcher::WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- switch(uMsg)
- {
- case WM_CREATE:
- case WM_SIZE:
- case WM_MOVE:
- {
- DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
- if(This)
- {
- This->UpdatePosition();
- }
- break;
- }
-
- case WM_CHAR:
- case WM_KEYUP:
- {
- DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
- if(This)
- {
- if(This->m_bFullScreen && (wParam == 0x1B || wParam == VK_ESCAPE))
- {
- This->SetFullscreen(FALSE);
- }
- }
-
- break;
- }
- }
-
- return DefWindowProc(hWnd, uMsg, wParam, lParam);
+ switch(uMsg) {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE: {
+ DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
+ if(This) {
+ This->UpdatePosition();
+ }
+ break;
+ }
+
+ case WM_CHAR:
+ case WM_KEYUP: {
+ DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
+ if(This) {
+ if(This->m_bFullScreen && (wParam == 0x1B || wParam == VK_ESCAPE)) {
+ This->SetFullscreen(FALSE);
+ }
+ }
+
+ break;
+ }
+ }
+
+ return DefWindowProc(hWnd, uMsg, wParam, lParam);
} \ No newline at end of file
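DisplayWatcher subclasses the render window so that WM_SIZE/WM_MOVE keep the EVR video rectangle in sync and ESC leaves full-screen mode. A sketch of how the video consumer presumably drives it (hWndPreview and pMediaSink are placeholders for the consumer's window handle and EVR sink):

    HRESULT hr = S_OK;
    DisplayWatcher* pWatcher = new (std::nothrow) DisplayWatcher(hWndPreview, pMediaSink, hr);
    if (pWatcher && SUCCEEDED(hr)) {
        hr = pWatcher->Start();            // hooks the window proc and positions the video
        // pWatcher->SetHwnd(hNewWnd);     // e.g. when the UI re-parents the preview window
        // pWatcher->SetFullscreen(TRUE);  // ESC (handled in WndProc above) restores windowed mode
    }
    // ...
    if (pWatcher) { pWatcher->Stop(); delete pWatcher; }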
diff --git a/plugins/pluginWinMF/internals/mf_display_watcher.h b/plugins/pluginWinMF/internals/mf_display_watcher.h
index d41d6a6..127fb35 100755
--- a/plugins/pluginWinMF/internals/mf_display_watcher.h
+++ b/plugins/pluginWinMF/internals/mf_display_watcher.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,25 +31,25 @@
class DisplayWatcher
{
public:
- DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr);
- virtual ~DisplayWatcher();
+ DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr);
+ virtual ~DisplayWatcher();
public:
- HRESULT Start();
- HRESULT SetFullscreen(BOOL bEnabled);
- HRESULT SetHwnd(HWND hWnd);
- HRESULT Stop();
+ HRESULT Start();
+ HRESULT SetFullscreen(BOOL bEnabled);
+ HRESULT SetHwnd(HWND hWnd);
+ HRESULT Stop();
private:
- void UpdatePosition();
- static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+ void UpdatePosition();
+ static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
private:
- IMFVideoDisplayControl *m_pDisplayControl;
- HWND m_hWnd;
- WNDPROC m_pWndProc;
- BOOL m_bStarted;
- BOOL m_bFullScreen;
+ IMFVideoDisplayControl *m_pDisplayControl;
+ HWND m_hWnd;
+ WNDPROC m_pWndProc;
+ BOOL m_bStarted;
+ BOOL m_bFullScreen;
};
#endif /* PLUGIN_WIN_MF_DISPLAY_WATCHER_H */
diff --git a/plugins/pluginWinMF/internals/mf_sample_grabber.cxx b/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
index 87aa6af..8d148a3 100755
--- a/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
+++ b/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -28,108 +28,105 @@
// Create a new instance of the object.
HRESULT SampleGrabberCB::CreateInstance(const struct tmedia_producer_s* pcWrappedProducer, SampleGrabberCB **ppCB)
{
- assert(pcWrappedProducer);
+ assert(pcWrappedProducer);
- *ppCB = new (std::nothrow) SampleGrabberCB(pcWrappedProducer);
+ *ppCB = new (std::nothrow) SampleGrabberCB(pcWrappedProducer);
- if (ppCB == NULL)
- {
- return E_OUTOFMEMORY;
- }
- return S_OK;
+    if (*ppCB == NULL) { // check the allocation result, not the out-parameter itself
+ return E_OUTOFMEMORY;
+ }
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::QueryInterface(REFIID riid, void** ppv)
{
- static const QITAB qit[] =
- {
- QITABENT(SampleGrabberCB, IMFSampleGrabberSinkCallback),
- QITABENT(SampleGrabberCB, IMFClockStateSink),
- { 0 }
- };
- return QISearch(this, qit, riid, ppv);
+ static const QITAB qit[] = {
+ QITABENT(SampleGrabberCB, IMFSampleGrabberSinkCallback),
+ QITABENT(SampleGrabberCB, IMFClockStateSink),
+ { 0 }
+ };
+ return QISearch(this, qit, riid, ppv);
}
STDMETHODIMP_(ULONG) SampleGrabberCB::AddRef()
{
- return InterlockedIncrement(&m_cRef);
+ return InterlockedIncrement(&m_cRef);
}
STDMETHODIMP_(ULONG) SampleGrabberCB::Release()
{
- ULONG cRef = InterlockedDecrement(&m_cRef);
- if (cRef == 0)
- {
- delete this;
- }
- return cRef;
+ ULONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ delete this;
+ }
+ return cRef;
}
// IMFClockStateSink methods.
-// In these example, the IMFClockStateSink methods do not perform any actions.
+// In this example, the IMFClockStateSink methods do not perform any actions.
// You can use these methods to track the state of the sample grabber sink.
STDMETHODIMP SampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnClockStop(MFTIME hnsSystemTime)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnClockPause(MFTIME hnsSystemTime)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
+ return S_OK;
}
// IMFSampleGrabberSink methods.
STDMETHODIMP SampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnSetPresentationClock");
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnSetPresentationClock");
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnProcessSample(
- REFGUID guidMajorMediaType, DWORD dwSampleFlags,
- LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
- DWORD dwSampleSize)
+ REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+ LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+ DWORD dwSampleSize)
{
- if (m_pWrappedProducer && TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback) {
+ if (m_pWrappedProducer && TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback) {
#if 1
- if (m_bMuted) {
- // Send zeros. Do not skip sending data to avoid NAT issues and session deconnection.
- // Some TelePresence systems disconnect the session when the remote peer stops sending video data.
- memset((void*)pSampleBuffer, 0, dwSampleSize);
- }
+ if (m_bMuted) {
+            // Send zeros. Do not skip sending data, to avoid NAT issues and session disconnection.
+ // Some TelePresence systems disconnect the session when the remote peer stops sending video data.
+ memset((void*)pSampleBuffer, 0, dwSampleSize);
+ }
#endif
- TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback_data, pSampleBuffer, dwSampleSize);
- }
+ TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback_data, pSampleBuffer, dwSampleSize);
+ }
- return S_OK;
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnShutdown()
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnShutdown");
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnShutdown");
+ return S_OK;
} \ No newline at end of file
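A sketch of how this callback is typically wired into a sample-grabber sink: create the callback for the wrapped producer, build an uncompressed video type, then pass both to MFCreateSampleGrabberSinkActivate() so every rendered frame lands in OnProcessSample() above (pcProducer and the 640x480 size are placeholders):

    SampleGrabberCB* pCallback = NULL;
    IMFMediaType* pType = NULL;
    IMFActivate* pSinkActivate = NULL;
    HRESULT hr = SampleGrabberCB::CreateInstance(pcProducer, &pCallback);
    if (SUCCEEDED(hr)) {
        hr = MFUtils::CreateVideoType(&MFVideoFormat_NV12, &pType, 640, 480);
    }
    if (SUCCEEDED(hr)) {
        // pSinkActivate is then typically added as an output node in the capture topology
        hr = MFCreateSampleGrabberSinkActivate(pType, pCallback, &pSinkActivate);
    }
    SafeRelease(&pType);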
diff --git a/plugins/pluginWinMF/internals/mf_sample_grabber.h b/plugins/pluginWinMF/internals/mf_sample_grabber.h
index 858f3c1..9ea239d 100755
--- a/plugins/pluginWinMF/internals/mf_sample_grabber.h
+++ b/plugins/pluginWinMF/internals/mf_sample_grabber.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,9 +31,9 @@
// Sample Grabber callback [Declaration]
// http://msdn.microsoft.com/en-us/library/windows/desktop/hh184779(v=vs.85).aspx
//
-class SampleGrabberCB : public IMFSampleGrabberSinkCallback
+class SampleGrabberCB : public IMFSampleGrabberSinkCallback
{
- bool m_bMuted;
+ bool m_bMuted;
long m_cRef;
const struct tmedia_producer_s* m_pWrappedProducer;
@@ -42,7 +42,9 @@ class SampleGrabberCB : public IMFSampleGrabberSinkCallback
public:
static HRESULT CreateInstance(const struct tmedia_producer_s* pcWrappedProducer, SampleGrabberCB **ppCB);
- void SetMute(bool bMuted) { m_bMuted = bMuted; }
+ void SetMute(bool bMuted) {
+ m_bMuted = bMuted;
+ }
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
@@ -59,8 +61,8 @@ public:
// IMFSampleGrabberSinkCallback methods
STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock);
STDMETHODIMP OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
- LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
- DWORD dwSampleSize);
+ LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+ DWORD dwSampleSize);
STDMETHODIMP OnShutdown();
};
diff --git a/plugins/pluginWinMF/internals/mf_sample_queue.cxx b/plugins/pluginWinMF/internals/mf_sample_queue.cxx
index 05c2bc6..32630b8 100755
--- a/plugins/pluginWinMF/internals/mf_sample_queue.cxx
+++ b/plugins/pluginWinMF/internals/mf_sample_queue.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -21,39 +21,38 @@
#include <assert.h>
MFSampleQueue::MFSampleQueue()
-: m_nRefCount(1)
-, m_nCount(0)
+ : m_nRefCount(1)
+ , m_nCount(0)
{
- InitializeCriticalSection(&m_critSec);
+ InitializeCriticalSection(&m_critSec);
- m_anchor.next = &m_anchor;
- m_anchor.prev = &m_anchor;
+ m_anchor.next = &m_anchor;
+ m_anchor.prev = &m_anchor;
}
MFSampleQueue::~MFSampleQueue()
{
- assert(m_nRefCount == 0);
+ assert(m_nRefCount == 0);
- Clear();
+ Clear();
- DeleteCriticalSection(&m_critSec);
+ DeleteCriticalSection(&m_critSec);
}
STDMETHODIMP MFSampleQueue::QueryInterface(REFIID iid, void** ppv)
{
- return E_NOTIMPL;
+ return E_NOTIMPL;
}
STDMETHODIMP_(ULONG) MFSampleQueue::AddRef()
{
- return InterlockedIncrement(&m_nRefCount);
+ return InterlockedIncrement(&m_nRefCount);
}
STDMETHODIMP_(ULONG) MFSampleQueue::Release()
{
- ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ ULONG uCount = InterlockedDecrement(&m_nRefCount);
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -62,20 +61,18 @@ STDMETHODIMP_(ULONG) MFSampleQueue::Release()
HRESULT MFSampleQueue::Queue(IMFSample* item)
{
- if (item == NULL)
- {
+ if (item == NULL) {
return E_POINTER;
}
Node *pNode = new (std::nothrow) Node(item);
- if (pNode == NULL)
- {
+ if (pNode == NULL) {
return E_OUTOFMEMORY;
}
item->AddRef();
- EnterCriticalSection(&m_critSec);
+ EnterCriticalSection(&m_critSec);
Node *pBefore = m_anchor.prev;
@@ -87,25 +84,23 @@ HRESULT MFSampleQueue::Queue(IMFSample* item)
pNode->prev = pBefore;
pNode->next = pAfter;
- m_nCount++;
+ m_nCount++;
- LeaveCriticalSection(&m_critSec);
+ LeaveCriticalSection(&m_critSec);
return S_OK;
}
-HRESULT MFSampleQueue::Dequeue(IMFSample* *ppItem)
+HRESULT MFSampleQueue::Dequeue(IMFSample**ppItem)
{
- if (ppItem == NULL)
- {
+ if (ppItem == NULL) {
return E_POINTER;
}
- EnterCriticalSection(&m_critSec);
+ EnterCriticalSection(&m_critSec);
- if (IsEmpty())
- {
- LeaveCriticalSection(&m_critSec);
+ if (IsEmpty()) {
+ LeaveCriticalSection(&m_critSec);
return E_FAIL;
}
@@ -120,24 +115,22 @@ HRESULT MFSampleQueue::Dequeue(IMFSample* *ppItem)
*ppItem = pNode->item;
delete pNode;
- m_nCount--;
+ m_nCount--;
- LeaveCriticalSection(&m_critSec);
+ LeaveCriticalSection(&m_critSec);
return S_OK;
}
HRESULT MFSampleQueue::Clear()
{
- EnterCriticalSection(&m_critSec);
+ EnterCriticalSection(&m_critSec);
- Node *n = m_anchor.next;
+ Node *n = m_anchor.next;
// Delete the nodes
- while (n != &m_anchor)
- {
- if (n->item)
- {
+ while (n != &m_anchor) {
+ if (n->item) {
n->item->Release();
}
@@ -150,9 +143,9 @@ HRESULT MFSampleQueue::Clear()
m_anchor.next = &m_anchor;
m_anchor.prev = &m_anchor;
- m_nCount = 0;
+ m_nCount = 0;
- LeaveCriticalSection(&m_critSec);
+ LeaveCriticalSection(&m_critSec);
- return S_OK;
+ return S_OK;
}
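Note the reference-counting contract: Queue() AddRef()s the sample, and Dequeue() transfers that reference to the caller (no extra AddRef/Release). A usage sketch, with pQueue and pSampleIn as placeholders:

    // Producer side: the queue takes its own reference
    hr = pQueue->Queue(pSampleIn);
    SafeRelease(&pSampleIn);                    // safe: the queue still holds a reference
    // Consumer side: the caller owns the dequeued reference and must release it
    IMFSample* pSampleOut = NULL;
    if (SUCCEEDED(pQueue->Dequeue(&pSampleOut))) {
        // ... deliver or process pSampleOut ...
        SafeRelease(&pSampleOut);
    }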
diff --git a/plugins/pluginWinMF/internals/mf_sample_queue.h b/plugins/pluginWinMF/internals/mf_sample_queue.h
index b42ecde..b110a06 100755
--- a/plugins/pluginWinMF/internals/mf_sample_queue.h
+++ b/plugins/pluginWinMF/internals/mf_sample_queue.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -32,49 +32,52 @@ class MFSampleQueue : public IUnknown
protected:
// Nodes in the linked list
- struct Node
- {
+ struct Node {
Node *prev;
Node *next;
IMFSample* item;
- Node() : prev(NULL), next(NULL)
- {
+ Node() : prev(NULL), next(NULL) {
}
- Node(IMFSample* item) : prev(NULL), next(NULL)
- {
+ Node(IMFSample* item) : prev(NULL), next(NULL) {
this->item = item;
}
- IMFSample* Item() const { return item; }
+ IMFSample* Item() const {
+ return item;
+ }
};
protected:
Node m_anchor;
- long m_nCount;
- CRITICAL_SECTION m_critSec;
+ long m_nCount;
+ CRITICAL_SECTION m_critSec;
private:
- long m_nRefCount;
+ long m_nRefCount;
public:
MFSampleQueue();
virtual ~MFSampleQueue();
- // IUnknown
+ // IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
STDMETHODIMP_(ULONG) AddRef();
STDMETHODIMP_(ULONG) Release();
HRESULT Queue(IMFSample* item);
- HRESULT Dequeue(IMFSample* *ppItem);
+ HRESULT Dequeue(IMFSample**ppItem);
HRESULT Clear();
- inline BOOL IsEmpty() const { return m_anchor.next == &m_anchor; }
- inline long Count() { return m_nCount; }
+ inline BOOL IsEmpty() const {
+ return m_anchor.next == &m_anchor;
+ }
+ inline long Count() {
+ return m_nCount;
+ }
};
diff --git a/plugins/pluginWinMF/internals/mf_utils.cxx b/plugins/pluginWinMF/internals/mf_utils.cxx
index d1f326c..bcb63f1 100755
--- a/plugins/pluginWinMF/internals/mf_utils.cxx
+++ b/plugins/pluginWinMF/internals/mf_utils.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -74,35 +74,34 @@ const TOPOID MFUtils::g_ullTopoIdSource = 333;
const TOPOID MFUtils::g_ullTopoIdVideoProcessor = 444;
// Preferred VideoSubTypes
-static const VideoSubTypeGuidPair PreferredVideoSubTypeGuidPairs[] =
-{
- { tmedia_chroma_yuv420p, MFVideoFormat_I420 },
- { tmedia_chroma_nv12, MFVideoFormat_NV12 },
- { tmedia_chroma_uyvy422, MFVideoFormat_UYVY },
- { tmedia_chroma_yuyv422, MFVideoFormat_YUY2 },
- /* TODO: Add more YUV formats */
- { tmedia_chroma_rgb565le, MFVideoFormat_RGB565 },
- { tmedia_chroma_bgr24, MFVideoFormat_RGB24 },
- { tmedia_chroma_rgb32, MFVideoFormat_RGB32 },
+static const VideoSubTypeGuidPair PreferredVideoSubTypeGuidPairs[] = {
+ { tmedia_chroma_yuv420p, MFVideoFormat_I420 },
+ { tmedia_chroma_nv12, MFVideoFormat_NV12 },
+ { tmedia_chroma_uyvy422, MFVideoFormat_UYVY },
+ { tmedia_chroma_yuyv422, MFVideoFormat_YUY2 },
+ /* TODO: Add more YUV formats */
+ { tmedia_chroma_rgb565le, MFVideoFormat_RGB565 },
+ { tmedia_chroma_bgr24, MFVideoFormat_RGB24 },
+ { tmedia_chroma_rgb32, MFVideoFormat_RGB32 },
};
static const tsk_size_t PreferredVideoSubTypeGuidPairsCount = sizeof(PreferredVideoSubTypeGuidPairs)/sizeof(PreferredVideoSubTypeGuidPairs[0]);
// Video Processor
-DEFINE_GUID(CLSID_VideoProcessorMFT,
- 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, 0xc9, 0x82);
+DEFINE_GUID(CLSID_VideoProcessorMFT,
+ 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, 0xc9, 0x82);
// {4BE8D3C0-0515-4A37-AD55-E4BAE19AF471}
DEFINE_GUID(CLSID_MF_INTEL_H264EncFilter, // Intel Quick Sync Encoder
-0x4be8d3c0, 0x0515, 0x4a37, 0xad, 0x55, 0xe4, 0xba, 0xe1, 0x9a, 0xf4, 0x71);
+ 0x4be8d3c0, 0x0515, 0x4a37, 0xad, 0x55, 0xe4, 0xba, 0xe1, 0x9a, 0xf4, 0x71);
// {0855C9AC-BC6F-4371-8954-671CCD4EC16F}
DEFINE_GUID(CLSID_MF_INTEL_H264DecFilter, // Intel Quick Sync Decoder
-0x0855c9ac, 0xbc6f, 0x4371, 0x89, 0x54, 0x67, 0x1c, 0xcd, 0x4e, 0xc1, 0x6f);
+ 0x0855c9ac, 0xbc6f, 0x4371, 0x89, 0x54, 0x67, 0x1c, 0xcd, 0x4e, 0xc1, 0x6f);
#if WINVER < 0x0602/* From "sdkddkver.h" and defines the SDK version not the host */
// 6ca50344-051a-4ded-9779-a43305165e35
DEFINE_GUID(CLSID_CMSH264EncoderMFT, // MS H.264 encoder
-0x6ca50344, 0x051a, 0x4ded, 0x97, 0x79, 0xa4, 0x33, 0x05, 0x16, 0x5e, 0x35);
+ 0x6ca50344, 0x051a, 0x4ded, 0x97, 0x79, 0xa4, 0x33, 0x05, 0x16, 0x5e, 0x35);
#endif /* WINVER */
#define IsWin7_OrLater(dwMajorVersion, dwMinorVersion) ( (dwMajorVersion > 6) || ( (dwMajorVersion == 6) && (dwMinorVersion >= 1) ) )
@@ -111,249 +110,231 @@ DEFINE_GUID(CLSID_CMSH264EncoderMFT, // MS H.264 encoder
HRESULT MFUtils::Startup()
{
- if(!g_bStarted)
- {
- HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
- if(SUCCEEDED(hr) || hr == 0x80010106) // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
- {
- hr = MFStartup(MF_VERSION);
- }
- g_bStarted = SUCCEEDED(hr);
-
- OSVERSIONINFO osvi;
- ZeroMemory(&osvi, sizeof(OSVERSIONINFO));
- osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
- GetVersionEx(&osvi);
- g_dwMajorVersion = osvi.dwMajorVersion;
- g_dwMinorVersion = osvi.dwMinorVersion;
-
- return hr;
- }
- return S_OK;
+ if(!g_bStarted) {
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+        if(SUCCEEDED(hr) || hr == 0x80010106) { // 0x80010106 (RPC_E_CHANGED_MODE) when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
+ hr = MFStartup(MF_VERSION);
+ }
+ g_bStarted = SUCCEEDED(hr);
+
+ OSVERSIONINFO osvi;
+ ZeroMemory(&osvi, sizeof(OSVERSIONINFO));
+ osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+ GetVersionEx(&osvi);
+ g_dwMajorVersion = osvi.dwMajorVersion;
+ g_dwMinorVersion = osvi.dwMinorVersion;
+
+ return hr;
+ }
+ return S_OK;
}
HRESULT MFUtils::Shutdown()
{
- if(g_bStarted)
- {
- g_bStarted = false;
- return S_OK;
- }
- return S_OK;
+ if(g_bStarted) {
+ g_bStarted = false;
+ return S_OK;
+ }
+ return S_OK;
}
BOOL MFUtils::IsD3D9Supported()
{
- if (MFUtils::g_bD3D9Checked)
- {
- return MFUtils::g_bD3D9Supported;
- }
- MFUtils::g_bD3D9Checked = TRUE;
- HRESULT hr = S_OK;
- IDirect3D9* pD3D = NULL;
- D3DDISPLAYMODE mode = { 0 };
- D3DPRESENT_PARAMETERS pp = {0};
- IDirect3DDevice9* pDevice = NULL;
-
- CHECK_HR(hr = MFUtils::Startup());
-
- if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
- {
+ if (MFUtils::g_bD3D9Checked) {
+ return MFUtils::g_bD3D9Supported;
+ }
+ MFUtils::g_bD3D9Checked = TRUE;
+ HRESULT hr = S_OK;
+ IDirect3D9* pD3D = NULL;
+ D3DDISPLAYMODE mode = { 0 };
+ D3DPRESENT_PARAMETERS pp = {0};
+ IDirect3DDevice9* pDevice = NULL;
+
+ CHECK_HR(hr = MFUtils::Startup());
+
+ if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION))) {
CHECK_HR(hr = E_OUTOFMEMORY);
}
hr = pD3D->GetAdapterDisplayMode(
- D3DADAPTER_DEFAULT,
- &mode
- );
- if (FAILED(hr))
- {
- goto bail;
- }
+ D3DADAPTER_DEFAULT,
+ &mode
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
hr = pD3D->CheckDeviceType(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- mode.Format,
- D3DFMT_X8R8G8B8,
- TRUE // windowed
- );
- if (FAILED(hr))
- {
- goto bail;
- }
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ mode.Format,
+ D3DFMT_X8R8G8B8,
+ TRUE // windowed
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
pp.BackBufferFormat = D3DFMT_X8R8G8B8;
pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
- pp.Windowed = TRUE;
- pp.hDeviceWindow = GetDesktopWindow();
+ pp.Windowed = TRUE;
+ pp.hDeviceWindow = GetDesktopWindow();
hr = pD3D->CreateDevice(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- pp.hDeviceWindow,
- D3DCREATE_HARDWARE_VERTEXPROCESSING,
- &pp,
- &pDevice
- );
- if (FAILED(hr))
- {
- goto bail;
- }
-
- // Everythings is OK
- MFUtils::g_bD3D9Supported = TRUE;
- TSK_DEBUG_INFO("D3D9 supported");
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ pp.hDeviceWindow,
+ D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &pp,
+ &pDevice
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ // Everythings is OK
+    // Everything is OK
+ TSK_DEBUG_INFO("D3D9 supported");
bail:
- if (!MFUtils::g_bD3D9Supported) {
- TSK_DEBUG_WARN("D3D9 not supported");
- }
- SafeRelease(&pDevice);
- SafeRelease(&pD3D);
- return MFUtils::g_bD3D9Supported;
+ if (!MFUtils::g_bD3D9Supported) {
+ TSK_DEBUG_WARN("D3D9 not supported");
+ }
+ SafeRelease(&pDevice);
+ SafeRelease(&pD3D);
+ return MFUtils::g_bD3D9Supported;
}
BOOL MFUtils::IsLowLatencyH264Supported()
{
- if(MFUtils::g_bLowLatencyH264Checked)
- {
- return MFUtils::g_bLowLatencyH264Supported;
- }
+ if(MFUtils::g_bLowLatencyH264Checked) {
+ return MFUtils::g_bLowLatencyH264Supported;
+ }
#if PLUGIN_MF_DISABLE_CODECS
- MFUtils::g_bLowLatencyH264Checked = TRUE;
- MFUtils::g_bLowLatencyH264Supported = FALSE;
+ MFUtils::g_bLowLatencyH264Checked = TRUE;
+ MFUtils::g_bLowLatencyH264Supported = FALSE;
#else
- Startup();
-
- HRESULT hr = S_OK;
- IMFTransform *pEncoderMFT = NULL;
- IMFTransform *pDecoderMFT = NULL;
- MFCodecVideoH264* pEncoderCodec = NULL;
- MFCodecVideoH264* pDecoderCodec = NULL;
-
- static const BOOL IsEncoderYes = TRUE;
-
- // Encoder
- hr = MFUtils::GetBestCodec(IsEncoderYes, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pEncoderMFT);
- if(FAILED(hr))
- {
- TSK_DEBUG_INFO("No low latency H.264 encoder");
- goto bail;
- }
-
- // Decoder
- hr = MFUtils::GetBestCodec(!IsEncoderYes, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pDecoderMFT);
- if(FAILED(hr))
- {
- TSK_DEBUG_INFO("No low latency H.264 decoder");
- goto bail;
- }
-
- // Make sure both encoder and decoder are working well. Check encoding/decoding 1080p@30 would work.
-
- TSK_DEBUG_INFO("Probing H.264 MFT encoder...");
- pEncoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder, pEncoderMFT);
- if(!pEncoderCodec)
- {
- CHECK_HR(hr = E_FAIL);
- }
- CHECK_HR(hr = pEncoderCodec->Initialize(
- 30, // FPS
- 1920, // WIDTH
- 1080, // HEIGHT
- tmedia_get_video_bandwidth_kbps_2(1920, 1080, 30) * 1024) // BITRATE
- );
- CHECK_HR(pEncoderCodec->IsSetSliceMaxSizeInBytesSupported(MFUtils::g_bLowLatencyH264SupportsMaxSliceSize));
-
- TSK_DEBUG_INFO("Probing H.264 MFT decoder...");
- pDecoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder, pDecoderMFT);
- if(!pDecoderCodec)
- {
- CHECK_HR(hr = E_FAIL);
- }
- CHECK_HR(hr = pDecoderCodec->Initialize(
- 30, // FPS
- 1920, // WIDTH
- 1080 // HEIGHT
- ));
+ Startup();
+
+ HRESULT hr = S_OK;
+ IMFTransform *pEncoderMFT = NULL;
+ IMFTransform *pDecoderMFT = NULL;
+ MFCodecVideoH264* pEncoderCodec = NULL;
+ MFCodecVideoH264* pDecoderCodec = NULL;
+
+ static const BOOL IsEncoderYes = TRUE;
+
+ // Encoder
+ hr = MFUtils::GetBestCodec(IsEncoderYes, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pEncoderMFT);
+ if(FAILED(hr)) {
+ TSK_DEBUG_INFO("No low latency H.264 encoder");
+ goto bail;
+ }
+
+ // Decoder
+ hr = MFUtils::GetBestCodec(!IsEncoderYes, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pDecoderMFT);
+ if(FAILED(hr)) {
+ TSK_DEBUG_INFO("No low latency H.264 decoder");
+ goto bail;
+ }
+
+    // Make sure both the encoder and the decoder work properly. Check that encoding/decoding 1080p@30 would work.
+
+ TSK_DEBUG_INFO("Probing H.264 MFT encoder...");
+ pEncoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder, pEncoderMFT);
+ if(!pEncoderCodec) {
+ CHECK_HR(hr = E_FAIL);
+ }
+ CHECK_HR(hr = pEncoderCodec->Initialize(
+ 30, // FPS
+ 1920, // WIDTH
+ 1080, // HEIGHT
+ tmedia_get_video_bandwidth_kbps_2(1920, 1080, 30) * 1024) // BITRATE
+ );
+ CHECK_HR(pEncoderCodec->IsSetSliceMaxSizeInBytesSupported(MFUtils::g_bLowLatencyH264SupportsMaxSliceSize));
+
+ TSK_DEBUG_INFO("Probing H.264 MFT decoder...");
+ pDecoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder, pDecoderMFT);
+ if(!pDecoderCodec) {
+ CHECK_HR(hr = E_FAIL);
+ }
+ CHECK_HR(hr = pDecoderCodec->Initialize(
+ 30, // FPS
+ 1920, // WIDTH
+ 1080 // HEIGHT
+ ));
bail:
- MFUtils::g_bLowLatencyH264Checked = TRUE;
- MFUtils::g_bLowLatencyH264Supported = SUCCEEDED(hr) ? TRUE : FALSE;
- SafeRelease(&pEncoderMFT);
- SafeRelease(&pEncoderCodec);
- SafeRelease(&pDecoderMFT);
- SafeRelease(&pDecoderCodec);
+ MFUtils::g_bLowLatencyH264Checked = TRUE;
+ MFUtils::g_bLowLatencyH264Supported = SUCCEEDED(hr) ? TRUE : FALSE;
+ SafeRelease(&pEncoderMFT);
+ SafeRelease(&pEncoderCodec);
+ SafeRelease(&pDecoderMFT);
+ SafeRelease(&pDecoderCodec);
#endif /* PLUGIN_MF_DISABLE_CODECS */
- return MFUtils::g_bLowLatencyH264Supported;
+ return MFUtils::g_bLowLatencyH264Supported;
}
BOOL MFUtils::IsLowLatencyH264SupportsMaxSliceSize()
{
- return MFUtils::IsLowLatencyH264Supported() && MFUtils::g_bLowLatencyH264SupportsMaxSliceSize;
+ return MFUtils::IsLowLatencyH264Supported() && MFUtils::g_bLowLatencyH264SupportsMaxSliceSize;
}
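These probes are presumably consulted once, at plugin registration time, to decide whether the hardware-accelerated H.264 path can be exposed. A minimal sketch (the fallback branch is illustrative):

    if (MFUtils::IsLowLatencyH264Supported()) {
        // Safe to register the MF H.264 encoder/decoder plugins.
        BOOL bSliceSize = MFUtils::IsLowLatencyH264SupportsMaxSliceSize(); // can slice sizes be capped for RTP?
    }
    else {
        // Fall back to a software codec plugin.
    }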
HRESULT MFUtils::IsAsyncMFT(
- IMFTransform *pMFT, // The MFT to check
- BOOL* pbIsAsync // Whether the MFT is Async
- )
+ IMFTransform *pMFT, // The MFT to check
+ BOOL* pbIsAsync // Whether the MFT is Async
+)
{
- if(!pbIsAsync || !pMFT)
- {
- return E_POINTER;
- }
+ if(!pbIsAsync || !pMFT) {
+ return E_POINTER;
+ }
- IMFAttributes *pAttributes = NULL;
- UINT32 nIsAsync = 0;
- HRESULT hr = S_OK;
+ IMFAttributes *pAttributes = NULL;
+ UINT32 nIsAsync = 0;
+ HRESULT hr = S_OK;
hr = pMFT->GetAttributes(&pAttributes);
- if(SUCCEEDED(hr))
- {
- hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nIsAsync);
- }
-
- // Never fails: just say not Async
- CHECK_HR(hr = S_OK);
+ if(SUCCEEDED(hr)) {
+ hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nIsAsync);
+ }
+
+ // Never fails: just say not Async
+ CHECK_HR(hr = S_OK);
- *pbIsAsync = !!nIsAsync;
+ *pbIsAsync = !!nIsAsync;
bail:
- return hr;
+ return hr;
}
HRESULT MFUtils::UnlockAsyncMFT(
- IMFTransform *pMFT // The MFT to unlock
- )
+ IMFTransform *pMFT // The MFT to unlock
+)
{
- IMFAttributes *pAttributes = NULL;
- UINT32 nValue = 0;
- HRESULT hr = S_OK;
+ IMFAttributes *pAttributes = NULL;
+ UINT32 nValue = 0;
+ HRESULT hr = S_OK;
hr = pMFT->GetAttributes(&pAttributes);
- if(FAILED(hr))
- {
- hr = S_OK;
- goto bail;
- }
-
- hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nValue);
- if(FAILED(hr))
- {
- hr = S_OK;
- goto bail;
- }
-
- if(nValue == TRUE)
- {
- CHECK_HR(hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE));
- }
-
+ if(FAILED(hr)) {
+ hr = S_OK;
+ goto bail;
+ }
+
+ hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nValue);
+ if(FAILED(hr)) {
+ hr = S_OK;
+ goto bail;
+ }
+
+ if(nValue == TRUE) {
+ CHECK_HR(hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE));
+ }
+
bail:
- SafeRelease(&pAttributes);
- return hr;
+ SafeRelease(&pAttributes);
+ return hr;
}
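Usage sketch: hardware MFTs are usually asynchronous and reject ProcessInput/ProcessOutput until they are unlocked, so callers chain the two helpers above (pMFT is assumed to come from GetBestCodec() further down):

    BOOL bIsAsync = FALSE;
    HRESULT hr = MFUtils::IsAsyncMFT(pMFT, &bIsAsync);
    if (SUCCEEDED(hr) && bIsAsync) {
        hr = MFUtils::UnlockAsyncMFT(pMFT); // sets MF_TRANSFORM_ASYNC_UNLOCK before any ProcessInput/ProcessOutput
    }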
//-------------------------------------------------------------------
// CreatePCMAudioType
@@ -367,7 +348,7 @@ HRESULT MFUtils::CreatePCMAudioType(
UINT32 bitsPerSample, // Bits per sample
UINT32 cChannels, // Number of channels
IMFMediaType **ppType // Receives a pointer to the media type.
- )
+)
{
HRESULT hr = S_OK;
@@ -381,48 +362,39 @@ HRESULT MFUtils::CreatePCMAudioType(
hr = MFCreateMediaType(&pType);
// Set attributes on the type.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, cChannels);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sampleRate);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, blockAlign);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerSecond);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Return the type to the caller.
*ppType = pType;
(*ppType)->AddRef();
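The derived fields follow directly from the parameters; a worked example with a hypothetical 48 kHz, 16-bit stereo stream:

    // blockAlign     = cChannels * bitsPerSample / 8 = 2 * 16 / 8 = 4 bytes per audio frame
    // bytesPerSecond = sampleRate * blockAlign       = 48000 * 4  = 192000
    // These are the values written to MF_MT_AUDIO_BLOCK_ALIGNMENT and MF_MT_AUDIO_AVG_BYTES_PER_SECOND above.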
@@ -440,13 +412,13 @@ HRESULT MFUtils::CreatePCMAudioType(
// format.
//-------------------------------------------------------------------
HRESULT MFUtils::CreateVideoType(
- const GUID* subType, // video subType
- IMFMediaType **ppType, // Receives a pointer to the media type.
- UINT32 unWidth, // Video width (0 to ignore)
- UINT32 unHeight // Video height (0 to ignore)
- )
+ const GUID* subType, // video subType
+ IMFMediaType **ppType, // Receives a pointer to the media type.
+ UINT32 unWidth, // Video width (0 to ignore)
+ UINT32 unHeight // Video height (0 to ignore)
+)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
IMFMediaType *pType = NULL;
@@ -456,17 +428,16 @@ HRESULT MFUtils::CreateVideoType(
CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, *subType));
- CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // UnCompressed
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // UnCompressed
- CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE)); // UnCompressed
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE)); // UnCompressed
- CHECK_HR(hr = pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+ if(unWidth > 0 && unHeight > 0) {
+ CHECK_HR(hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, unWidth, unHeight));
+ }
- if(unWidth > 0 && unHeight > 0)
- {
- CHECK_HR(hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, unWidth, unHeight));
- }
-
*ppType = pType;
(*ppType)->AddRef();
@@ -481,7 +452,7 @@ bail:
//-------------------------------------------------------------------
HRESULT MFUtils::ValidateVideoFormat(IMFMediaType *pmt)
{
- GUID major_type = GUID_NULL;
+ GUID major_type = GUID_NULL;
GUID subtype = GUID_NULL;
MFVideoInterlaceMode interlace = MFVideoInterlace_Unknown;
UINT32 val = 0;
@@ -492,8 +463,7 @@ HRESULT MFUtils::ValidateVideoFormat(IMFMediaType *pmt)
// Major type must be video.
CHECK_HR(hr = pmt->GetGUID(MF_MT_MAJOR_TYPE, &major_type));
- if (major_type != MFMediaType_Video)
- {
+ if (major_type != MFMediaType_Video) {
CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
}
@@ -504,24 +474,20 @@ HRESULT MFUtils::ValidateVideoFormat(IMFMediaType *pmt)
#if 0
// Look for the subtype in our list of accepted types.
- for (DWORD i = 0; i < g_NumVideoSubtypes; i++)
- {
- if (subtype == *g_VideoSubtypes[i])
- {
+ for (DWORD i = 0; i < g_NumVideoSubtypes; i++) {
+ if (subtype == *g_VideoSubtypes[i]) {
bFoundMatchingSubtype = TRUE;
break;
}
}
- if (!bFoundMatchingSubtype)
- {
+ if (!bFoundMatchingSubtype) {
CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
}
#endif
// Video must be progressive frames.
CHECK_HR(hr = pmt->GetUINT32(MF_MT_INTERLACE_MODE, (UINT32*)&interlace));
- if (interlace != MFVideoInterlace_Progressive)
- {
+ if (interlace != MFVideoInterlace_Progressive) {
CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
}
@@ -533,9 +499,9 @@ HRESULT MFUtils::ConvertVideoTypeToUncompressedType(
IMFMediaType *pType, // Pointer to an encoded video type.
const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
IMFMediaType **ppType // Receives a matching uncompressed video type.
- )
+)
{
- IMFMediaType *pTypeUncomp = NULL;
+ IMFMediaType *pTypeUncomp = NULL;
HRESULT hr = S_OK;
GUID majortype = { 0 };
@@ -543,59 +509,51 @@ HRESULT MFUtils::ConvertVideoTypeToUncompressedType(
hr = pType->GetMajorType(&majortype);
- if (majortype != MFMediaType_Video)
- {
+ if (majortype != MFMediaType_Video) {
return MF_E_INVALIDMEDIATYPE;
}
// Create a new media type and copy over all of the items.
// This ensures that extended color information is retained.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFCreateMediaType(&pTypeUncomp);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->CopyAllItems(pTypeUncomp);
}
// Set the subtype.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pTypeUncomp->SetGUID(MF_MT_SUBTYPE, subtype);
}
// Uncompressed means all samples are independent.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pTypeUncomp->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
}
// Fix up PAR if not set on the original type.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFGetAttributeRatio(
- pTypeUncomp,
- MF_MT_PIXEL_ASPECT_RATIO,
- (UINT32*)&par.Numerator,
- (UINT32*)&par.Denominator
- );
+ pTypeUncomp,
+ MF_MT_PIXEL_ASPECT_RATIO,
+ (UINT32*)&par.Numerator,
+ (UINT32*)&par.Denominator
+ );
// Default to square pixels.
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
hr = MFSetAttributeRatio(
- pTypeUncomp,
- MF_MT_PIXEL_ASPECT_RATIO,
- 1, 1
- );
+ pTypeUncomp,
+ MF_MT_PIXEL_ASPECT_RATIO,
+ 1, 1
+ );
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
*ppType = pTypeUncomp;
(*ppType)->AddRef();
}
@@ -605,21 +563,21 @@ HRESULT MFUtils::ConvertVideoTypeToUncompressedType(
}
HRESULT MFUtils::CreateMediaSample(
- DWORD cbData, // Maximum buffer size
- IMFSample **ppSample // Receives the sample
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
)
{
- assert(ppSample);
+ assert(ppSample);
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
IMFSample *pSample = NULL;
IMFMediaBuffer *pBuffer = NULL;
- CHECK_HR(hr = MFCreateSample(&pSample));
- CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
+ CHECK_HR(hr = MFCreateSample(&pSample));
+ CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
CHECK_HR(hr = pSample->AddBuffer(pBuffer));
-
+
*ppSample = pSample;
(*ppSample)->AddRef();
@@ -631,222 +589,198 @@ bail:
// Gets the best encoder or decoder. It is up to the caller to release the returned pointer
HRESULT MFUtils::GetBestCodec(
- BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
- const GUID& mediaType, // The MediaType
- const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
- const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
- IMFTransform **ppMFT // Receives the decoder/encoder transform
- )
+ BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
+ const GUID& mediaType, // The MediaType
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
+ IMFTransform **ppMFT // Receives the decoder/encoder transform
+)
{
- assert(ppMFT);
- assert(mediaType == MFMediaType_Video || mediaType == MFMediaType_Audio); // only audio and video codecs are support for now
-
- *ppMFT = NULL;
-
- HRESULT hr = S_OK;
-
- if(outputFormat == MFVideoFormat_H264 || inputFormat == MFVideoFormat_H264)
- {
- if(bEncoder)
- {
- // Force using Intel Quick Sync Encoder
- hr = CoCreateInstance(CLSID_MF_INTEL_H264EncFilter, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
- if(SUCCEEDED(hr) && *ppMFT)
- {
- TSK_DEBUG_INFO("Using Intel Quick Sync encoder :)");
- return hr;
- }
- TSK_DEBUG_INFO("Not using Intel Quick Sync encoder :(");
- }
- else
- {
+ assert(ppMFT);
+    assert(mediaType == MFMediaType_Video || mediaType == MFMediaType_Audio); // only audio and video codecs are supported for now
+
+ *ppMFT = NULL;
+
+ HRESULT hr = S_OK;
+
+ if(outputFormat == MFVideoFormat_H264 || inputFormat == MFVideoFormat_H264) {
+ if(bEncoder) {
+ // Force using Intel Quick Sync Encoder
+ hr = CoCreateInstance(CLSID_MF_INTEL_H264EncFilter, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
+ if(SUCCEEDED(hr) && *ppMFT) {
+ TSK_DEBUG_INFO("Using Intel Quick Sync encoder :)");
+ return hr;
+ }
+ TSK_DEBUG_INFO("Not using Intel Quick Sync encoder :(");
+ }
+ else {
#if !PLUGIN_MF_DISABLE_ASYNC_DECODERS // Intel Quick Sync decoder is asynchronous
- // Force using Intel Quick Sync Decoder
- hr = CoCreateInstance(CLSID_MF_INTEL_H264DecFilter, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
+ // Force using Intel Quick Sync Decoder
+ hr = CoCreateInstance(CLSID_MF_INTEL_H264DecFilter, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
#endif
- if(SUCCEEDED(hr) && *ppMFT)
- {
- TSK_DEBUG_INFO("Using Intel Quick Sync decoder :)");
- return hr;
- }
- TSK_DEBUG_INFO("Not using Intel Quick Sync decoder :(");
- }
- }
-
- UINT32 count = 0;
- BOOL bAsync = FALSE;
- GUID guidActivateCLSID = GUID_NULL;
-
- IMFActivate **ppActivate = NULL;
-
- MFT_REGISTER_TYPE_INFO infoInput = { mediaType, inputFormat };
- MFT_REGISTER_TYPE_INFO infoOutput = { mediaType, outputFormat };
-
- UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
- MFT_ENUM_FLAG_SYNCMFT |
- MFT_ENUM_FLAG_ASYNCMFT |
- MFT_ENUM_FLAG_LOCALMFT |
- MFT_ENUM_FLAG_TRANSCODE_ONLY | // Otherwise Intel Quick Sync will not be listed
- MFT_ENUM_FLAG_SORTANDFILTER;
-
- hr = MFTEnumEx(
- (mediaType == MFMediaType_Video) ? (bEncoder ? MFT_CATEGORY_VIDEO_ENCODER : MFT_CATEGORY_VIDEO_DECODER) : (bEncoder ? MFT_CATEGORY_AUDIO_ENCODER : MFT_CATEGORY_AUDIO_DECODER),
- unFlags,
- (inputFormat == GUID_NULL) ? NULL : &infoInput, // Input type
- (outputFormat == GUID_NULL) ? NULL : &infoOutput, // Output type
- &ppActivate,
- &count
- );
-
- for(UINT32 i = 0; i < count; ++i)
- {
- SafeRelease(ppMFT);
- hr = ppActivate[i]->GetGUID(MFT_TRANSFORM_CLSID_Attribute, &guidActivateCLSID);
- if(FAILED(hr))
- {
- continue;
- }
-
- if(bEncoder)
- {
- // Encoder
- if(guidActivateCLSID == CLSID_CMSH264EncoderMFT) // MS H.264 encoder ?
- {
- if(PLUGIN_MF_DISABLE_MS_H264_ENCODER)
- {
- // Microsoft H.264 encoder is disabled
- TSK_DEBUG_INFO("MS H.264 encoder is disabled...skipping");
- continue;
- }
- if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion))
- {
- // Microsoft H.264 encoder doesn't support low latency on Win7.
- TSK_DEBUG_INFO("MS H.264 encoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
- continue;
- }
- }
- }
- else
- {
- // Decoder
- if(guidActivateCLSID == CLSID_CMSH264DecoderMFT) // MS H.264 decoder ?
- {
- if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion))
- {
- // Microsoft H.264 decoder doesn't support low latency on Win7.
- TSK_DEBUG_INFO("MS H.264 decoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
- continue;
- }
- }
- }
-
- hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppMFT));
- if(SUCCEEDED(hr) && *ppMFT) // For now we just get the first one. FIXME: Give HW encoders/decoders higher priority.
- {
- if(bEncoder)
- {
- // Encoder
-
- }
- else
- {
- // Decoder
+ if(SUCCEEDED(hr) && *ppMFT) {
+ TSK_DEBUG_INFO("Using Intel Quick Sync decoder :)");
+ return hr;
+ }
+ TSK_DEBUG_INFO("Not using Intel Quick Sync decoder :(");
+ }
+ }
+
+ UINT32 count = 0;
+ BOOL bAsync = FALSE;
+ GUID guidActivateCLSID = GUID_NULL;
+
+ IMFActivate **ppActivate = NULL;
+
+ MFT_REGISTER_TYPE_INFO infoInput = { mediaType, inputFormat };
+ MFT_REGISTER_TYPE_INFO infoOutput = { mediaType, outputFormat };
+
+ UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
+ MFT_ENUM_FLAG_SYNCMFT |
+ MFT_ENUM_FLAG_ASYNCMFT |
+ MFT_ENUM_FLAG_LOCALMFT |
+ MFT_ENUM_FLAG_TRANSCODE_ONLY | // Otherwise Intel Quick Sync will not be listed
+ MFT_ENUM_FLAG_SORTANDFILTER;
+
+ hr = MFTEnumEx(
+ (mediaType == MFMediaType_Video) ? (bEncoder ? MFT_CATEGORY_VIDEO_ENCODER : MFT_CATEGORY_VIDEO_DECODER) : (bEncoder ? MFT_CATEGORY_AUDIO_ENCODER : MFT_CATEGORY_AUDIO_DECODER),
+ unFlags,
+ (inputFormat == GUID_NULL) ? NULL : &infoInput, // Input type
+ (outputFormat == GUID_NULL) ? NULL : &infoOutput, // Output type
+ &ppActivate,
+ &count
+ );
+
+ for(UINT32 i = 0; i < count; ++i) {
+ SafeRelease(ppMFT);
+ hr = ppActivate[i]->GetGUID(MFT_TRANSFORM_CLSID_Attribute, &guidActivateCLSID);
+ if(FAILED(hr)) {
+ continue;
+ }
+
+ if(bEncoder) {
+ // Encoder
+ if(guidActivateCLSID == CLSID_CMSH264EncoderMFT) { // MS H.264 encoder ?
+ if(PLUGIN_MF_DISABLE_MS_H264_ENCODER) {
+ // Microsoft H.264 encoder is disabled
+ TSK_DEBUG_INFO("MS H.264 encoder is disabled...skipping");
+ continue;
+ }
+ if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion)) {
+ // Microsoft H.264 encoder doesn't support low latency on Win7.
+ TSK_DEBUG_INFO("MS H.264 encoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
+ continue;
+ }
+ }
+ }
+ else {
+ // Decoder
+ if(guidActivateCLSID == CLSID_CMSH264DecoderMFT) { // MS H.264 decoder ?
+ if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion)) {
+ // Microsoft H.264 decoder doesn't support low latency on Win7.
+ TSK_DEBUG_INFO("MS H.264 decoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
+ continue;
+ }
+ }
+ }
+
+ hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppMFT));
+ if(SUCCEEDED(hr) && *ppMFT) { // For now we just get the first one. FIXME: Give HW encoders/decoders higher priority.
+ if(bEncoder) {
+ // Encoder
+
+ }
+ else {
+ // Decoder
#if PLUGIN_MF_DISABLE_ASYNC_DECODERS
- hr = IsAsyncMFT(*ppMFT, &bAsync);
- if(bAsync)
- {
- TSK_DEBUG_INFO("Skipping async decoder because not supported yet");
- continue; // Async decoders not supported yet
- }
+ hr = IsAsyncMFT(*ppMFT, &bAsync);
+ if(bAsync) {
+ TSK_DEBUG_INFO("Skipping async decoder because not supported yet");
+ continue; // Async decoders not supported yet
+ }
#endif
- }
- break;
- }
- }
-
- for (UINT32 i = 0; i < count; i++)
- {
- ppActivate[i]->Release();
- }
- CoTaskMemFree(ppActivate);
-
- return *ppMFT ? S_OK : MF_E_NOT_FOUND;
+ }
+ break;
+ }
+ }
+
+ for (UINT32 i = 0; i < count; i++) {
+ ppActivate[i]->Release();
+ }
+ CoTaskMemFree(ppActivate);
+
+ return *ppMFT ? S_OK : MF_E_NOT_FOUND;
}
HRESULT MFUtils::IsVideoProcessorSupported(BOOL *pbSupported)
{
- HRESULT hr = S_OK;
- IMFTransform *pTransform = NULL;
-
- if(!pbSupported)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pTransform));
- *pbSupported = SUCCEEDED(hr);
- if(FAILED(hr))
- {
- hr = S_OK; // not an error
- }
+ HRESULT hr = S_OK;
+ IMFTransform *pTransform = NULL;
+
+ if(!pbSupported) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pTransform));
+ *pbSupported = SUCCEEDED(hr);
+ if(FAILED(hr)) {
+ hr = S_OK; // not an error
+ }
bail:
- SafeRelease(&pTransform);
- return hr;
+ SafeRelease(&pTransform);
+ return hr;
}
HRESULT MFUtils::GetBestVideoProcessor(
- const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
- const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
- IMFTransform **ppProcessor // Receives the video processor
- )
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
+ IMFTransform **ppProcessor // Receives the video processor
+)
{
- assert(ppProcessor);
-
- *ppProcessor = NULL;
-
- HRESULT hr = S_OK;
- UINT32 count = 0;
-
- IMFActivate **ppActivate = NULL;
-
- MFT_REGISTER_TYPE_INFO infoInput = { MFMediaType_Video, inputFormat };
- MFT_REGISTER_TYPE_INFO infoOutput = { MFMediaType_Video, outputFormat };
-
- UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
- MFT_ENUM_FLAG_SYNCMFT |
- MFT_ENUM_FLAG_LOCALMFT |
- MFT_ENUM_FLAG_SORTANDFILTER;
-
- hr = MFTEnumEx(
- MFT_CATEGORY_VIDEO_PROCESSOR,
- unFlags,
- &infoInput, // Input type
- &infoOutput, // Output type
- &ppActivate,
- &count
- );
-
- for(UINT32 i = 0; i < count; ++i)
- {
- hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppProcessor));
- if(SUCCEEDED(hr) && *ppProcessor)
- {
- break;
- }
- SafeRelease(ppProcessor);
- }
-
- for (UINT32 i = 0; i < count; i++)
- {
- ppActivate[i]->Release();
- }
- CoTaskMemFree(ppActivate);
-
- return *ppProcessor ? S_OK : MF_E_NOT_FOUND;
+ assert(ppProcessor);
+
+ *ppProcessor = NULL;
+
+ HRESULT hr = S_OK;
+ UINT32 count = 0;
+
+ IMFActivate **ppActivate = NULL;
+
+ MFT_REGISTER_TYPE_INFO infoInput = { MFMediaType_Video, inputFormat };
+ MFT_REGISTER_TYPE_INFO infoOutput = { MFMediaType_Video, outputFormat };
+
+ UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
+ MFT_ENUM_FLAG_SYNCMFT |
+ MFT_ENUM_FLAG_LOCALMFT |
+ MFT_ENUM_FLAG_SORTANDFILTER;
+
+ hr = MFTEnumEx(
+ MFT_CATEGORY_VIDEO_PROCESSOR,
+ unFlags,
+ &infoInput, // Input type
+ &infoOutput, // Output type
+ &ppActivate,
+ &count
+ );
+
+ for(UINT32 i = 0; i < count; ++i) {
+ hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppProcessor));
+ if(SUCCEEDED(hr) && *ppProcessor) {
+ break;
+ }
+ SafeRelease(ppProcessor);
+ }
+
+ for (UINT32 i = 0; i < count; i++) {
+ ppActivate[i]->Release();
+ }
+ CoTaskMemFree(ppActivate);
+
+ return *ppProcessor ? S_OK : MF_E_NOT_FOUND;
}
// Add an transform node to a topology.
@@ -855,26 +789,26 @@ HRESULT MFUtils::AddTransformNode(
IMFTransform *pMFT, // MFT.
DWORD dwId, // Identifier of the stream sink.
IMFTopologyNode **ppNode // Receives the node pointer.
- )
+)
{
- *ppNode = NULL;
+ *ppNode = NULL;
IMFTopologyNode *pNode = NULL;
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
// Create the node.
CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &pNode));
// Set the object pointer.
CHECK_HR(hr = pNode->SetObject(pMFT));
- CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
- CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
// Add the node to the topology.
CHECK_HR(hr = pTopology->AddNode(pNode));
// Return the pointer to the caller.
*ppNode = pNode;
(*ppNode)->AddRef();
-
+
bail:
SafeRelease(&pNode);
return hr;
@@ -882,13 +816,13 @@ bail:
// Sets the IMFStreamSink pointer on an output node.
HRESULT MFUtils::BindOutputNode(
- IMFTopologyNode *pNode // The Node
- )
+ IMFTopologyNode *pNode // The Node
+)
{
- assert(pNode);
+ assert(pNode);
- HRESULT hr = S_OK;
- IUnknown *pNodeObject = NULL;
+ HRESULT hr = S_OK;
+ IUnknown *pNodeObject = NULL;
IMFActivate *pActivate = NULL;
IMFStreamSink *pStream = NULL;
IMFMediaSink *pSink = NULL;
@@ -905,44 +839,38 @@ HRESULT MFUtils::BindOutputNode(
// First, check if it's an activation object.
CHECK_HR(hr = pNodeObject->QueryInterface(IID_PPV_ARGS(&pActivate)));
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
DWORD dwStreamID = 0;
- // The object pointer is an activation object.
-
+ // The object pointer is an activation object.
+
// Try to create the media sink.
hr = pActivate->ActivateObject(IID_PPV_ARGS(&pSink));
// Look up the stream ID. (Default to zero.)
- if (SUCCEEDED(hr))
- {
- dwStreamID = MFGetAttributeUINT32(pNode, MF_TOPONODE_STREAMID, 0);
+ if (SUCCEEDED(hr)) {
+ dwStreamID = MFGetAttributeUINT32(pNode, MF_TOPONODE_STREAMID, 0);
}
// Now try to get or create the stream sink.
// Check if the media sink already has a stream sink with the requested ID.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pSink->GetStreamSinkById(dwStreamID, &pStream);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
// Try to add a new stream sink.
hr = pSink->AddStreamSink(dwStreamID, NULL, &pStream);
}
}
- // Replace the node's object pointer with the stream sink.
- if (SUCCEEDED(hr))
- {
+ // Replace the node's object pointer with the stream sink.
+ if (SUCCEEDED(hr)) {
hr = pNode->SetObject(pStream);
}
}
- else
- {
+ else {
// Not an activation object. Is it a stream sink?
hr = pNodeObject->QueryInterface(IID_PPV_ARGS(&pStream));
}
@@ -957,54 +885,54 @@ bail:
// Add an output node to a topology.
HRESULT MFUtils::AddOutputNode(
- IMFTopology *pTopology, // Topology.
- IMFActivate *pActivate, // Media sink activation object.
- DWORD dwId, // Identifier of the stream sink.
- IMFTopologyNode **ppNode) // Receives the node pointer
+ IMFTopology *pTopology, // Topology.
+ IMFActivate *pActivate, // Media sink activation object.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode) // Receives the node pointer
{
- IMFTopologyNode *pNode = NULL;
+ IMFTopologyNode *pNode = NULL;
- HRESULT hr = S_OK;
- CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode));
- CHECK_HR(hr = pNode->SetObject(pActivate));
- CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
- CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
- CHECK_HR(hr = pTopology->AddNode(pNode));
+ HRESULT hr = S_OK;
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode));
+ CHECK_HR(hr = pNode->SetObject(pActivate));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+ CHECK_HR(hr = pTopology->AddNode(pNode));
- // Return the pointer to the caller.
- *ppNode = pNode;
- (*ppNode)->AddRef();
+ // Return the pointer to the caller.
+ *ppNode = pNode;
+ (*ppNode)->AddRef();
bail:
- SafeRelease(&pNode);
- return hr;
+ SafeRelease(&pNode);
+ return hr;
}
// Add a source node to a topology
HRESULT MFUtils::AddSourceNode(
- IMFTopology *pTopology, // Topology.
- IMFMediaSource *pSource, // Media source.
- IMFPresentationDescriptor *pPD, // Presentation descriptor.
- IMFStreamDescriptor *pSD, // Stream descriptor.
- IMFTopologyNode **ppNode // Receives the node pointer.
- )
+ IMFTopology *pTopology, // Topology.
+ IMFMediaSource *pSource, // Media source.
+ IMFPresentationDescriptor *pPD, // Presentation descriptor.
+ IMFStreamDescriptor *pSD, // Stream descriptor.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+)
{
- IMFTopologyNode *pNode = NULL;
+ IMFTopologyNode *pNode = NULL;
- HRESULT hr = S_OK;
- CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &pNode));
- CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource));
- CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD));
- CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD));
- CHECK_HR(hr = pTopology->AddNode(pNode));
+ HRESULT hr = S_OK;
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &pNode));
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource));
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD));
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD));
+ CHECK_HR(hr = pTopology->AddNode(pNode));
- // Return the pointer to the caller.
- *ppNode = pNode;
- (*ppNode)->AddRef();
+ // Return the pointer to the caller.
+ *ppNode = pNode;
+ (*ppNode)->AddRef();
bail:
- SafeRelease(&pNode);
- return hr;
+ SafeRelease(&pNode);
+ return hr;
}
// Create the topology
@@ -1013,386 +941,345 @@ bail:
// \-> (SinkPreview)
//
HRESULT MFUtils::CreateTopology(
- IMFMediaSource *pSource, // Media source
- IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
- IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
- IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
- IMFMediaType *pIputTypeMain, // Main sink input MediaType
- IMFTopology **ppTopo // Receives the newly created topology
- )
+ IMFMediaSource *pSource, // Media source
+ IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
+ IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
+ IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
+ IMFMediaType *pIputTypeMain, // Main sink input MediaType
+ IMFTopology **ppTopo // Receives the newly created topology
+)
{
- IMFTopology *pTopology = NULL;
- IMFPresentationDescriptor *pPD = NULL;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
- IMFTopologyNode *pNodeSource = NULL;
- IMFTopologyNode *pNodeSinkMain = NULL;
- IMFTopologyNode *pNodeSinkPreview = NULL;
- IMFTopologyNode *pNodeTransform = NULL;
- IMFTopologyNode *pNodeTee = NULL;
- IMFMediaType *pMediaType = NULL;
+ IMFTopology *pTopology = NULL;
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFTopologyNode *pNodeSource = NULL;
+ IMFTopologyNode *pNodeSinkMain = NULL;
+ IMFTopologyNode *pNodeSinkPreview = NULL;
+ IMFTopologyNode *pNodeTransform = NULL;
+ IMFTopologyNode *pNodeTee = NULL;
+ IMFMediaType *pMediaType = NULL;
IMFTransform *pVideoProcessor = NULL;
IMFTopologyNode *pNodeVideoProcessor = NULL;
- IMFTransform *pConvFrameRate = NULL;
- IMFTransform *pConvSize = NULL;
- IMFTransform *pConvColor = NULL;
- IMFTopologyNode *pNodeConvFrameRate = NULL;
- IMFTopologyNode *pNodeConvSize = NULL;
- IMFTopologyNode *pNodeConvColor = NULL;
- IMFMediaType *pTransformInputType = NULL;
- IMFMediaType *pSinkMainInputType = NULL;
- const IMFTopologyNode *pcNodeBeforeSinkMain = NULL;
-
- HRESULT hr = S_OK;
- DWORD cStreams = 0;
- BOOL bSourceFound = FALSE;
- BOOL bSupportedSize = FALSE;
- BOOL bSupportedFps = FALSE;
- BOOL bSupportedFormat = FALSE;
- BOOL bVideoProcessorSupported = FALSE;
- GUID inputMajorType, inputSubType;
-
- CHECK_HR(hr = IsVideoProcessorSupported(&bVideoProcessorSupported));
- CHECK_HR(hr = pIputTypeMain->GetMajorType(&inputMajorType));
-
- CHECK_HR(hr = MFCreateTopology(&pTopology));
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
- for (DWORD i = 0; i < cStreams; i++)
- {
- BOOL fSelected = FALSE;
- GUID majorType;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetMajorType(&majorType));
-
- if (majorType == inputMajorType && fSelected)
- {
- CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pNodeSource));
- CHECK_HR(hr = pNodeSource->SetTopoNodeID(MFUtils::g_ullTopoIdSource));
- CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivateMain, 0, &pNodeSinkMain));
- CHECK_HR(hr = pNodeSinkMain->SetTopoNodeID(MFUtils::g_ullTopoIdSinkMain));
- CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkMain)); // To avoid MF_E_TOPO_SINK_ACTIVATES_UNSUPPORTED
-
- //
- // Create preview
- //
-
- if(pSinkActivatePreview)
- {
- CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivatePreview, 0, &pNodeSinkPreview));
- CHECK_HR(hr = pNodeSinkPreview->SetTopoNodeID(MFUtils::g_ullTopoIdSinkPreview));
- CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkPreview));
-
- CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TEE_NODE, &pNodeTee));
- CHECK_HR(hr = pTopology->AddNode(pNodeTee));
- }
-
- //
- // Create converters
- //
-
- if(majorType == MFMediaType_Video)
- {
- // Even when size matches the topology could add a resizer which doesn't keep ratio when resizing while video processor does.
- if(!bVideoProcessorSupported)
- {
- hr = IsSupported(
- pPD,
- i,
- pIputTypeMain,
- &bSupportedSize,
- &bSupportedFps,
- &bSupportedFormat);
- }
-
- CHECK_HR(hr = pIputTypeMain->GetGUID(MF_MT_SUBTYPE, &inputSubType));
-
- if(!bSupportedSize || !bSupportedFps || !bSupportedFormat)
- {
- // Use video processor single MFT or 3 different MFTs
- if(!pVideoProcessor)
- {
- hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pVideoProcessor));
- }
- if(!pVideoProcessor)
- {
- // Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) supports I420 only
- if(!bSupportedSize && !pConvSize && inputSubType == MFVideoFormat_I420)
- {
- hr = CoCreateInstance(CLSID_CResizerDMO, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvSize));
- }
- // Frame Rate Converter DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) supports neither NV12 nor I420
- /*if(!bSupportedFps && !pConvFrameRate)
- {
- hr = CoCreateInstance(CLSID_CFrameRateConvertDmo, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvFrameRate));
- }*/
- // Color Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819079(v=vs.85).aspx) supports both NV12 and I420
- if(!bSupportedFormat && !pConvColor)
- {
- hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvColor));
- }
- }
- }
- else
- {
- // MediaType supported
- CHECK_HR(hr = pHandler->SetCurrentMediaType(pIputTypeMain));
- }
-
- if(pVideoProcessor && !pNodeVideoProcessor)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pVideoProcessor, 0, &pNodeVideoProcessor));
- CHECK_HR(hr = pNodeVideoProcessor->SetTopoNodeID(MFUtils::g_ullTopoIdVideoProcessor));
- }
- if(pConvColor && !pNodeConvColor)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pConvColor, 0, &pNodeConvColor));
- }
- if(pConvFrameRate && !pNodeConvFrameRate)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pConvFrameRate, 0, &pNodeConvFrameRate));
- }
- if(pConvSize && !pNodeConvSize)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pConvSize, 0, &pNodeConvSize));
- }
- } // if(majorType == MFMediaType_Video)
-
-
- //
- // Set media type
- //
-
- if(pTransform)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pTransform, 0, &pNodeTransform));
- hr = pTransform->GetInputCurrentType(0, &pTransformInputType);
- if(FAILED(hr))
- {
- pTransformInputType = pIputTypeMain;
- pTransformInputType->AddRef();
- hr = S_OK;
- }
- if(pVideoProcessor)
- {
- CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pTransformInputType, 0));
- }
- else
- {
- if(pConvColor)
- {
- /*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pTransformInputType, 0));
- }
- if(pConvFrameRate)
- {
- /*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pTransformInputType, 0));
- }
- if(pConvSize)
- {
- // Transform requires NV12
- //Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) doesn't support NV12
- //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pTransformInputType, 0));
- }
- }
- }
- else
- {
- hr = pNodeSinkMain->GetInputPrefType(0, &pSinkMainInputType);
- if(FAILED(hr))
- {
- pSinkMainInputType = pIputTypeMain;
- pSinkMainInputType->AddRef();
- hr = S_OK;
- }
- if(SUCCEEDED(hr))
- {
- if(pVideoProcessor)
- {
- CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pSinkMainInputType, 0));
- }
- else
- {
- //!\ MUST NOT SET OUTPUT TYPE
- if(pConvColor)
- {
- //*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pSinkMainInputType, 0));
- }
- if(pConvFrameRate)
- {
- //*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pSinkMainInputType, 0));
- }
- if(pConvSize)
- {
- //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pSinkMainInputType, 0));
- }
- }
- }
- }
-
- //
- // Connect
- //
-
- if(pNodeTee)
- {
- // Connect(Source -> Tee)
- CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeTee, 0));
-
- // Connect(Tee -> SinkPreview)
- CHECK_HR(hr = pNodeTee->ConnectOutput(1, pNodeSinkPreview, 0));
-
- // Connect(Tee ->(Processors)
- if(pVideoProcessor)
- {
- CHECK_HR(hr = pNodeTee->ConnectOutput(0, pNodeVideoProcessor, 0));
- pcNodeBeforeSinkMain = pNodeVideoProcessor;
- }
- else if(pNodeConvFrameRate || pNodeConvSize || pNodeConvColor)
- {
- CHECK_HR(hr = ConnectConverters(
- pNodeTee,
- 0,
- pNodeConvFrameRate,
- pNodeConvColor,
- pNodeConvSize
- ));
- pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
- }
- else
- {
- pcNodeBeforeSinkMain = pNodeTee;
- }
- }
- else
- {
- // Connect(Source -> (Processors))
- if(pVideoProcessor)
- {
- CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeVideoProcessor, 0));
- pcNodeBeforeSinkMain = pNodeVideoProcessor;
- }
- else if(pNodeConvFrameRate || pNodeConvFrameRate || pNodeConvColor)
- {
- CHECK_HR(hr = ConnectConverters(
- pNodeSource,
- 0,
- pNodeConvFrameRate,
- pNodeConvSize,
- pNodeConvColor
- ));
- pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
- }
- else
- {
- pcNodeBeforeSinkMain = pNodeSource;
- }
- }
-
-
- if(pNodeTransform)
- {
- // Connect(X->Transform)
- CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeTransform, 0));
- pcNodeBeforeSinkMain = pNodeTransform;
- }
-
- // Connect(X -> SinkMain)
- CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeSinkMain, 0));
-
- bSourceFound = TRUE;
- break;
- }
- else
- {
- CHECK_HR(hr = pPD->DeselectStream(i));
- }
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- }
-
- *ppTopo = pTopology;
- (*ppTopo)->AddRef();
+ IMFTransform *pConvFrameRate = NULL;
+ IMFTransform *pConvSize = NULL;
+ IMFTransform *pConvColor = NULL;
+ IMFTopologyNode *pNodeConvFrameRate = NULL;
+ IMFTopologyNode *pNodeConvSize = NULL;
+ IMFTopologyNode *pNodeConvColor = NULL;
+ IMFMediaType *pTransformInputType = NULL;
+ IMFMediaType *pSinkMainInputType = NULL;
+ const IMFTopologyNode *pcNodeBeforeSinkMain = NULL;
+
+ HRESULT hr = S_OK;
+ DWORD cStreams = 0;
+ BOOL bSourceFound = FALSE;
+ BOOL bSupportedSize = FALSE;
+ BOOL bSupportedFps = FALSE;
+ BOOL bSupportedFormat = FALSE;
+ BOOL bVideoProcessorSupported = FALSE;
+ GUID inputMajorType, inputSubType;
+
+ CHECK_HR(hr = IsVideoProcessorSupported(&bVideoProcessorSupported));
+ CHECK_HR(hr = pIputTypeMain->GetMajorType(&inputMajorType));
+
+ CHECK_HR(hr = MFCreateTopology(&pTopology));
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+ for (DWORD i = 0; i < cStreams; i++) {
+ BOOL fSelected = FALSE;
+ GUID majorType;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+ if (majorType == inputMajorType && fSelected) {
+ CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pNodeSource));
+ CHECK_HR(hr = pNodeSource->SetTopoNodeID(MFUtils::g_ullTopoIdSource));
+ CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivateMain, 0, &pNodeSinkMain));
+ CHECK_HR(hr = pNodeSinkMain->SetTopoNodeID(MFUtils::g_ullTopoIdSinkMain));
+ CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkMain)); // To avoid MF_E_TOPO_SINK_ACTIVATES_UNSUPPORTED
+
+ //
+ // Create preview
+ //
+
+ if(pSinkActivatePreview) {
+ CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivatePreview, 0, &pNodeSinkPreview));
+ CHECK_HR(hr = pNodeSinkPreview->SetTopoNodeID(MFUtils::g_ullTopoIdSinkPreview));
+ CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkPreview));
+
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TEE_NODE, &pNodeTee));
+ CHECK_HR(hr = pTopology->AddNode(pNodeTee));
+ }
+
+ //
+ // Create converters
+ //
+
+ if(majorType == MFMediaType_Video) {
+                // Even when the size matches, the topology could add a resizer which doesn't preserve the aspect ratio when resizing, while the video processor does.
+ if(!bVideoProcessorSupported) {
+ hr = IsSupported(
+ pPD,
+ i,
+ pIputTypeMain,
+ &bSupportedSize,
+ &bSupportedFps,
+ &bSupportedFormat);
+ }
+
+ CHECK_HR(hr = pIputTypeMain->GetGUID(MF_MT_SUBTYPE, &inputSubType));
+
+ if(!bSupportedSize || !bSupportedFps || !bSupportedFormat) {
+                    // Use the single video processor MFT or 3 separate MFTs
+ if(!pVideoProcessor) {
+ hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pVideoProcessor));
+ }
+ if(!pVideoProcessor) {
+ // Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) supports I420 only
+ if(!bSupportedSize && !pConvSize && inputSubType == MFVideoFormat_I420) {
+ hr = CoCreateInstance(CLSID_CResizerDMO, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvSize));
+ }
+ // Frame Rate Converter DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) supports neither NV12 nor I420
+ /*if(!bSupportedFps && !pConvFrameRate)
+ {
+ hr = CoCreateInstance(CLSID_CFrameRateConvertDmo, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvFrameRate));
+ }*/
+ // Color Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819079(v=vs.85).aspx) supports both NV12 and I420
+ if(!bSupportedFormat && !pConvColor) {
+ hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvColor));
+ }
+ }
+ }
+ else {
+ // MediaType supported
+ CHECK_HR(hr = pHandler->SetCurrentMediaType(pIputTypeMain));
+ }
+
+ if(pVideoProcessor && !pNodeVideoProcessor) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pVideoProcessor, 0, &pNodeVideoProcessor));
+ CHECK_HR(hr = pNodeVideoProcessor->SetTopoNodeID(MFUtils::g_ullTopoIdVideoProcessor));
+ }
+ if(pConvColor && !pNodeConvColor) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pConvColor, 0, &pNodeConvColor));
+ }
+ if(pConvFrameRate && !pNodeConvFrameRate) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pConvFrameRate, 0, &pNodeConvFrameRate));
+ }
+ if(pConvSize && !pNodeConvSize) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pConvSize, 0, &pNodeConvSize));
+ }
+ } // if(majorType == MFMediaType_Video)
+
+
+ //
+ // Set media type
+ //
+
+ if(pTransform) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pTransform, 0, &pNodeTransform));
+ hr = pTransform->GetInputCurrentType(0, &pTransformInputType);
+ if(FAILED(hr)) {
+ pTransformInputType = pIputTypeMain;
+ pTransformInputType->AddRef();
+ hr = S_OK;
+ }
+ if(pVideoProcessor) {
+ CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pTransformInputType, 0));
+ }
+ else {
+ if(pConvColor) {
+ /*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pTransformInputType, 0));
+ }
+ if(pConvFrameRate) {
+ /*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pTransformInputType, 0));
+ }
+ if(pConvSize) {
+ // Transform requires NV12
+ //Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) doesn't support NV12
+ //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pTransformInputType, 0));
+ }
+ }
+ }
+ else {
+ hr = pNodeSinkMain->GetInputPrefType(0, &pSinkMainInputType);
+ if(FAILED(hr)) {
+ pSinkMainInputType = pIputTypeMain;
+ pSinkMainInputType->AddRef();
+ hr = S_OK;
+ }
+ if(SUCCEEDED(hr)) {
+ if(pVideoProcessor) {
+ CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pSinkMainInputType, 0));
+ }
+ else {
+ //!\ MUST NOT SET OUTPUT TYPE
+ if(pConvColor) {
+ //*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pSinkMainInputType, 0));
+ }
+ if(pConvFrameRate) {
+ //*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pSinkMainInputType, 0));
+ }
+ if(pConvSize) {
+ //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pSinkMainInputType, 0));
+ }
+ }
+ }
+ }
+
+ //
+ // Connect
+ //
+
+ if(pNodeTee) {
+ // Connect(Source -> Tee)
+ CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeTee, 0));
+
+ // Connect(Tee -> SinkPreview)
+ CHECK_HR(hr = pNodeTee->ConnectOutput(1, pNodeSinkPreview, 0));
+
+            // Connect(Tee -> (Processors))
+ if(pVideoProcessor) {
+ CHECK_HR(hr = pNodeTee->ConnectOutput(0, pNodeVideoProcessor, 0));
+ pcNodeBeforeSinkMain = pNodeVideoProcessor;
+ }
+ else if(pNodeConvFrameRate || pNodeConvSize || pNodeConvColor) {
+ CHECK_HR(hr = ConnectConverters(
+ pNodeTee,
+ 0,
+ pNodeConvFrameRate,
+ pNodeConvColor,
+ pNodeConvSize
+ ));
+ pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
+ }
+ else {
+ pcNodeBeforeSinkMain = pNodeTee;
+ }
+ }
+ else {
+ // Connect(Source -> (Processors))
+ if(pVideoProcessor) {
+ CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeVideoProcessor, 0));
+ pcNodeBeforeSinkMain = pNodeVideoProcessor;
+ }
+            else if(pNodeConvFrameRate || pNodeConvSize || pNodeConvColor) {
+ CHECK_HR(hr = ConnectConverters(
+ pNodeSource,
+ 0,
+ pNodeConvFrameRate,
+ pNodeConvSize,
+ pNodeConvColor
+ ));
+ pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
+ }
+ else {
+ pcNodeBeforeSinkMain = pNodeSource;
+ }
+ }
+
+
+ if(pNodeTransform) {
+ // Connect(X->Transform)
+ CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeTransform, 0));
+ pcNodeBeforeSinkMain = pNodeTransform;
+ }
+
+ // Connect(X -> SinkMain)
+ CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeSinkMain, 0));
+
+ bSourceFound = TRUE;
+ break;
+ }
+ else {
+ CHECK_HR(hr = pPD->DeselectStream(i));
+ }
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ }
+
+ *ppTopo = pTopology;
+ (*ppTopo)->AddRef();
bail:
- SafeRelease(&pTopology);
- SafeRelease(&pNodeSource);
- SafeRelease(&pNodeSinkMain);
- SafeRelease(&pNodeSinkPreview);
- SafeRelease(&pNodeTransform);
- SafeRelease(&pNodeTee);
- SafeRelease(&pPD);
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- SafeRelease(&pMediaType);
- SafeRelease(&pTransformInputType);
- SafeRelease(&pSinkMainInputType);
-
- SafeRelease(&pVideoProcessor);
+ SafeRelease(&pTopology);
+ SafeRelease(&pNodeSource);
+ SafeRelease(&pNodeSinkMain);
+ SafeRelease(&pNodeSinkPreview);
+ SafeRelease(&pNodeTransform);
+ SafeRelease(&pNodeTee);
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+ SafeRelease(&pTransformInputType);
+ SafeRelease(&pSinkMainInputType);
+
+ SafeRelease(&pVideoProcessor);
SafeRelease(&pNodeVideoProcessor);
- SafeRelease(&pConvFrameRate);
- SafeRelease(&pConvSize);
- SafeRelease(&pConvColor);
- SafeRelease(&pNodeConvFrameRate);
- SafeRelease(&pNodeConvSize);
- SafeRelease(&pNodeConvColor);
-
- if(!bSourceFound)
- {
- TSK_DEBUG_ERROR("No source node found");
- return E_NOT_SET;
- }
-
- return hr;
+ SafeRelease(&pConvFrameRate);
+ SafeRelease(&pConvSize);
+ SafeRelease(&pConvColor);
+ SafeRelease(&pNodeConvFrameRate);
+ SafeRelease(&pNodeConvSize);
+ SafeRelease(&pNodeConvColor);
+
+ if(!bSourceFound) {
+ TSK_DEBUG_ERROR("No source node found");
+ return E_NOT_SET;
+ }
+
+ return hr;
}
// Creates a fully loaded topology from the input partial topology.
HRESULT MFUtils::ResolveTopology(
- IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
- IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
- IMFTopology *pCurrentTopo /*= NULL*/ // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
- )
+ IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
+ IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
+ IMFTopology *pCurrentTopo /*= NULL*/ // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
+)
{
- assert(ppOutputTopo && pInputTopo);
-
- HRESULT hr = S_OK;
- IMFTopoLoader* pTopoLoader = NULL;
-
- *ppOutputTopo = NULL;
-
- CHECK_HR(hr = MFCreateTopoLoader(&pTopoLoader));
- CHECK_HR(hr = pTopoLoader->Load(pInputTopo, ppOutputTopo, pCurrentTopo));
-
+ assert(ppOutputTopo && pInputTopo);
+
+ HRESULT hr = S_OK;
+ IMFTopoLoader* pTopoLoader = NULL;
+
+ *ppOutputTopo = NULL;
+
+ CHECK_HR(hr = MFCreateTopoLoader(&pTopoLoader));
+ CHECK_HR(hr = pTopoLoader->Load(pInputTopo, ppOutputTopo, pCurrentTopo));
+
bail:
- SafeRelease(&pTopoLoader);
- return hr;
+ SafeRelease(&pTopoLoader);
+ return hr;
}
HRESULT MFUtils::FindNodeObject(
- IMFTopology *pInputTopo, // The Topology containing the node to find
- TOPOID qwTopoNodeID, //The identifier for the node
- void** ppObject // Receives the Object
- )
+ IMFTopology *pInputTopo, // The Topology containing the node to find
+    TOPOID qwTopoNodeID, // The identifier for the node
+ void** ppObject // Receives the Object
+)
{
- assert(pInputTopo && ppObject);
+ assert(pInputTopo && ppObject);
- *ppObject = NULL;
+ *ppObject = NULL;
- IMFTopologyNode *pNode = NULL;
- HRESULT hr = S_OK;
+ IMFTopologyNode *pNode = NULL;
+ HRESULT hr = S_OK;
- CHECK_HR(hr = pInputTopo->GetNodeByID(qwTopoNodeID, &pNode));
- CHECK_HR(hr = pNode->GetObject((IUnknown**)ppObject));
+ CHECK_HR(hr = pInputTopo->GetNodeByID(qwTopoNodeID, &pNode));
+ CHECK_HR(hr = pNode->GetObject((IUnknown**)ppObject));
bail:
- SafeRelease(&pNode);
- return hr;
+ SafeRelease(&pNode);
+ return hr;
}
// Create an activation object for a renderer, based on the stream media type.
@@ -1402,7 +1289,7 @@ HRESULT MFUtils::CreateMediaSinkActivate(
IMFActivate **ppActivate
)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
IMFMediaTypeHandler *pHandler = NULL;
IMFActivate *pActivate = NULL;
@@ -1411,29 +1298,25 @@ HRESULT MFUtils::CreateMediaSinkActivate(
// Get the major media type.
GUID guidMajorType;
CHECK_HR(hr = pHandler->GetMajorType(&guidMajorType));
-
+
// Create an IMFActivate object for the renderer, based on the media type.
- if (MFMediaType_Audio == guidMajorType)
- {
+ if (MFMediaType_Audio == guidMajorType) {
// Create the audio renderer.
CHECK_HR(hr = MFCreateAudioRendererActivate(&pActivate));
}
- else if (MFMediaType_Video == guidMajorType)
- {
+ else if (MFMediaType_Video == guidMajorType) {
// Create the video renderer.
CHECK_HR(hr = MFCreateVideoRendererActivate(hVideoWindow, &pActivate));
}
- else
- {
- // Unknown stream type.
+ else {
+ // Unknown stream type.
hr = E_FAIL;
// Optionally, you could deselect this stream instead of failing.
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
goto bail;
}
-
+
// Return IMFActivate pointer to caller.
*ppActivate = pActivate;
(*ppActivate)->AddRef();
@@ -1447,92 +1330,87 @@ bail:
// Set source output media type
HRESULT MFUtils::SetMediaType(
IMFMediaSource *pSource, // Media source.
- IMFMediaType* pMediaType // Media Type.
- )
+ IMFMediaType* pMediaType // Media Type.
+)
{
- assert(pSource && pMediaType);
-
- IMFPresentationDescriptor *pPD = NULL;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
-
- HRESULT hr = S_OK;
- DWORD cStreams = 0;
- GUID inputMajorType;
-
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
- CHECK_HR(hr = pMediaType->GetMajorType(&inputMajorType));
-
- for (DWORD i = 0; i < cStreams; i++)
- {
- BOOL fSelected = FALSE;
- GUID majorType;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetMajorType(&majorType));
-
- if (majorType == inputMajorType && fSelected)
- {
- CHECK_HR(hr = pHandler->SetCurrentMediaType(pMediaType));
- }
- else
- {
- CHECK_HR(hr = pPD->DeselectStream(i));
- }
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- }
+ assert(pSource && pMediaType);
+
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+
+ HRESULT hr = S_OK;
+ DWORD cStreams = 0;
+ GUID inputMajorType;
+
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+ CHECK_HR(hr = pMediaType->GetMajorType(&inputMajorType));
+
+ for (DWORD i = 0; i < cStreams; i++) {
+ BOOL fSelected = FALSE;
+ GUID majorType;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+ if (majorType == inputMajorType && fSelected) {
+ CHECK_HR(hr = pHandler->SetCurrentMediaType(pMediaType));
+ }
+ else {
+ CHECK_HR(hr = pPD->DeselectStream(i));
+ }
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ }
bail:
- SafeRelease(&pPD);
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
- return hr;
+ return hr;
}
HRESULT MFUtils::SetVideoWindow(
IMFTopology *pTopology, // Topology.
IMFMediaSource *pSource, // Media source.
HWND hVideoWnd // Window for video playback.
- )
+)
{
- HRESULT hr = S_OK;
- IMFStreamDescriptor *pSD = NULL;
- IMFPresentationDescriptor *pPD = NULL;
+ HRESULT hr = S_OK;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFPresentationDescriptor *pPD = NULL;
IMFActivate *pSinkActivate = NULL;
IMFTopologyNode *pSourceNode = NULL;
IMFTopologyNode *pOutputNode = NULL;
- DWORD cStreams = 0, iStream;
-
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
- for(iStream = 0; iStream < cStreams; ++iStream)
- {
- BOOL fSelected = FALSE;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(iStream, &fSelected, &pSD));
-
- if (fSelected)
- {
- // Create the media sink activation object.
- CHECK_HR(hr = CreateMediaSinkActivate(pSD, hVideoWnd, &pSinkActivate));
- // Add a source node for this stream.
- CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pSourceNode));
- // Create the output node for the renderer.
- CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, &pOutputNode));
- // Connect the source node to the output node.
- CHECK_HR(hr = pSourceNode->ConnectOutput(0, pOutputNode, 0));
- }
- // else: If not selected, don't add the branch.
- }
+ DWORD cStreams = 0, iStream;
+
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+ for(iStream = 0; iStream < cStreams; ++iStream) {
+ BOOL fSelected = FALSE;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(iStream, &fSelected, &pSD));
+
+ if (fSelected) {
+ // Create the media sink activation object.
+ CHECK_HR(hr = CreateMediaSinkActivate(pSD, hVideoWnd, &pSinkActivate));
+ // Add a source node for this stream.
+ CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pSourceNode));
+ // Create the output node for the renderer.
+ CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, &pOutputNode));
+ // Connect the source node to the output node.
+ CHECK_HR(hr = pSourceNode->ConnectOutput(0, pOutputNode, 0));
+ }
+ // else: If not selected, don't add the branch.
+ }
bail:
- SafeRelease(&pPD);
+ SafeRelease(&pPD);
SafeRelease(&pSD);
SafeRelease(&pSinkActivate);
SafeRelease(&pSourceNode);
@@ -1542,407 +1420,374 @@ bail:
// Run the session
HRESULT MFUtils::RunSession(
- IMFMediaSession *pSession, // Session to run
- IMFTopology *pTopology // The toppology
- )
+ IMFMediaSession *pSession, // Session to run
+    IMFTopology *pTopology // The topology
+)
{
- assert(pSession && pTopology);
-
- IMFMediaEvent *pEvent = NULL;
-
- PROPVARIANT var;
- PropVariantInit(&var);
-
- MediaEventType met;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- CHECK_HR(hr = pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pTopology)); // MFSESSION_SETTOPOLOGY_IMMEDIATE required to update (reload) topology when media type change
- CHECK_HR(hr = pSession->Start(&GUID_NULL, &var));
-
- // Check first event
- hr = pSession->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pEvent);
- if(hr == MF_E_NO_EVENTS_AVAILABLE || hr == MF_E_MULTIPLE_SUBSCRIBERS){ // MF_E_MULTIPLE_SUBSCRIBERS means already listening
- hr = S_OK;
- goto bail;
- }
- if(pEvent) {
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- }
- else {
- hrStatus = hr;
- }
- if (FAILED(hrStatus))
- {
- CHECK_HR(hr = pEvent->GetType(&met));
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
+ assert(pSession && pTopology);
+
+ IMFMediaEvent *pEvent = NULL;
+
+ PROPVARIANT var;
+ PropVariantInit(&var);
+
+ MediaEventType met;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+    CHECK_HR(hr = pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pTopology)); // MFSESSION_SETTOPOLOGY_IMMEDIATE required to update (reload) the topology when the media type changes
+ CHECK_HR(hr = pSession->Start(&GUID_NULL, &var));
+
+ // Check first event
+ hr = pSession->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pEvent);
+ if(hr == MF_E_NO_EVENTS_AVAILABLE || hr == MF_E_MULTIPLE_SUBSCRIBERS) { // MF_E_MULTIPLE_SUBSCRIBERS means already listening
+ hr = S_OK;
+ goto bail;
+ }
+ if(pEvent) {
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ }
+ else {
+ hrStatus = hr;
+ }
+ if (FAILED(hrStatus)) {
+ CHECK_HR(hr = pEvent->GetType(&met));
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
bail:
- SafeRelease(&pEvent);
- return hr;
+ SafeRelease(&pEvent);
+ return hr;
}
// Stop session
HRESULT MFUtils::ShutdownSession(
- IMFMediaSession *pSession, // The Session
- IMFMediaSource *pSource // Source to shutdown (optional)
- )
+ IMFMediaSession *pSession, // The Session
+ IMFMediaSource *pSource // Source to shutdown (optional)
+)
{
- // MUST be source then session
- if(pSource){
- pSource->Stop();
- pSource->Shutdown();
- }
- if(pSession){
- pSession->Shutdown();
- }
- return S_OK;
+ // MUST be source then session
+ if(pSource) {
+ pSource->Stop();
+ pSource->Shutdown();
+ }
+ if(pSession) {
+ pSession->Shutdown();
+ }
+ return S_OK;
}
// Pause session
HRESULT MFUtils::PauseSession(
- IMFMediaSession *pSession, // The session
- IMFMediaSource *pSource // Source to pause (optional)
- )
+ IMFMediaSession *pSession, // The session
+ IMFMediaSource *pSource // Source to pause (optional)
+)
{
- if(!pSession){
- return E_INVALIDARG;
- }
- if(pSource){
- pSource->Pause();
- }
- return pSession->Pause();
+ if(!pSession) {
+ return E_INVALIDARG;
+ }
+ if(pSource) {
+ pSource->Pause();
+ }
+ return pSession->Pause();
}
// Returns -1 if none is supported
INT MFUtils::GetSupportedSubTypeIndex(
- IMFMediaSource *pSource, // The source
- const GUID& mediaType, // The MediaType
- const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
- )
+ IMFMediaSource *pSource, // The source
+ const GUID& mediaType, // The MediaType
+ const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
+)
{
- assert(pSource);
-
- IMFPresentationDescriptor *pPD = NULL;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
- IMFMediaType *pMediaType = NULL;
-
- INT nIndex = -1;
- HRESULT hr = S_OK;
- DWORD cStreams = 0, cMediaTypesCount;
- GUID majorType, subType;
- BOOL fSelected;
-
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
- for (UINT subTypesIndex = 0; subTypesIndex < subTypesCount && nIndex == -1; ++subTypesIndex)
- {
- for (DWORD cStreamIndex = 0; cStreamIndex < cStreams && nIndex == -1; ++cStreamIndex)
- {
- fSelected = FALSE;
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
- if(fSelected)
- {
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetMajorType(&majorType));
- if(majorType == mediaType)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
- for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount && nIndex == -1; ++cMediaTypesIndex)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
- if (subTypes[subTypesIndex].fourcc == subType)
- {
- nIndex = subTypesIndex;
- break;
- }
- SafeRelease(&pMediaType);
- }
- }
- }
-
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- }
- }
+ assert(pSource);
+
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFMediaType *pMediaType = NULL;
+
+ INT nIndex = -1;
+ HRESULT hr = S_OK;
+ DWORD cStreams = 0, cMediaTypesCount;
+ GUID majorType, subType;
+ BOOL fSelected;
+
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+ for (UINT subTypesIndex = 0; subTypesIndex < subTypesCount && nIndex == -1; ++subTypesIndex) {
+ for (DWORD cStreamIndex = 0; cStreamIndex < cStreams && nIndex == -1; ++cStreamIndex) {
+ fSelected = FALSE;
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
+ if(fSelected) {
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+ if(majorType == mediaType) {
+ CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+ for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount && nIndex == -1; ++cMediaTypesIndex) {
+ CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+ if (subTypes[subTypesIndex].fourcc == subType) {
+ nIndex = subTypesIndex;
+ break;
+ }
+ SafeRelease(&pMediaType);
+ }
+ }
+ }
+
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ }
+ }
bail:
- SafeRelease(&pMediaType);
- SafeRelease(&pPD);
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
- return nIndex;
+ return nIndex;
}
HRESULT MFUtils::IsSupported(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nFps,
- const GUID& guidFormat,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- )
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nFps,
+ const GUID& guidFormat,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+)
{
- HRESULT hr = S_OK;
-
- BOOL fSelected = FALSE;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
- IMFMediaType *pMediaType = NULL;
- UINT32 _nWidth = 0, _nHeight = 0, numeratorFps = 0, denominatorFps = 0;
- GUID subType;
- DWORD cMediaTypesCount;
-
- if(!pPD || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- *pbSupportedSize = FALSE;
- *pbSupportedFps = FALSE;
- *pbSupportedFormat = FALSE;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
- if(fSelected)
- {
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
- for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
- CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
- if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps)))
- {
- numeratorFps = 30;
- denominatorFps = 1;
- }
-
- // all must match for the same stream
- if(_nWidth == nWidth && _nHeight == nHeight && subType == guidFormat && (numeratorFps/denominatorFps) == nFps)
- {
- *pbSupportedSize = TRUE;
- *pbSupportedFormat = TRUE;
- *pbSupportedFps = TRUE;
- break;
- }
-
- SafeRelease(&pMediaType);
- }
- SafeRelease(&pHandler);
- }
-
+ HRESULT hr = S_OK;
+
+ BOOL fSelected = FALSE;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFMediaType *pMediaType = NULL;
+ UINT32 _nWidth = 0, _nHeight = 0, numeratorFps = 0, denominatorFps = 0;
+ GUID subType;
+ DWORD cMediaTypesCount;
+
+ if(!pPD || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ *pbSupportedSize = FALSE;
+ *pbSupportedFps = FALSE;
+ *pbSupportedFormat = FALSE;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
+ if(fSelected) {
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+ for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex) {
+ CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+ if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps))) {
+ numeratorFps = 30;
+ denominatorFps = 1;
+ }
+
+ // all must match for the same stream
+ if(_nWidth == nWidth && _nHeight == nHeight && subType == guidFormat && (numeratorFps/denominatorFps) == nFps) {
+ *pbSupportedSize = TRUE;
+ *pbSupportedFormat = TRUE;
+ *pbSupportedFps = TRUE;
+ break;
+ }
+
+ SafeRelease(&pMediaType);
+ }
+ SafeRelease(&pHandler);
+ }
+
bail:
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- SafeRelease(&pMediaType);
-
- return hr;
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+
+ return hr;
}
HRESULT MFUtils::IsSupported(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- IMFMediaType* pMediaType,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- )
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ IMFMediaType* pMediaType,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+)
{
- HRESULT hr = S_OK;
-
- UINT32 nWidth = 0, nHeight = 0, nFps = 0, numeratorFps = 30, denominatorFps = 1;
- GUID subType;
-
- if(!pPD || !pMediaType || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &nWidth, &nHeight));
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
- if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps)))
- {
- numeratorFps = 30;
- denominatorFps = 1;
- }
-
- CHECK_HR(hr = IsSupported(
- pPD,
- cStreamIndex,
- nWidth,
- nHeight,
- (numeratorFps / denominatorFps),
- subType,
- pbSupportedSize,
- pbSupportedFps,
- pbSupportedFormat
- ));
+ HRESULT hr = S_OK;
+
+ UINT32 nWidth = 0, nHeight = 0, nFps = 0, numeratorFps = 30, denominatorFps = 1;
+ GUID subType;
+
+ if(!pPD || !pMediaType || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &nWidth, &nHeight));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+ if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps))) {
+ numeratorFps = 30;
+ denominatorFps = 1;
+ }
+
+ CHECK_HR(hr = IsSupported(
+ pPD,
+ cStreamIndex,
+ nWidth,
+ nHeight,
+ (numeratorFps / denominatorFps),
+ subType,
+ pbSupportedSize,
+ pbSupportedFps,
+ pbSupportedFormat
+ ));
bail:
- return hr;
+ return hr;
}
HRESULT MFUtils::IsSupportedByInput(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- IMFTopologyNode *pNode,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- )
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ IMFTopologyNode *pNode,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+)
{
- HRESULT hr = S_OK;
-
- IMFMediaType *pMediaType = NULL;
- IUnknown* pObject = NULL;
- IMFActivate *pActivate = NULL;
- IMFMediaSink *pMediaSink = NULL;
- IMFTransform *pTransform = NULL;
- IMFStreamSink *pStreamSink = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
-
- if(!pPD || !pNode || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- CHECK_HR(hr = pNode->GetObject(&pObject));
- hr = pObject->QueryInterface(IID_PPV_ARGS(&pActivate));
- if(SUCCEEDED(hr))
- {
- SafeRelease(&pObject);
- hr = pActivate->ActivateObject(IID_IMFMediaSink, (void**)&pObject);
- if(FAILED(hr))
- {
- hr = pActivate->ActivateObject(IID_IMFTransform, (void**)&pObject);
- }
- }
-
- if(!pObject)
- {
- CHECK_HR(hr = E_NOINTERFACE);
- }
-
- hr = pObject->QueryInterface(IID_PPV_ARGS(&pMediaSink));
- if(FAILED(hr))
- {
- hr = pObject->QueryInterface(IID_PPV_ARGS(&pTransform));
- }
-
-
-
- if(pMediaSink)
- {
- CHECK_HR(hr = pMediaSink->GetStreamSinkByIndex(0, &pStreamSink));
- CHECK_HR(hr = pStreamSink->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetCurrentMediaType(&pMediaType));
-
- }
- else if(pTransform)
- {
- CHECK_HR(hr = pTransform->GetInputCurrentType(0, &pMediaType));
- }
- else
- {
- CHECK_HR(hr = pNode->GetInputPrefType(0, &pMediaType));
- }
-
- CHECK_HR(hr = IsSupported(
- pPD,
- cStreamIndex,
- pMediaType,
- pbSupportedSize,
- pbSupportedFps,
- pbSupportedFormat
- ));
+ HRESULT hr = S_OK;
+
+ IMFMediaType *pMediaType = NULL;
+ IUnknown* pObject = NULL;
+ IMFActivate *pActivate = NULL;
+ IMFMediaSink *pMediaSink = NULL;
+ IMFTransform *pTransform = NULL;
+ IMFStreamSink *pStreamSink = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+
+ if(!pPD || !pNode || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ CHECK_HR(hr = pNode->GetObject(&pObject));
+ hr = pObject->QueryInterface(IID_PPV_ARGS(&pActivate));
+ if(SUCCEEDED(hr)) {
+ SafeRelease(&pObject);
+ hr = pActivate->ActivateObject(IID_IMFMediaSink, (void**)&pObject);
+ if(FAILED(hr)) {
+ hr = pActivate->ActivateObject(IID_IMFTransform, (void**)&pObject);
+ }
+ }
+
+ if(!pObject) {
+ CHECK_HR(hr = E_NOINTERFACE);
+ }
+
+ hr = pObject->QueryInterface(IID_PPV_ARGS(&pMediaSink));
+ if(FAILED(hr)) {
+ hr = pObject->QueryInterface(IID_PPV_ARGS(&pTransform));
+ }
+
+
+
+ if(pMediaSink) {
+ CHECK_HR(hr = pMediaSink->GetStreamSinkByIndex(0, &pStreamSink));
+ CHECK_HR(hr = pStreamSink->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetCurrentMediaType(&pMediaType));
+
+ }
+ else if(pTransform) {
+ CHECK_HR(hr = pTransform->GetInputCurrentType(0, &pMediaType));
+ }
+ else {
+ CHECK_HR(hr = pNode->GetInputPrefType(0, &pMediaType));
+ }
+
+ CHECK_HR(hr = IsSupported(
+ pPD,
+ cStreamIndex,
+ pMediaType,
+ pbSupportedSize,
+ pbSupportedFps,
+ pbSupportedFormat
+ ));
bail:
- SafeRelease(&pObject);
- SafeRelease(&pActivate);
- SafeRelease(&pMediaType);
- SafeRelease(&pStreamSink);
- SafeRelease(&pHandler);
- return hr;
+ SafeRelease(&pObject);
+ SafeRelease(&pActivate);
+ SafeRelease(&pMediaType);
+ SafeRelease(&pStreamSink);
+ SafeRelease(&pHandler);
+ return hr;
}
HRESULT MFUtils::ConnectConverters(
- IMFTopologyNode *pNode,
- DWORD dwOutputIndex,
- IMFTopologyNode *pNodeConvFrameRate,
- IMFTopologyNode *pNodeConvColor,
- IMFTopologyNode *pNodeConvSize
- )
+ IMFTopologyNode *pNode,
+ DWORD dwOutputIndex,
+ IMFTopologyNode *pNodeConvFrameRate,
+ IMFTopologyNode *pNodeConvColor,
+ IMFTopologyNode *pNodeConvSize
+)
{
- HRESULT hr = S_OK;
-
- if(!pNode)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(pNodeConvFrameRate)
- {
- CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvFrameRate, 0));
- if(pNodeConvSize)
- {
- CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvSize, 0));
- if(pNodeConvColor)
- {
- CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
- }
- }
- else
- {
- if(pNodeConvColor)
- {
- CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvColor, 0));
- }
- }
- }
- else
- {
- if(pNodeConvSize)
- {
- CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvSize, 0));
- if(pNodeConvColor)
- {
- CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
- }
- }
- else
- {
- if(pNodeConvColor)
- {
- CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvColor, 0));
- }
- }
- }
+ HRESULT hr = S_OK;
+
+ if(!pNode) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(pNodeConvFrameRate) {
+ CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvFrameRate, 0));
+ if(pNodeConvSize) {
+ CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvSize, 0));
+ if(pNodeConvColor) {
+ CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
+ }
+ }
+ else {
+ if(pNodeConvColor) {
+ CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvColor, 0));
+ }
+ }
+ }
+ else {
+ if(pNodeConvSize) {
+ CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvSize, 0));
+ if(pNodeConvColor) {
+ CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
+ }
+ }
+ else {
+ if(pNodeConvColor) {
+ CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvColor, 0));
+ }
+ }
+ }
bail:
- return hr;
+ return hr;
}
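The nested branches above reduce to one rule: drop whichever converters are NULL and wire the survivors in the fixed order frame-rate, then size, then color, each through stream 0 (the first hop uses dwOutputIndex). A minimal standalone sketch of that rule (illustrative only; the Node type and ConnectChain are hypothetical stand-ins, not Media Foundation code):

#include <cstdio>
#include <vector>

struct Node { const char* name; };                 // stand-in for IMFTopologyNode

static void ConnectChain(Node* pSource, Node* pConvFrameRate, Node* pConvSize, Node* pConvColor)
{
    std::vector<Node*> chain = { pSource };
    for (Node* n : { pConvFrameRate, pConvSize, pConvColor }) {
        if (n) {
            chain.push_back(n);                    // skip missing converters, keep the order
        }
    }
    for (size_t i = 0; i + 1 < chain.size(); ++i) {
        std::printf("%s -> %s\n", chain[i]->name, chain[i + 1]->name); // real code: ConnectOutput(0, next, 0)
    }
}

int main()
{
    Node src = { "source" }, size = { "resizer" }, color = { "color-converter" };
    ConnectChain(&src, nullptr, &size, &color);    // frame-rate converter absent
    return 0;
}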
// This function should be called only if VideoProcessor is not supported
HRESULT MFUtils::GetBestFormat(
- IMFMediaSource *pSource,
- const GUID *pSubType,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nFps,
- UINT32 *pnWidth,
- UINT32 *pnHeight,
- UINT32 *pnFps,
- const VideoSubTypeGuidPair **ppSubTypeGuidPair
- )
+ IMFMediaSource *pSource,
+ const GUID *pSubType,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nFps,
+ UINT32 *pnWidth,
+ UINT32 *pnHeight,
+ UINT32 *pnFps,
+ const VideoSubTypeGuidPair **ppSubTypeGuidPair
+)
{
#define _FindPairByGuid(_guid, _index) { \
@@ -1954,151 +1799,146 @@ HRESULT MFUtils::GetBestFormat(
} \
}
#if 0
- *pnWidth = 640;
- *pnHeight = 480;
- *pnFps = 30;
- return S_OK;
-#else
- HRESULT hr = S_OK;
- IMFPresentationDescriptor *pPD = NULL;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
- IMFMediaType *pMediaType = NULL;
- DWORD cStreams = 0, cMediaTypesCount;
- GUID majorType, subType, _BestSubType;
- BOOL bFound = FALSE, fSelected;
- UINT32 _nWidth, _nHeight, numeratorFps, denominatorFps, _nFps, _nScore, _nBestScore;
- int PreferredVideoSubTypeGuidPairIndex;
- static const UINT32 kSubTypeMismatchPad = _UI32_MAX >> 4;
- static const UINT32 kFpsMismatchPad = _UI32_MAX >> 2;
-
- if (!ppSubTypeGuidPair || !pSubType) {
- CHECK_HR(hr = E_INVALIDARG);
- }
- _FindPairByGuid(*pSubType, PreferredVideoSubTypeGuidPairIndex);
- if (PreferredVideoSubTypeGuidPairIndex == -1) {
- CHECK_HR(hr = E_INVALIDARG);
- }
- *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
-
- _nBestScore = _UI32_MAX;
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
-
- for (DWORD i = 0; i < cStreams; i++)
- {
- fSelected = FALSE;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
-
- if (fSelected)
- {
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
-
- CHECK_HR(hr = pHandler->GetMajorType(&majorType));
-
- if(majorType == MFMediaType_Video)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
-
- for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
-
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
- // if(subType == *pSubType)
- {
- CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
- CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps));
- _nFps = (numeratorFps / denominatorFps);
-
- if (subType == *pSubType) {
- _nScore = 0;
- }
- else {
- _FindPairByGuid(subType, PreferredVideoSubTypeGuidPairIndex);
- if (PreferredVideoSubTypeGuidPairIndex == -1) {
- _nScore = kSubTypeMismatchPad; // Not a must but important: If(!VideoProcess) then CLSID_CColorConvertDMO
- }
- else {
- _nScore = kSubTypeMismatchPad >> (PreferredVideoSubTypeGuidPairsCount - PreferredVideoSubTypeGuidPairIndex);
- }
- }
- _nScore += abs((int)(_nWidth - nWidth)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
- _nScore += abs((int)(_nHeight - nHeight)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
- _nScore += (_nFps == nFps) ? 0 : kFpsMismatchPad; // Fps is a must because without video processor no alternative exist (CLSID_CFrameRateConvertDmo doesn't support I420)
-
- if (_nScore <= _nBestScore || !bFound)
- {
- *pnWidth = _nWidth;
- *pnHeight = _nHeight;
- *pnFps = _nFps;
- bFound = TRUE;
- _BestSubType = subType;
- _nBestScore = _nScore;
- }
- }
-
- SafeRelease(&pMediaType);
- }
- }
- }
-
- SafeRelease(&pHandler);
- SafeRelease(&pSD);
- }
+ *pnWidth = 640;
+ *pnHeight = 480;
+ *pnFps = 30;
+ return S_OK;
+#else
+ HRESULT hr = S_OK;
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFMediaType *pMediaType = NULL;
+ DWORD cStreams = 0, cMediaTypesCount;
+ GUID majorType, subType, _BestSubType;
+ BOOL bFound = FALSE, fSelected;
+ UINT32 _nWidth, _nHeight, numeratorFps, denominatorFps, _nFps, _nScore, _nBestScore;
+ int PreferredVideoSubTypeGuidPairIndex;
+ static const UINT32 kSubTypeMismatchPad = _UI32_MAX >> 4;
+ static const UINT32 kFpsMismatchPad = _UI32_MAX >> 2;
+
+ if (!ppSubTypeGuidPair || !pSubType) {
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+ _FindPairByGuid(*pSubType, PreferredVideoSubTypeGuidPairIndex);
+ if (PreferredVideoSubTypeGuidPairIndex == -1) {
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+ *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
+
+ _nBestScore = _UI32_MAX;
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+
+ for (DWORD i = 0; i < cStreams; i++) {
+ fSelected = FALSE;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+
+ if (fSelected) {
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+ if(majorType == MFMediaType_Video) {
+ CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+
+ for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex) {
+ CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+ // if(subType == *pSubType)
+ {
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
+ CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps));
+ _nFps = (numeratorFps / denominatorFps);
+
+ if (subType == *pSubType) {
+ _nScore = 0;
+ }
+ else {
+ _FindPairByGuid(subType, PreferredVideoSubTypeGuidPairIndex);
+ if (PreferredVideoSubTypeGuidPairIndex == -1) {
+ _nScore = kSubTypeMismatchPad; // Not a must but important: If(!VideoProcess) then CLSID_CColorConvertDMO
+ }
+ else {
+ _nScore = kSubTypeMismatchPad >> (PreferredVideoSubTypeGuidPairsCount - PreferredVideoSubTypeGuidPairIndex);
+ }
+ }
+ _nScore += abs((int)(_nWidth - nWidth)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
+ _nScore += abs((int)(_nHeight - nHeight)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
+                        _nScore += (_nFps == nFps) ? 0 : kFpsMismatchPad; // Fps is a must because without video processor no alternative exists (CLSID_CFrameRateConvertDmo doesn't support I420)
+
+ if (_nScore <= _nBestScore || !bFound) {
+ *pnWidth = _nWidth;
+ *pnHeight = _nHeight;
+ *pnFps = _nFps;
+ bFound = TRUE;
+ _BestSubType = subType;
+ _nBestScore = _nScore;
+ }
+ }
+
+ SafeRelease(&pMediaType);
+ }
+ }
+ }
+
+ SafeRelease(&pHandler);
+ SafeRelease(&pSD);
+ }
bail:
- SafeRelease(&pPD);
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- SafeRelease(&pMediaType);
-
- _FindPairByGuid(_BestSubType, PreferredVideoSubTypeGuidPairIndex);
- if (PreferredVideoSubTypeGuidPairIndex != -1) {
- *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
- }
- else /*if (_nBestScore > kSubTypeMismatchPad)*/ {
- *pnWidth = 640;
- *pnHeight = 480;
- *pnFps = 30;
- TSK_DEBUG_WARN("Failed to math subtype...using VGA@30fps");
- }
-
- return SUCCEEDED(hr) ? (bFound ? S_OK : E_NOT_SET): hr;
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+
+ _FindPairByGuid(_BestSubType, PreferredVideoSubTypeGuidPairIndex);
+ if (PreferredVideoSubTypeGuidPairIndex != -1) {
+ *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
+ }
+ else { /*if (_nBestScore > kSubTypeMismatchPad)*/
+ *pnWidth = 640;
+ *pnHeight = 480;
+ *pnFps = 30;
+        TSK_DEBUG_WARN("Failed to match subtype...using VGA@30fps");
+ }
+
+ return SUCCEEDED(hr) ? (bFound ? S_OK : E_NOT_SET): hr;
#endif
}
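A standalone sketch of the scoring rule above (hypothetical names, not plugin code): an fps mismatch is made prohibitively expensive, a subtype mismatch expensive, and the frame-size distance acts as a tie-breaker, so the candidate with the smallest score wins.

#include <cstdint>
#include <cstdio>
#include <cstdlib>

struct Mode { uint32_t width, height, fps; bool subTypeMatches; };

static uint32_t Score(const Mode& m, uint32_t w, uint32_t h, uint32_t fps)
{
    const uint32_t kSubTypePad = UINT32_MAX >> 4;  // subtype mismatch: costly (color converter needed)
    const uint32_t kFpsPad     = UINT32_MAX >> 2;  // fps mismatch: prohibitive (no fallback MFT)
    uint32_t score = m.subTypeMatches ? 0 : kSubTypePad;
    score += (uint32_t)std::abs((int)(m.width  - w));
    score += (uint32_t)std::abs((int)(m.height - h));
    score += (m.fps == fps) ? 0 : kFpsPad;
    return score;
}

int main()
{
    Mode modes[] = { {640, 480, 30, false}, {1280, 720, 30, true}, {640, 480, 15, true} };
    uint32_t best = 0, bestScore = UINT32_MAX;
    for (uint32_t i = 0; i < 3; ++i) {
        uint32_t s = Score(modes[i], 640, 480, 30);
        if (s <= bestScore) { best = i; bestScore = s; }
    }
    std::printf("best mode index: %u (score %u)\n", best, bestScore);
    return 0;
}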
HWND MFUtils::GetConsoleHwnd(void)
{
- #define MY_BUFSIZE 1024 // Buffer size for console window titles.
- HWND hwndFound; // This is what is returned to the caller.
- TCHAR pszNewWindowTitle[MY_BUFSIZE]; // Contains fabricated
- // WindowTitle.
- TCHAR pszOldWindowTitle[MY_BUFSIZE]; // Contains original
- // WindowTitle.
+#define MY_BUFSIZE 1024 // Buffer size for console window titles.
+ HWND hwndFound; // This is what is returned to the caller.
+ TCHAR pszNewWindowTitle[MY_BUFSIZE]; // Contains fabricated
+ // WindowTitle.
+ TCHAR pszOldWindowTitle[MY_BUFSIZE]; // Contains original
+ // WindowTitle.
- // Fetch current window title.
- GetConsoleTitle(pszOldWindowTitle, MY_BUFSIZE);
+ // Fetch current window title.
+ GetConsoleTitle(pszOldWindowTitle, MY_BUFSIZE);
- // Format a "unique" NewWindowTitle.
- wsprintf(pszNewWindowTitle,TEXT("%d/%d"),
- GetTickCount(),
- GetCurrentProcessId());
+ // Format a "unique" NewWindowTitle.
+ wsprintf(pszNewWindowTitle,TEXT("%d/%d"),
+ GetTickCount(),
+ GetCurrentProcessId());
- // Change current window title.
- SetConsoleTitle(pszNewWindowTitle);
+ // Change current window title.
+ SetConsoleTitle(pszNewWindowTitle);
- // Ensure window title has been updated.
- Sleep(40);
+ // Ensure window title has been updated.
+ Sleep(40);
- // Look for NewWindowTitle.
- hwndFound=FindWindow(NULL, pszNewWindowTitle);
+ // Look for NewWindowTitle.
+ hwndFound=FindWindow(NULL, pszNewWindowTitle);
- // Restore original window title.
- SetConsoleTitle(pszOldWindowTitle);
+ // Restore original window title.
+ SetConsoleTitle(pszOldWindowTitle);
- return(hwndFound);
+ return(hwndFound);
}
diff --git a/plugins/pluginWinMF/internals/mf_utils.h b/plugins/pluginWinMF/internals/mf_utils.h
index 0819597..1225b3b 100755
--- a/plugins/pluginWinMF/internals/mf_utils.h
+++ b/plugins/pluginWinMF/internals/mf_utils.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -35,14 +35,13 @@
(*ppT)->Release(); \
*ppT = NULL; \
} \
-}
+}
#undef CHECK_HR
// CHECK_HR(x): when (x) is a function call it must not be expanded twice (once in "TSK_DEBUG_ERROR(x)" and once in "if(x)"), so the result is captured in __hr__ and (x) is evaluated only once
#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
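A standalone demonstration of the double-evaluation pitfall that comment refers to (HRESULT/FAILED are stubbed and the names are hypothetical): the naive form runs the argument expression twice on failure, while capturing it in a local, as CHECK_HR does, runs it exactly once.

#include <cstdio>

typedef long HRESULT;                       // stub; the real definition comes from the Windows headers
#define FAILED(hr) ((hr) < 0)

static int g_calls = 0;
static HRESULT DoWork() { ++g_calls; return -1; }   // always "fails"

// Naive: (x) appears twice, so DoWork() is called twice when it fails.
#define CHECK_HR_NAIVE(x) do { if (FAILED(x)) { std::printf("failed (%08lx)\n", (unsigned long)(x)); } } while (0)
// Same idea as CHECK_HR above: evaluate (x) once into __hr__.
#define CHECK_HR_ONCE(x)  do { HRESULT __hr__ = (x); if (FAILED(__hr__)) { std::printf("failed (%08lx)\n", (unsigned long)__hr__); } } while (0)

int main()
{
    CHECK_HR_NAIVE(DoWork());
    std::printf("naive form: %d call(s)\n", g_calls);        // prints 2
    g_calls = 0;
    CHECK_HR_ONCE(DoWork());
    std::printf("single-eval form: %d call(s)\n", g_calls);  // prints 1
    return 0;
}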
-typedef struct VideoSubTypeGuidPair
-{
+typedef struct VideoSubTypeGuidPair {
enum tmedia_chroma_e chroma;
const GUID& fourcc;
}
@@ -52,209 +51,207 @@ class MFUtils
{
public:
-static HRESULT Startup();
-static HRESULT Shutdown();
+ static HRESULT Startup();
+ static HRESULT Shutdown();
-static BOOL IsD3D9Supported();
-static BOOL IsLowLatencyH264Supported();
-static BOOL IsLowLatencyH264SupportsMaxSliceSize();
+ static BOOL IsD3D9Supported();
+ static BOOL IsLowLatencyH264Supported();
+ static BOOL IsLowLatencyH264SupportsMaxSliceSize();
-static HRESULT IsAsyncMFT(
- IMFTransform *pMFT, // The MFT to check
- BOOL* pbIsAsync // Whether the MFT is Async
- );
-static HRESULT UnlockAsyncMFT(
- IMFTransform *pMFT // The MFT to unlock
- );
+ static HRESULT IsAsyncMFT(
+ IMFTransform *pMFT, // The MFT to check
+ BOOL* pbIsAsync // Whether the MFT is Async
+ );
+ static HRESULT UnlockAsyncMFT(
+ IMFTransform *pMFT // The MFT to unlock
+ );
-static HRESULT CreatePCMAudioType(
- UINT32 sampleRate, // Samples per second
- UINT32 bitsPerSample, // Bits per sample
- UINT32 cChannels, // Number of channels
- IMFMediaType **ppType // Receives a pointer to the media type.
+ static HRESULT CreatePCMAudioType(
+ UINT32 sampleRate, // Samples per second
+ UINT32 bitsPerSample, // Bits per sample
+ UINT32 cChannels, // Number of channels
+ IMFMediaType **ppType // Receives a pointer to the media type.
+ );
+ static HRESULT CreateVideoType(
+ const GUID* subType, // video subType
+ IMFMediaType **ppType, // Receives a pointer to the media type.
+ UINT32 unWidth = 0, // Video width (0 to ignore)
+ UINT32 unHeight = 0 // Video height (0 to ignore)
+ );
+ static HRESULT ConvertVideoTypeToUncompressedType(
+ IMFMediaType *pType, // Pointer to an encoded video type.
+ const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
+ IMFMediaType **ppType // Receives a matching uncompressed video type.
+ );
+ static HRESULT CreateMediaSample(
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
+ );
+ static HRESULT ValidateVideoFormat(
+ IMFMediaType *pmt
+ );
+ static HRESULT IsVideoProcessorSupported(BOOL *pbSupported);
+ static HRESULT GetBestVideoProcessor(
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
+ IMFTransform **ppProcessor // Receives the video processor
+ );
+ static HRESULT GetBestCodec(
+ BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
+ const GUID& mediaType, // The MediaType
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
+ IMFTransform **ppMFT // Receives the decoder/encoder transform
+ );
+ static HRESULT BindOutputNode(
+ IMFTopologyNode *pNode // The Node
+ );
+ static HRESULT AddOutputNode(
+ IMFTopology *pTopology, // Topology.
+ IMFActivate *pActivate, // Media sink activation object.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+ );
+ static HRESULT AddTransformNode(
+ IMFTopology *pTopology, // Topology.
+ IMFTransform *pMFT, // MFT.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+ );
+ static HRESULT AddSourceNode(
+ IMFTopology *pTopology, // Topology.
+ IMFMediaSource *pSource, // Media source.
+ IMFPresentationDescriptor *pPD, // Presentation descriptor.
+ IMFStreamDescriptor *pSD, // Stream descriptor.
+ IMFTopologyNode **ppNode // Receives the node pointer.
);
-static HRESULT CreateVideoType(
- const GUID* subType, // video subType
- IMFMediaType **ppType, // Receives a pointer to the media type.
- UINT32 unWidth = 0, // Video width (0 to ignore)
- UINT32 unHeight = 0 // Video height (0 to ignore)
- );
-static HRESULT ConvertVideoTypeToUncompressedType(
- IMFMediaType *pType, // Pointer to an encoded video type.
- const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
- IMFMediaType **ppType // Receives a matching uncompressed video type.
+ static HRESULT CreateTopology(
+ IMFMediaSource *pSource, // Media source
+ IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
+ IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
+ IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
+ IMFMediaType *pIputTypeMain, // Main sink input MediaType
+ IMFTopology **ppTopo // Receives the newly created topology
+ );
+ static HRESULT ResolveTopology(
+ IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
+ IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
+ IMFTopology *pCurrentTopo = NULL // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
+ );
+ static HRESULT FindNodeObject(
+ IMFTopology *pInputTopo, // The Topology containing the node to find
+ TOPOID qwTopoNodeID, //The identifier for the node
+ void** ppObject // Receives the Object
+ );
+ static HRESULT CreateMediaSinkActivate(
+ IMFStreamDescriptor *pSourceSD, // Pointer to the stream descriptor.
+ HWND hVideoWindow, // Handle to the video clipping window.
+ IMFActivate **ppActivate
+ );
+ static HRESULT SetMediaType(
+ IMFMediaSource *pSource, // Media source.
+ IMFMediaType* pMediaType // Media Type.
+ );
+ static HRESULT SetVideoWindow(
+ IMFTopology *pTopology, // Topology.
+ IMFMediaSource *pSource, // Media source.
+ HWND hVideoWnd // Window for video playback.
+ );
+ static HRESULT RunSession(
+ IMFMediaSession *pSession, // Session to run
+ IMFTopology *pTopology // The toppology
+        IMFTopology *pTopology          // The topology
+ static HRESULT ShutdownSession(
+ IMFMediaSession *pSession, // The Session
+ IMFMediaSource *pSource = NULL // Source to shutdown (optional)
+ );
+ static HRESULT PauseSession(
+ IMFMediaSession *pSession, // The session
+        IMFMediaSource *pSource = NULL // Source to pause (optional)
+ );
+ static INT GetSupportedSubTypeIndex(
+ IMFMediaSource *pSource, // The source
+ const GUID& mediaType, // The MediaType
+ const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
+ );
+ static HRESULT IsSupported(
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nFps,
+ const GUID& guidFormat,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+ );
+ static HRESULT IsSupported(
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ IMFMediaType* pMediaType,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+ );
+ static HRESULT IsSupportedByInput(
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ IMFTopologyNode *pNode,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+ );
+ static HRESULT ConnectConverters(
+ IMFTopologyNode *pNode,
+ DWORD dwOutputIndex,
+ IMFTopologyNode *pNodeConvFrameRate,
+ IMFTopologyNode *pNodeConvColor,
+ IMFTopologyNode *pNodeConvSize
+ );
+ static HRESULT GetBestFormat(
+ IMFMediaSource *pSource,
+ const GUID *pSubType,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nFps,
+ UINT32 *pnWidth,
+ UINT32 *pnHeight,
+ UINT32 *pnFps,
+ const VideoSubTypeGuidPair **pSubTypeGuidPair
);
-static HRESULT CreateMediaSample(
- DWORD cbData, // Maximum buffer size
- IMFSample **ppSample // Receives the sample
- );
-static HRESULT ValidateVideoFormat(
- IMFMediaType *pmt
- );
-static HRESULT IsVideoProcessorSupported(BOOL *pbSupported);
-static HRESULT GetBestVideoProcessor(
- const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
- const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
- IMFTransform **ppProcessor // Receives the video processor
- );
-static HRESULT GetBestCodec(
- BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
- const GUID& mediaType, // The MediaType
- const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
- const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
- IMFTransform **ppMFT // Receives the decoder/encoder transform
- );
-static HRESULT BindOutputNode(
- IMFTopologyNode *pNode // The Node
- );
-static HRESULT AddOutputNode(
- IMFTopology *pTopology, // Topology.
- IMFActivate *pActivate, // Media sink activation object.
- DWORD dwId, // Identifier of the stream sink.
- IMFTopologyNode **ppNode // Receives the node pointer.
- );
-static HRESULT AddTransformNode(
- IMFTopology *pTopology, // Topology.
- IMFTransform *pMFT, // MFT.
- DWORD dwId, // Identifier of the stream sink.
- IMFTopologyNode **ppNode // Receives the node pointer.
- );
-static HRESULT AddSourceNode(
- IMFTopology *pTopology, // Topology.
- IMFMediaSource *pSource, // Media source.
- IMFPresentationDescriptor *pPD, // Presentation descriptor.
- IMFStreamDescriptor *pSD, // Stream descriptor.
- IMFTopologyNode **ppNode // Receives the node pointer.
- );
-static HRESULT CreateTopology(
- IMFMediaSource *pSource, // Media source
- IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
- IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
- IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
- IMFMediaType *pIputTypeMain, // Main sink input MediaType
- IMFTopology **ppTopo // Receives the newly created topology
- );
-static HRESULT ResolveTopology(
- IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
- IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
- IMFTopology *pCurrentTopo = NULL // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
- );
-static HRESULT FindNodeObject(
- IMFTopology *pInputTopo, // The Topology containing the node to find
- TOPOID qwTopoNodeID, //The identifier for the node
- void** ppObject // Receives the Object
- );
-static HRESULT CreateMediaSinkActivate(
- IMFStreamDescriptor *pSourceSD, // Pointer to the stream descriptor.
- HWND hVideoWindow, // Handle to the video clipping window.
- IMFActivate **ppActivate
-);
-static HRESULT SetMediaType(
- IMFMediaSource *pSource, // Media source.
- IMFMediaType* pMediaType // Media Type.
- );
-static HRESULT SetVideoWindow(
- IMFTopology *pTopology, // Topology.
- IMFMediaSource *pSource, // Media source.
- HWND hVideoWnd // Window for video playback.
- );
-static HRESULT RunSession(
- IMFMediaSession *pSession, // Session to run
- IMFTopology *pTopology // The toppology
- );
-static HRESULT ShutdownSession(
- IMFMediaSession *pSession, // The Session
- IMFMediaSource *pSource = NULL // Source to shutdown (optional)
- );
-static HRESULT PauseSession(
- IMFMediaSession *pSession, // The session
- IMFMediaSource *pSource = NULL// Source to pause (optional)
- );
-static INT GetSupportedSubTypeIndex(
- IMFMediaSource *pSource, // The source
- const GUID& mediaType, // The MediaType
- const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
- );
-static HRESULT IsSupported(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nFps,
- const GUID& guidFormat,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- );
-static HRESULT IsSupported(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- IMFMediaType* pMediaType,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- );
-static HRESULT IsSupportedByInput(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- IMFTopologyNode *pNode,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- );
-static HRESULT ConnectConverters(
- IMFTopologyNode *pNode,
- DWORD dwOutputIndex,
- IMFTopologyNode *pNodeConvFrameRate,
- IMFTopologyNode *pNodeConvColor,
- IMFTopologyNode *pNodeConvSize
- );
-static HRESULT GetBestFormat(
- IMFMediaSource *pSource,
- const GUID *pSubType,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nFps,
- UINT32 *pnWidth,
- UINT32 *pnHeight,
- UINT32 *pnFps,
- const VideoSubTypeGuidPair **pSubTypeGuidPair
- );
-static HWND GetConsoleHwnd(void);
+ static HWND GetConsoleHwnd(void);
-template <class Q>
-static HRESULT GetTopoNodeObject(IMFTopologyNode *pNode, Q **ppObject)
-{
- IUnknown *pUnk = NULL; // zero output
+ template <class Q>
+ static HRESULT GetTopoNodeObject(IMFTopologyNode *pNode, Q **ppObject) {
+ IUnknown *pUnk = NULL; // zero output
- HRESULT hr = pNode->GetObject(&pUnk);
- if (SUCCEEDED(hr))
- {
- pUnk->QueryInterface(IID_PPV_ARGS(ppObject));
- pUnk->Release();
+ HRESULT hr = pNode->GetObject(&pUnk);
+ if (SUCCEEDED(hr)) {
+ pUnk->QueryInterface(IID_PPV_ARGS(ppObject));
+ pUnk->Release();
+ }
+ return hr;
}
- return hr;
-}
private:
- static BOOL g_bStarted;
+ static BOOL g_bStarted;
- static DWORD g_dwMajorVersion;
- static DWORD g_dwMinorVersion;
+ static DWORD g_dwMajorVersion;
+ static DWORD g_dwMinorVersion;
- static BOOL g_bLowLatencyH264Checked;
- static BOOL g_bLowLatencyH264Supported;
- static BOOL g_bLowLatencyH264SupportsMaxSliceSize;
+ static BOOL g_bLowLatencyH264Checked;
+ static BOOL g_bLowLatencyH264Supported;
+ static BOOL g_bLowLatencyH264SupportsMaxSliceSize;
- static BOOL g_bD3D9Checked;
- static BOOL g_bD3D9Supported;
+ static BOOL g_bD3D9Checked;
+ static BOOL g_bD3D9Supported;
public:
- static const TOPOID g_ullTopoIdSinkMain;
- static const TOPOID g_ullTopoIdSinkPreview;
- static const TOPOID g_ullTopoIdSource;
- static const TOPOID g_ullTopoIdVideoProcessor;
+ static const TOPOID g_ullTopoIdSinkMain;
+ static const TOPOID g_ullTopoIdSinkPreview;
+ static const TOPOID g_ullTopoIdSource;
+ static const TOPOID g_ullTopoIdVideoProcessor;
};
#endif /* PLUGIN_WIN_MF_UTILS_H */
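A hedged usage sketch of these helpers (assumes the plugin's include paths; CreateI420Type is a hypothetical caller, not part of the sources): everything is static, so callers bracket their work between Startup() and Shutdown() and release whatever the out-parameters produce.

#include "internals/mf_utils.h"   // brings in CHECK_HR and the MFUtils declarations

static HRESULT CreateI420Type(UINT32 nWidth, UINT32 nHeight, IMFMediaType **ppType)
{
    HRESULT hr = S_OK;
    CHECK_HR(hr = MFUtils::Startup());   // MFStartup() plus the one-time capability probes
    CHECK_HR(hr = MFUtils::CreateVideoType(&MFVideoFormat_I420, ppType, nWidth, nHeight));
bail:
    return hr;                           // caller releases *ppType; MFUtils::Shutdown() at application exit
}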
diff --git a/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx b/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
index bee00f0..c815deb 100755
--- a/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,35 +31,34 @@
#include "tsk_memory.h"
#include "tsk_debug.h"
-typedef struct mf_codec_h264_s
-{
- TDAV_DECLARE_CODEC_H264_COMMON;
-
- // Encoder
- struct{
- MFCodecVideoH264* pInst;
- void* buffer;
- int64_t frame_count;
- tsk_bool_t force_idr;
- int32_t quality; // [1-31]
- int rotation;
- int neg_width;
- int neg_height;
- int neg_fps;
- int max_bitrate_bps;
- int32_t max_bw_kpbs;
- tsk_bool_t passthrough; // whether to bypass encoding
- } encoder;
-
- // decoder
- struct{
- MFCodecVideoH264* pInst;
- void* accumulator;
- tsk_size_t accumulator_pos;
- tsk_size_t accumulator_size;
- uint16_t last_seq;
- tsk_bool_t passthrough; // whether to bypass decoding
- } decoder;
+typedef struct mf_codec_h264_s {
+ TDAV_DECLARE_CODEC_H264_COMMON;
+
+ // Encoder
+ struct {
+ MFCodecVideoH264* pInst;
+ void* buffer;
+ int64_t frame_count;
+ tsk_bool_t force_idr;
+ int32_t quality; // [1-31]
+ int rotation;
+ int neg_width;
+ int neg_height;
+ int neg_fps;
+ int max_bitrate_bps;
+ int32_t max_bw_kpbs;
+ tsk_bool_t passthrough; // whether to bypass encoding
+ } encoder;
+
+ // decoder
+ struct {
+ MFCodecVideoH264* pInst;
+ void* accumulator;
+ tsk_size_t accumulator_pos;
+ tsk_size_t accumulator_size;
+ uint16_t last_seq;
+ tsk_bool_t passthrough; // whether to bypass decoding
+ } decoder;
}
mf_codec_h264_t;
@@ -78,372 +77,368 @@ static int mf_codec_h264_close_decoder(mf_codec_h264_t* self);
static int mf_codec_h264_set(tmedia_codec_t* self, const tmedia_param_t* param)
{
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
- if(!self->opened){
- TSK_DEBUG_ERROR("Codec not opened");
- return -1;
- }
- if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "action")){
- tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
- switch(action){
- case tmedia_codec_action_encode_idr:
- {
- h264->encoder.force_idr = tsk_true;
- break;
- }
- case tmedia_codec_action_bw_down:
- {
- h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
- break;
- }
- case tmedia_codec_action_bw_up:
- {
- h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
- break;
- }
- }
- return 0;
- }
- else if(tsk_striequals(param->key, "bypass-encoding")){
- h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
- h264->encoder.pInst->setBundled(h264->encoder.passthrough);
- TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
- return 0;
- }
- else if(tsk_striequals(param->key, "bypass-decoding")){
- h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
- h264->decoder.pInst->setBundled(h264->decoder.passthrough);
- TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
- return 0;
- }
- else if(tsk_striequals(param->key, "rotation")){
- int rotation = *((int32_t*)param->value);
- if(h264->encoder.rotation != rotation){
- if(self->opened){
- int ret;
- h264->encoder.rotation = rotation;
- if((ret = mf_codec_h264_close_encoder(h264))){
- return ret;
- }
- if((ret = mf_codec_h264_open_encoder(h264))){
- return ret;
- }
- }
- }
- return 0;
- }
- }
- return -1;
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ if(!self->opened) {
+ TSK_DEBUG_ERROR("Codec not opened");
+ return -1;
+ }
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "action")) {
+ tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+ switch(action) {
+ case tmedia_codec_action_encode_idr: {
+ h264->encoder.force_idr = tsk_true;
+ break;
+ }
+ case tmedia_codec_action_bw_down: {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
+ break;
+ }
+ case tmedia_codec_action_bw_up: {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
+ break;
+ }
+ }
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-encoding")) {
+ h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ h264->encoder.pInst->setBundled(h264->encoder.passthrough);
+ TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-decoding")) {
+ h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ h264->decoder.pInst->setBundled(h264->decoder.passthrough);
+ TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "rotation")) {
+ int rotation = *((int32_t*)param->value);
+ if(h264->encoder.rotation != rotation) {
+ if(self->opened) {
+ int ret;
+ h264->encoder.rotation = rotation;
+ if((ret = mf_codec_h264_close_encoder(h264))) {
+ return ret;
+ }
+ if((ret = mf_codec_h264_open_encoder(h264))) {
+ return ret;
+ }
+ }
+ }
+ return 0;
+ }
+ }
+ return -1;
}
static int mf_codec_h264_open(tmedia_codec_t* self)
{
- int ret;
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
-
- if(!h264){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- /* the caller (base class) already checked that the codec is not opened */
-
- // Encoder
- if((ret = mf_codec_h264_open_encoder(h264))){
- return ret;
- }
-
- // Decoder
- if((ret = mf_codec_h264_open_decoder(h264))){
- return ret;
- }
-
- return 0;
+ int ret;
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+
+ if(!h264) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ /* the caller (base class) already checked that the codec is not opened */
+
+ // Encoder
+ if((ret = mf_codec_h264_open_encoder(h264))) {
+ return ret;
+ }
+
+ // Decoder
+ if((ret = mf_codec_h264_open_decoder(h264))) {
+ return ret;
+ }
+
+ return 0;
}
static int mf_codec_h264_close(tmedia_codec_t* self)
{
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
- if(!h264){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!h264) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- /* the caller (base class) alreasy checked that the codec is opened */
+    /* the caller (base class) already checked that the codec is opened */
- // Encoder
- mf_codec_h264_close_encoder(h264);
+ // Encoder
+ mf_codec_h264_close_encoder(h264);
- // Decoder
- mf_codec_h264_close_decoder(h264);
+ // Decoder
+ mf_codec_h264_close_decoder(h264);
- return 0;
+ return 0;
}
static tsk_size_t mf_codec_h264_encode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size)
{
- int ret = 0;
- tsk_bool_t send_idr, send_hdr;
-
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
-
- if(!self || !in_data || !in_size){
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
-
- if(!self->opened || !h264->encoder.pInst || !h264->encoder.pInst->IsReady()){
- TSK_DEBUG_ERROR("Encoder not opened or not ready");
- return 0;
- }
-
-
- HRESULT hr = S_OK;
- IMFSample *pSampleOut = NULL;
- IMFMediaBuffer* pBufferOut = NULL;
-
- // send IDR for:
- // - the first frame
- // - remote peer requested an IDR
- // - every second within the first 4seconds
- send_idr = (
- h264->encoder.frame_count++ == 0
- || h264 ->encoder.force_idr
- || ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
- );
-
- if(send_idr) {
- CHECK_HR(hr = h264->encoder.pInst->RequestKeyFrame());
- }
-
- // send SPS and PPS headers for:
- // - IDR frames (not required but it's the easiest way to deal with pkt loss)
- // - every 5 seconds after the first 4seconds
- send_hdr = (
- send_idr
- || ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
- );
- if(send_hdr){
- //FIXME: MF_MT_MPEG_SEQUENCE_HEADER
- // tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), h264->encoder.context->extradata, (tsk_size_t)h264->encoder.context->extradata_size);
- }
-
- if (h264->encoder.passthrough) {
- tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
- return 0;
- }
-
- // Encode data
- CHECK_HR(hr = h264->encoder.pInst->Process(in_data, (UINT32)in_size, &pSampleOut));
- if(pSampleOut) {
- CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
-
- BYTE* pBufferPtr = NULL;
- DWORD dwDataLength = 0;
- CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
- if(dwDataLength > 0) {
- CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
- tdav_codec_h264_rtp_encap(common, (const uint8_t*)pBufferPtr, (tsk_size_t)dwDataLength);
- CHECK_HR(hr = pBufferOut->Unlock());
- }
- }
-
- // reset
- h264->encoder.force_idr = tsk_false;
+ int ret = 0;
+ tsk_bool_t send_idr, send_hdr;
+
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self || !in_data || !in_size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst || !h264->encoder.pInst->IsReady()) {
+ TSK_DEBUG_ERROR("Encoder not opened or not ready");
+ return 0;
+ }
+
+
+ HRESULT hr = S_OK;
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+
+ // send IDR for:
+ // - the first frame
+ // - remote peer requested an IDR
+    // - every second within the first 4 seconds
+ send_idr = (
+ h264->encoder.frame_count++ == 0
+               || h264->encoder.force_idr
+ || ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
+ );
+
+ if(send_idr) {
+ CHECK_HR(hr = h264->encoder.pInst->RequestKeyFrame());
+ }
+
+ // send SPS and PPS headers for:
+ // - IDR frames (not required but it's the easiest way to deal with pkt loss)
+    // - every 5 seconds after the first 4 seconds
+ send_hdr = (
+ send_idr
+ || ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
+ );
+ if(send_hdr) {
+ //FIXME: MF_MT_MPEG_SEQUENCE_HEADER
+ // tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), h264->encoder.context->extradata, (tsk_size_t)h264->encoder.context->extradata_size);
+ }
+
+ if (h264->encoder.passthrough) {
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
+ return 0;
+ }
+
+ // Encode data
+ CHECK_HR(hr = h264->encoder.pInst->Process(in_data, (UINT32)in_size, &pSampleOut));
+ if(pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if(dwDataLength > 0) {
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)pBufferPtr, (tsk_size_t)dwDataLength);
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+
+ // reset
+ h264->encoder.force_idr = tsk_false;
bail:
- SafeRelease(&pSampleOut);
- SafeRelease(&pBufferOut);
- return 0;
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
+ return 0;
}
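The IDR and parameter-set cadence used above, extracted into a standalone sketch (hypothetical names, simplified model): an IDR on the first frame, on request, and once per second during the first 4 seconds; SPS/PPS alongside every IDR and every 5 seconds thereafter.

#include <cstdint>
#include <cstdio>

struct Cadence { bool idr; bool headers; };

static Cadence Decide(int64_t frameIndex, int fps, bool forceIdr)
{
    Cadence c;
    c.idr = (frameIndex == 0)
            || forceIdr
            || ((frameIndex < fps * 4) && (frameIndex % fps) == 0); // every second in the first 4 s
    c.headers = c.idr || ((frameIndex % (fps * 5)) == 0);           // plus every 5 s
    return c;
}

int main()
{
    const int fps = 30;
    for (int64_t i = 0; i < 200; ++i) {
        Cadence c = Decide(i, fps, false);
        if (c.idr || c.headers) {
            std::printf("frame %3lld: idr=%d headers=%d\n", (long long)i, c.idr, c.headers);
        }
    }
    return 0;
}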
static tsk_size_t mf_codec_h264_decode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size, const tsk_object_t* proto_hdr)
{
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
- const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
-
- const uint8_t* pay_ptr = tsk_null;
- tsk_size_t pay_size = 0;
- int ret;
- tsk_bool_t append_scp, end_of_unit;
- tsk_bool_t sps_or_pps;
- tsk_size_t retsize = 0, size_to_copy = 0;
- static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
- static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
-
- if(!h264 || !in_data || !in_size || !out_data)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
-
- if(!self->opened || !h264->encoder.pInst || !h264->decoder.pInst->IsReady()){
- TSK_DEBUG_ERROR("Decoder not opened or not ready");
- return 0;
- }
-
- HRESULT hr = S_OK;
- IMFSample *pSampleOut = NULL;
- IMFMediaBuffer* pBufferOut = NULL;
-
- /* Packet lost? */
- if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq){
- TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
- }
- h264->decoder.last_seq = rtp_hdr->seq_num;
-
-
- /* 5.3. NAL Unit Octet Usage
- +---------------+
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
+
+ const uint8_t* pay_ptr = tsk_null;
+ tsk_size_t pay_size = 0;
+ int ret;
+ tsk_bool_t append_scp, end_of_unit;
+ tsk_bool_t sps_or_pps;
+ tsk_size_t retsize = 0, size_to_copy = 0;
+ static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
+ static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
+
+ if(!h264 || !in_data || !in_size || !out_data) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst || !h264->decoder.pInst->IsReady()) {
+ TSK_DEBUG_ERROR("Decoder not opened or not ready");
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+
+ /* Packet lost? */
+ if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq) {
+ TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
+ }
+ h264->decoder.last_seq = rtp_hdr->seq_num;
+
+
+ /* 5.3. NAL Unit Octet Usage
+ +---------------+
|0|1|2|3|4|5|6|7|
+-+-+-+-+-+-+-+-+
|F|NRI| Type |
+---------------+
- */
- if (*((uint8_t*)in_data) & 0x80) {
- TSK_DEBUG_WARN("F=1");
- /* reset accumulator */
- h264->decoder.accumulator_pos = 0;
- return 0;
- }
-
- /* get payload */
- if ((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp, &end_of_unit)) || !pay_ptr || !pay_size){
- TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
- return 0;
- }
- //append_scp = tsk_true;
- size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
- // whether it's SPS or PPS (append_scp is false for subsequent FUA chuncks)
- sps_or_pps = append_scp && pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
-
- // start-accumulator
- if (!h264->decoder.accumulator) {
- if (size_to_copy > xmax_size) {
- TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", size_to_copy, xmax_size);
- return 0;
- }
- if (!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t)))) {
- TSK_DEBUG_ERROR("Failed to allocated new buffer");
- return 0;
- }
- h264->decoder.accumulator_size = size_to_copy;
- }
- if ((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size) {
- TSK_DEBUG_ERROR("BufferOverflow");
- h264->decoder.accumulator_pos = 0;
- return 0;
- }
- if ((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size) {
- if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy)))){
- TSK_DEBUG_ERROR("Failed to reallocated new buffer");
- h264->decoder.accumulator_pos = 0;
- h264->decoder.accumulator_size = 0;
- return 0;
- }
- h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
- }
-
- if (append_scp) {
- memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
- h264->decoder.accumulator_pos += start_code_prefix_size;
- }
- memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
- h264->decoder.accumulator_pos += pay_size;
- // end-accumulator
-
- /*if(sps_or_pps){
- // http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
- // SPS and PPS should be bundled with IDR
- TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
- }
- else */if (rtp_hdr->marker) {
- if (h264->decoder.passthrough) {
- if (*out_max_size < h264->decoder.accumulator_pos) {
- if ((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos))) {
- *out_max_size = h264->decoder.accumulator_pos;
- }
- else {
- *out_max_size = 0;
- return 0;
- }
- }
- memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
- retsize = h264->decoder.accumulator_pos;
- }
- else { // !h264->decoder.passthrough
- /* decode the picture */
- CHECK_HR(hr = h264->decoder.pInst->Process(h264->decoder.accumulator, (UINT32)h264->decoder.accumulator_pos, &pSampleOut));
- if (pSampleOut) {
- CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
-
- BYTE* pBufferPtr = NULL;
- DWORD dwDataLength = 0;
- CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
- if (dwDataLength > 0) {
- CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
- {
- /* IDR ? */
- if(((pay_ptr[0] & 0x1F) == 0x05) && TMEDIA_CODEC_VIDEO(self)->in.callback){
- TSK_DEBUG_INFO("Decoded H.264 IDR");
- TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
- TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
- TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
- }
- /* fill out */
- if(*out_max_size < dwDataLength){
- if((*out_data = tsk_realloc(*out_data, dwDataLength))){
- *out_max_size = dwDataLength;
- }
- else{
- *out_max_size = 0;
- return 0;
- }
- }
- retsize = (tsk_size_t)dwDataLength;
- TMEDIA_CODEC_VIDEO(h264)->in.width = h264->decoder.pInst->GetWidth();
- TMEDIA_CODEC_VIDEO(h264)->in.height = h264->decoder.pInst->GetHeight();
- memcpy(*out_data, pBufferPtr, retsize);
- }
- CHECK_HR(hr = pBufferOut->Unlock());
- }
- }
- }// else(!h264->decoder.passthrough)
- } // else if(rtp_hdr->marker)
+ */
+ if (*((uint8_t*)in_data) & 0x80) {
+ TSK_DEBUG_WARN("F=1");
+ /* reset accumulator */
+ h264->decoder.accumulator_pos = 0;
+ return 0;
+ }
+
+ /* get payload */
+ if ((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp, &end_of_unit)) || !pay_ptr || !pay_size) {
+ TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
+ return 0;
+ }
+ //append_scp = tsk_true;
+ size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
+    // whether it's SPS or PPS (append_scp is false for subsequent FU-A chunks)
+ sps_or_pps = append_scp && pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
+
+ // start-accumulator
+ if (!h264->decoder.accumulator) {
+ if (size_to_copy > xmax_size) {
+ TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", size_to_copy, xmax_size);
+ return 0;
+ }
+ if (!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t)))) {
+            TSK_DEBUG_ERROR("Failed to allocate new buffer");
+ return 0;
+ }
+ h264->decoder.accumulator_size = size_to_copy;
+ }
+ if ((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size) {
+ TSK_DEBUG_ERROR("BufferOverflow");
+ h264->decoder.accumulator_pos = 0;
+ return 0;
+ }
+ if ((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size) {
+ if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy)))) {
+            TSK_DEBUG_ERROR("Failed to reallocate new buffer");
+ h264->decoder.accumulator_pos = 0;
+ h264->decoder.accumulator_size = 0;
+ return 0;
+ }
+ h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
+ }
+
+ if (append_scp) {
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
+ h264->decoder.accumulator_pos += start_code_prefix_size;
+ }
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
+ h264->decoder.accumulator_pos += pay_size;
+ // end-accumulator
+
+ /*if(sps_or_pps){
+ // http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
+ // SPS and PPS should be bundled with IDR
+ TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
+ }
+ else */if (rtp_hdr->marker) {
+ if (h264->decoder.passthrough) {
+ if (*out_max_size < h264->decoder.accumulator_pos) {
+ if ((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos))) {
+ *out_max_size = h264->decoder.accumulator_pos;
+ }
+ else {
+ *out_max_size = 0;
+ return 0;
+ }
+ }
+ memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
+ retsize = h264->decoder.accumulator_pos;
+ }
+ else { // !h264->decoder.passthrough
+ /* decode the picture */
+ CHECK_HR(hr = h264->decoder.pInst->Process(h264->decoder.accumulator, (UINT32)h264->decoder.accumulator_pos, &pSampleOut));
+ if (pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if (dwDataLength > 0) {
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+ {
+ /* IDR ? */
+ if(((pay_ptr[0] & 0x1F) == 0x05) && TMEDIA_CODEC_VIDEO(self)->in.callback) {
+ TSK_DEBUG_INFO("Decoded H.264 IDR");
+ TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
+ TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+ TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+ }
+ /* fill out */
+ if(*out_max_size < dwDataLength) {
+ if((*out_data = tsk_realloc(*out_data, dwDataLength))) {
+ *out_max_size = dwDataLength;
+ }
+ else {
+ *out_max_size = 0;
+ return 0;
+ }
+ }
+ retsize = (tsk_size_t)dwDataLength;
+ TMEDIA_CODEC_VIDEO(h264)->in.width = h264->decoder.pInst->GetWidth();
+ TMEDIA_CODEC_VIDEO(h264)->in.height = h264->decoder.pInst->GetHeight();
+ memcpy(*out_data, pBufferPtr, retsize);
+ }
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+ }// else(!h264->decoder.passthrough)
+ } // else if(rtp_hdr->marker)
bail:
- if (rtp_hdr->marker) {
- h264->decoder.accumulator_pos = 0;
- }
- if (FAILED(hr) /*|| (!pSampleOut && rtp_hdr->marker)*/){
- TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
- if(TMEDIA_CODEC_VIDEO(self)->in.callback){
- TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
- TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
- TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
- }
- }
- SafeRelease(&pSampleOut);
- SafeRelease(&pBufferOut);
- return retsize;
+ if (rtp_hdr->marker) {
+ h264->decoder.accumulator_pos = 0;
+ }
+ if (FAILED(hr) /*|| (!pSampleOut && rtp_hdr->marker)*/) {
+ TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
+ if(TMEDIA_CODEC_VIDEO(self)->in.callback) {
+ TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
+ TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+ TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+ }
+ }
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
+ return retsize;
}
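A standalone model of the accumulator logic above (hypothetical names; the 4-byte Annex-B start code is shown for illustration): each depacketized payload is appended to a growing buffer, prefixed with a start code when it begins a new NAL unit, and the whole buffer is handed to the decoder and reset when the RTP marker bit closes the access unit.

#include <cstdint>
#include <cstdio>
#include <vector>

static const uint8_t kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };

static void Append(std::vector<uint8_t>& acc, const uint8_t* payload, size_t size, bool newNalUnit)
{
    if (newNalUnit) {
        acc.insert(acc.end(), kStartCode, kStartCode + sizeof(kStartCode)); // append_scp path
    }
    acc.insert(acc.end(), payload, payload + size);
}

int main()
{
    std::vector<uint8_t> accumulator;
    const uint8_t sps[] = { 0x67, 0x42 }, pps[] = { 0x68, 0xCE }, idr[] = { 0x65, 0x88 };
    Append(accumulator, sps, sizeof(sps), true);
    Append(accumulator, pps, sizeof(pps), true);
    Append(accumulator, idr, sizeof(idr), true);   // marker bit set -> decode, then reset
    std::printf("access unit size: %zu bytes\n", accumulator.size());
    accumulator.clear();
    return 0;
}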
static tsk_bool_t mf_codec_h264_sdp_att_match(const tmedia_codec_t* self, const char* att_name, const char* att_value)
{
- return tdav_codec_h264_common_sdp_att_match((tdav_codec_h264_common_t*)self, att_name, att_value);
+ return tdav_codec_h264_common_sdp_att_match((tdav_codec_h264_common_t*)self, att_name, att_value);
}
static char* mf_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char* att_name)
{
- char* att = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
- if(att && tsk_striequals(att_name, "fmtp")) {
- tsk_strcat(&att, "; impl=MF");
- }
- return att;
+ char* att = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
+ if(att && tsk_striequals(att_name, "fmtp")) {
+ tsk_strcat(&att, "; impl=MF");
+ }
+ return att;
}
@@ -454,63 +449,61 @@ static char* mf_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char* a
/* constructor */
static tsk_object_t* mf_codec_h264_base_ctor(tsk_object_t * self, va_list * app)
{
- mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
- if(h264){
- /* init base: called by tmedia_codec_create() */
- /* init self */
- if(mf_codec_h264_init(h264, profile_idc_baseline) != 0){
- return tsk_null;
- }
- }
- return self;
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264) {
+ /* init base: called by tmedia_codec_create() */
+ /* init self */
+ if(mf_codec_h264_init(h264, profile_idc_baseline) != 0) {
+ return tsk_null;
+ }
+ }
+ return self;
}
/* destructor */
static tsk_object_t* mf_codec_h264_base_dtor(tsk_object_t * self)
-{
- mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
- if(h264){
- /* deinit base */
- tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
- /* deinit self */
- mf_codec_h264_deinit(h264);
- }
-
- return self;
+{
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264) {
+ /* deinit base */
+ tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+ /* deinit self */
+ mf_codec_h264_deinit(h264);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t mf_codec_h264_base_def_s =
-{
- sizeof(mf_codec_h264_t),
- mf_codec_h264_base_ctor,
- mf_codec_h264_base_dtor,
- tmedia_codec_cmp,
+static const tsk_object_def_t mf_codec_h264_base_def_s = {
+ sizeof(mf_codec_h264_t),
+ mf_codec_h264_base_ctor,
+ mf_codec_h264_base_dtor,
+ tmedia_codec_cmp,
};
/* plugin definition*/
-static const tmedia_codec_plugin_def_t mf_codec_h264_base_plugin_def_s =
-{
- &mf_codec_h264_base_def_s,
-
- tmedia_video,
- tmedia_codec_id_h264_bp,
- "H264",
- "H264 Base Profile (Media Foundation)",
- TMEDIA_CODEC_FORMAT_H264_BP,
- tsk_true,
- 90000, // rate
-
- /* audio */
- { 0 },
-
- /* video (width, height, fps) */
- {176, 144, 0}, // fps is @deprecated
-
- mf_codec_h264_set,
- mf_codec_h264_open,
- mf_codec_h264_close,
- mf_codec_h264_encode,
- mf_codec_h264_decode,
- mf_codec_h264_sdp_att_match,
- mf_codec_h264_sdp_att_get
+static const tmedia_codec_plugin_def_t mf_codec_h264_base_plugin_def_s = {
+ &mf_codec_h264_base_def_s,
+
+ tmedia_video,
+ tmedia_codec_id_h264_bp,
+ "H264",
+ "H264 Base Profile (Media Foundation)",
+ TMEDIA_CODEC_FORMAT_H264_BP,
+ tsk_true,
+ 90000, // rate
+
+ /* audio */
+ { 0 },
+
+ /* video (width, height, fps) */
+ {176, 144, 0}, // fps is @deprecated
+
+ mf_codec_h264_set,
+ mf_codec_h264_open,
+ mf_codec_h264_close,
+ mf_codec_h264_encode,
+ mf_codec_h264_decode,
+ mf_codec_h264_sdp_att_match,
+ mf_codec_h264_sdp_att_get
};
const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t = &mf_codec_h264_base_plugin_def_s;
@@ -519,64 +512,62 @@ const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t = &mf_codec_h26
/* constructor */
static tsk_object_t* mf_codec_h264_main_ctor(tsk_object_t * self, va_list * app)
{
- mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
- if(h264){
- /* init base: called by tmedia_codec_create() */
- /* init self */
- if(mf_codec_h264_init(h264, profile_idc_main) != 0){
- return tsk_null;
- }
- }
- return self;
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264) {
+ /* init base: called by tmedia_codec_create() */
+ /* init self */
+ if(mf_codec_h264_init(h264, profile_idc_main) != 0) {
+ return tsk_null;
+ }
+ }
+ return self;
}
/* destructor */
static tsk_object_t* mf_codec_h264_main_dtor(tsk_object_t * self)
-{
- mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
- if(h264){
- /* deinit base */
- tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
- /* deinit self */
- mf_codec_h264_deinit(h264);
-
- }
-
- return self;
+{
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264) {
+ /* deinit base */
+ tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+ /* deinit self */
+ mf_codec_h264_deinit(h264);
+
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t mf_codec_h264_main_def_s =
-{
- sizeof(mf_codec_h264_t),
- mf_codec_h264_main_ctor,
- mf_codec_h264_main_dtor,
- tmedia_codec_cmp,
+static const tsk_object_def_t mf_codec_h264_main_def_s = {
+ sizeof(mf_codec_h264_t),
+ mf_codec_h264_main_ctor,
+ mf_codec_h264_main_dtor,
+ tmedia_codec_cmp,
};
/* plugin definition*/
-static const tmedia_codec_plugin_def_t mf_codec_h264_main_plugin_def_s =
-{
- &mf_codec_h264_main_def_s,
-
- tmedia_video,
- tmedia_codec_id_h264_mp,
- "H264",
- "H264 Main Profile (Media Foundation)",
- TMEDIA_CODEC_FORMAT_H264_MP,
- tsk_true,
- 90000, // rate
-
- /* audio */
- { 0 },
-
- /* video (width, height, fps)*/
- {176, 144, 0},// fps is @deprecated
-
- mf_codec_h264_set,
- mf_codec_h264_open,
- mf_codec_h264_close,
- mf_codec_h264_encode,
- mf_codec_h264_decode,
- mf_codec_h264_sdp_att_match,
- mf_codec_h264_sdp_att_get
+static const tmedia_codec_plugin_def_t mf_codec_h264_main_plugin_def_s = {
+ &mf_codec_h264_main_def_s,
+
+ tmedia_video,
+ tmedia_codec_id_h264_mp,
+ "H264",
+ "H264 Main Profile (Media Foundation)",
+ TMEDIA_CODEC_FORMAT_H264_MP,
+ tsk_true,
+ 90000, // rate
+
+ /* audio */
+ { 0 },
+
+ /* video (width, height, fps)*/
+ {176, 144, 0},// fps is @deprecated
+
+ mf_codec_h264_set,
+ mf_codec_h264_open,
+ mf_codec_h264_close,
+ mf_codec_h264_encode,
+ mf_codec_h264_decode,
+ mf_codec_h264_sdp_att_match,
+ mf_codec_h264_sdp_att_get
};
const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t = &mf_codec_h264_main_plugin_def_s;
@@ -586,165 +577,165 @@ const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t = &mf_codec_h26
int mf_codec_h264_open_encoder(mf_codec_h264_t* self)
{
- HRESULT hr = S_OK;
- int32_t max_bw_kpbs;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+ HRESULT hr = S_OK;
+ int32_t max_bw_kpbs;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
- if(self->encoder.pInst) {
- TSK_DEBUG_ERROR("Encoder already initialized");
+ if(self->encoder.pInst) {
+ TSK_DEBUG_ERROR("Encoder already initialized");
#if defined(E_ILLEGAL_METHOD_CALL)
- CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+ CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
#else
- CHECK_HR(hr = 0x8000000EL);
-#endif
- }
-
- // create encoder
- if(!(self->encoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder))){
- TSK_DEBUG_ERROR("Failed to find H.264 encoder");
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
-
- //self->encoder.context->pix_fmt = PIX_FMT_YUV420P;
- //self->encoder.context->time_base.num = 1;
- //self->encoder.context->time_base.den = TMEDIA_CODEC_VIDEO(self)->out.fps;
- self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
- self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
- self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
- max_bw_kpbs = TSK_CLAMP(
- 0,
- tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
- self->encoder.max_bw_kpbs
- );
- self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
-
- TSK_DEBUG_INFO("[H.264 MF Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
- self->encoder.neg_width,
- self->encoder.neg_height,
- self->encoder.neg_fps,
- self->encoder.max_bitrate_bps
- );
-
- CHECK_HR(hr = self->encoder.pInst->Initialize(
- self->encoder.neg_fps,
- self->encoder.neg_width,
- self->encoder.neg_height,
- self->encoder.max_bitrate_bps));
-
- CHECK_HR(hr = self->encoder.pInst->SetGOPSize(self->encoder.neg_fps * PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS));
- CHECK_HR(hr = self->encoder.pInst->SetSliceMaxSizeInBytes((H264_RTP_PAYLOAD_SIZE - 100)));
+ CHECK_HR(hr = 0x8000000EL);
+#endif
+ }
+
+ // create encoder
+ if(!(self->encoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder))) {
+ TSK_DEBUG_ERROR("Failed to find H.264 encoder");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ //self->encoder.context->pix_fmt = PIX_FMT_YUV420P;
+ //self->encoder.context->time_base.num = 1;
+ //self->encoder.context->time_base.den = TMEDIA_CODEC_VIDEO(self)->out.fps;
+ self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
+ self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
+ self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
+ max_bw_kpbs = TSK_CLAMP(
+ 0,
+ tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
+ self->encoder.max_bw_kpbs
+ );
+ self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
+
+ TSK_DEBUG_INFO("[H.264 MF Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
+ self->encoder.neg_width,
+ self->encoder.neg_height,
+ self->encoder.neg_fps,
+ self->encoder.max_bitrate_bps
+ );
+
+ CHECK_HR(hr = self->encoder.pInst->Initialize(
+ self->encoder.neg_fps,
+ self->encoder.neg_width,
+ self->encoder.neg_height,
+ self->encoder.max_bitrate_bps));
+
+ CHECK_HR(hr = self->encoder.pInst->SetGOPSize(self->encoder.neg_fps * PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS));
+ CHECK_HR(hr = self->encoder.pInst->SetSliceMaxSizeInBytes((H264_RTP_PAYLOAD_SIZE - 100)));
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
int mf_codec_h264_close_encoder(mf_codec_h264_t* self)
{
- if(self){
- SafeRelease(&self->encoder.pInst);
- if(self->encoder.buffer){
- TSK_FREE(self->encoder.buffer);
- }
- self->encoder.frame_count = 0;
- }
-
- return 0;
+ if(self) {
+ SafeRelease(&self->encoder.pInst);
+ if(self->encoder.buffer) {
+ TSK_FREE(self->encoder.buffer);
+ }
+ self->encoder.frame_count = 0;
+ }
+
+ return 0;
}
int mf_codec_h264_open_decoder(mf_codec_h264_t* self)
{
- HRESULT hr = S_OK;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+ HRESULT hr = S_OK;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
- if(self->decoder.pInst) {
- TSK_DEBUG_ERROR("Decoder already initialized");
+ if(self->decoder.pInst) {
+ TSK_DEBUG_ERROR("Decoder already initialized");
#if defined(E_ILLEGAL_METHOD_CALL)
- CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+ CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
#else
- CHECK_HR(hr = 0x8000000EL);
+ CHECK_HR(hr = 0x8000000EL);
#endif
- }
+ }
- // create decoder
- if(!(self->decoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder))){
- TSK_DEBUG_ERROR("Failed to find H.264 encoder");
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
+ // create decoder
+ if(!(self->decoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder))) {
+        TSK_DEBUG_ERROR("Failed to find H.264 decoder");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
- TSK_DEBUG_INFO("[H.264 MF Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
- TMEDIA_CODEC_VIDEO(self)->in.width,
- TMEDIA_CODEC_VIDEO(self)->in.height,
- TMEDIA_CODEC_VIDEO(self)->in.fps
- );
+ TSK_DEBUG_INFO("[H.264 MF Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
+ TMEDIA_CODEC_VIDEO(self)->in.width,
+ TMEDIA_CODEC_VIDEO(self)->in.height,
+ TMEDIA_CODEC_VIDEO(self)->in.fps
+ );
- CHECK_HR(hr = self->decoder.pInst->Initialize(
- TMEDIA_CODEC_VIDEO(self)->in.fps,
- TMEDIA_CODEC_VIDEO(self)->in.width,
- TMEDIA_CODEC_VIDEO(self)->in.height));
+ CHECK_HR(hr = self->decoder.pInst->Initialize(
+ TMEDIA_CODEC_VIDEO(self)->in.fps,
+ TMEDIA_CODEC_VIDEO(self)->in.width,
+ TMEDIA_CODEC_VIDEO(self)->in.height));
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
int mf_codec_h264_close_decoder(mf_codec_h264_t* self)
{
- if(self){
- SafeRelease(&self->decoder.pInst);
- TSK_FREE(self->decoder.accumulator);
- self->decoder.accumulator_pos = 0;
- }
+ if(self) {
+ SafeRelease(&self->decoder.pInst);
+ TSK_FREE(self->decoder.accumulator);
+ self->decoder.accumulator_pos = 0;
+ }
- return 0;
+ return 0;
}
int mf_codec_h264_init(mf_codec_h264_t* self, profile_idc_t profile)
{
- int ret = 0;
- level_idc_t level;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
-
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if((ret = tdav_codec_h264_common_init(common))){
- TSK_DEBUG_ERROR("mf_codec_h264_common_init() faile with error code=%d", ret);
- return ret;
- }
-
- if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level))){
- TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
- return ret;
- }
-
- (self)->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
- if (MFUtils::IsLowLatencyH264SupportsMaxSliceSize()) {
- common->pack_mode_local = H264_PACKETIZATION_MODE;
- }
- else {
- common->pack_mode_local = Non_Interleaved_Mode;
- }
- common->profile = profile;
- common->level = level;
- TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
- TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
-
- TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12;
- TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_nv12;
-
- self->encoder.quality = 1;
-
- return ret;
+ int ret = 0;
+ level_idc_t level;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if((ret = tdav_codec_h264_common_init(common))) {
+        TSK_DEBUG_ERROR("tdav_codec_h264_common_init() failed with error code=%d", ret);
+ return ret;
+ }
+
+ if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level))) {
+ TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
+ return ret;
+ }
+
+ (self)->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
+ if (MFUtils::IsLowLatencyH264SupportsMaxSliceSize()) {
+ common->pack_mode_local = H264_PACKETIZATION_MODE;
+ }
+ else {
+ common->pack_mode_local = Non_Interleaved_Mode;
+ }
+ common->profile = profile;
+ common->level = level;
+ TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
+ TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
+
+ TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12;
+ TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_nv12;
+
+ self->encoder.quality = 1;
+
+ return ret;
}
int mf_codec_h264_deinit(mf_codec_h264_t* self)
{
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- mf_codec_h264_close((tmedia_codec_t*)self);
+ mf_codec_h264_close((tmedia_codec_t*)self);
- return 0;
+ return 0;
} \ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_config.h b/plugins/pluginWinMF/plugin_win_mf_config.h
index f4f692a..e518e37 100755
--- a/plugins/pluginWinMF/plugin_win_mf_config.h
+++ b/plugins/pluginWinMF/plugin_win_mf_config.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -48,12 +48,12 @@
# define PLUGIN_WIN_MF_UNDER_X86 1
#endif
-// Guards against C++ name mangling
+// Guards against C++ name mangling
#ifdef __cplusplus
# define PLUGIN_WIN_MF_BEGIN_DECLS extern "C" {
# define PLUGIN_WIN_MF_END_DECLS }
#else
-# define PLUGIN_WIN_MF_BEGIN_DECLS
+# define PLUGIN_WIN_MF_BEGIN_DECLS
# define PLUGIN_WIN_MF_END_DECLS
#endif
@@ -69,7 +69,7 @@
#endif
#if HAVE_CONFIG_H
- #include <config.h>
+#include <config.h>
#endif
#endif // PLUGIN_WIN_MF_CONFIG_H
diff --git a/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx b/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
index 026f510..f68f428 100755
--- a/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -23,11 +23,10 @@
#include "tsk_debug.h"
-typedef struct plugin_win_mf_consumer_audio_s
-{
- TDAV_DECLARE_CONSUMER_AUDIO;
+typedef struct plugin_win_mf_consumer_audio_s {
+ TDAV_DECLARE_CONSUMER_AUDIO;
- bool bStarted;
+ bool bStarted;
}
plugin_win_mf_consumer_audio_t;
@@ -35,70 +34,70 @@ plugin_win_mf_consumer_audio_t;
/* ============ Consumer Interface ================= */
static int plugin_win_mf_consumer_audio_set(tmedia_consumer_t* self, const tmedia_param_t* param)
{
- plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
- int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+ int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+
+ if(ret == 0) {
- if(ret == 0){
-
- }
+ }
- return ret;
+ return ret;
}
static int plugin_win_mf_consumer_audio_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return 0;
+ return 0;
}
static int plugin_win_mf_consumer_audio_start(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
- pSelf->bStarted = true;
+ pSelf->bStarted = true;
- return 0;
+ return 0;
}
static int plugin_win_mf_consumer_audio_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- if(!self || !buffer || !size){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- /* buffer is already decoded */
- return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
+ if(!self || !buffer || !size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ /* buffer is already decoded */
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
}
static int plugin_win_mf_consumer_audio_pause(tmedia_consumer_t* self)
{
- return 0;
+ return 0;
}
static int plugin_win_mf_consumer_audio_stop(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(!pSelf->bStarted){
- TSK_DEBUG_INFO("WinMF audio consumer not started");
- return 0;
- }
-
- /* should be done here */
- pSelf->bStarted = false;
-
- return 0;
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("WinMF audio consumer not started");
+ return 0;
+ }
+
+ /* should be done here */
+ pSelf->bStarted = false;
+
+ return 0;
}
@@ -108,56 +107,54 @@ static int plugin_win_mf_consumer_audio_stop(tmedia_consumer_t* self)
/* constructor */
static tsk_object_t* plugin_win_mf_consumer_audio_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
- if(pSelf){
- /* init base */
- tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(pSelf));
- /* init self */
-
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
+ if(pSelf) {
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(pSelf));
+ /* init self */
+
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_consumer_audio_dtor(tsk_object_t * self)
-{
- plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
- if(pSelf){
- /* stop */
- if(pSelf->bStarted){
- plugin_win_mf_consumer_audio_stop(TMEDIA_CONSUMER(pSelf));
- }
-
- /* deinit base */
- tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(pSelf));
- /* deinit self */
-
- }
-
- return self;
+{
+ plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
+ if(pSelf) {
+ /* stop */
+ if(pSelf->bStarted) {
+ plugin_win_mf_consumer_audio_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(pSelf));
+ /* deinit self */
+
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_consumer_audio_def_s =
-{
- sizeof(plugin_win_mf_consumer_audio_t),
- plugin_win_mf_consumer_audio_ctor,
- plugin_win_mf_consumer_audio_dtor,
- tdav_consumer_audio_cmp,
+static const tsk_object_def_t plugin_win_mf_consumer_audio_def_s = {
+ sizeof(plugin_win_mf_consumer_audio_t),
+ plugin_win_mf_consumer_audio_ctor,
+ plugin_win_mf_consumer_audio_dtor,
+ tdav_consumer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_audio_plugin_def_s =
-{
- &plugin_win_mf_consumer_audio_def_s,
-
- tmedia_audio,
- "Windows Media Foundation audio consumer",
-
- plugin_win_mf_consumer_audio_set,
- plugin_win_mf_consumer_audio_prepare,
- plugin_win_mf_consumer_audio_start,
- plugin_win_mf_consumer_audio_consume,
- plugin_win_mf_consumer_audio_pause,
- plugin_win_mf_consumer_audio_stop
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_audio_plugin_def_s = {
+ &plugin_win_mf_consumer_audio_def_s,
+
+ tmedia_audio,
+ "Windows Media Foundation audio consumer",
+
+ plugin_win_mf_consumer_audio_set,
+ plugin_win_mf_consumer_audio_prepare,
+ plugin_win_mf_consumer_audio_start,
+ plugin_win_mf_consumer_audio_consume,
+ plugin_win_mf_consumer_audio_pause,
+ plugin_win_mf_consumer_audio_stop
};
const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_audio_plugin_def_t = &plugin_win_mf_consumer_audio_plugin_def_s;
diff --git a/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx b/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
index f6bef59..ee6eaaa 100755
--- a/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -50,20 +50,20 @@
const DWORD NUM_BACK_BUFFERS = 2;
static HRESULT CreateDeviceD3D9(
- HWND hWnd,
- IDirect3DDevice9** ppDevice,
- IDirect3D9 **ppD3D,
- D3DPRESENT_PARAMETERS &d3dpp
- );
+ HWND hWnd,
+ IDirect3DDevice9** ppDevice,
+ IDirect3D9 **ppD3D,
+ D3DPRESENT_PARAMETERS &d3dpp
+);
static HRESULT TestCooperativeLevel(
- struct plugin_win_mf_consumer_video_s *pSelf
- );
+ struct plugin_win_mf_consumer_video_s *pSelf
+);
static HRESULT CreateSwapChain(
- HWND hWnd,
- UINT32 nFrameWidth,
- UINT32 nFrameHeight,
- IDirect3DDevice9* pDevice,
- IDirect3DSwapChain9 **ppSwapChain);
+ HWND hWnd,
+ UINT32 nFrameWidth,
+ UINT32 nFrameHeight,
+ IDirect3DDevice9* pDevice,
+ IDirect3DSwapChain9 **ppSwapChain);
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
@@ -81,30 +81,29 @@ static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf);
-typedef struct plugin_win_mf_consumer_video_s
-{
- TMEDIA_DECLARE_CONSUMER;
-
- BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked;
- BOOL bPluginFireFox, bPluginWebRTC4All;
- HWND hWindow;
- WNDPROC wndProc;
- HWND hWindowFullScreen;
- RECT rcWindow;
- RECT rcDest;
- MFRatio pixelAR;
-
- UINT32 nNegWidth;
- UINT32 nNegHeight;
- UINT32 nNegFps;
-
- D3DLOCKED_RECT rcLock;
- IDirect3DDevice9* pDevice;
- IDirect3D9 *pD3D;
- IDirect3DSwapChain9 *pSwapChain;
- D3DPRESENT_PARAMETERS d3dpp;
-
- TSK_DECLARE_SAFEOBJ;
+typedef struct plugin_win_mf_consumer_video_s {
+ TMEDIA_DECLARE_CONSUMER;
+
+ BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked;
+ BOOL bPluginFireFox, bPluginWebRTC4All;
+ HWND hWindow;
+ WNDPROC wndProc;
+ HWND hWindowFullScreen;
+ RECT rcWindow;
+ RECT rcDest;
+ MFRatio pixelAR;
+
+ UINT32 nNegWidth;
+ UINT32 nNegHeight;
+ UINT32 nNegFps;
+
+ D3DLOCKED_RECT rcLock;
+ IDirect3DDevice9* pDevice;
+ IDirect3D9 *pD3D;
+ IDirect3DSwapChain9 *pSwapChain;
+ D3DPRESENT_PARAMETERS d3dpp;
+
+ TSK_DECLARE_SAFEOBJ;
}
plugin_win_mf_consumer_video_t;
@@ -113,360 +112,331 @@ static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_
/* ============ Media Consumer Interface ================= */
static int plugin_win_mf_consumer_video_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- HRESULT hr = S_OK;
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!self || !param)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_POINTER);
- }
-
- if(param->value_type == tmedia_pvt_int64)
- {
- if(tsk_striequals(param->key, "remote-hwnd"))
- {
- HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
- if(hWnd != pSelf->hWindow)
- {
- tsk_safeobj_lock(pSelf); // block consumer thread
- pSelf->hWindow = hWnd;
- if(pSelf->bPrepared)
- {
- hr = ResetDevice(pSelf);
- }
- tsk_safeobj_unlock(pSelf); // unblock consumer thread
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32)
- {
- if(tsk_striequals(param->key, "fullscreen"))
- {
- BOOL bFullScreen = !!*((int32_t*)param->value);
- TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
- CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
- }
- else if(tsk_striequals(param->key, "create-on-current-thead"))
- {
- // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if(tsk_striequals(param->key, "plugin-firefox"))
- {
- pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
- }
- else if(tsk_striequals(param->key, "plugin-webrtc4all"))
- {
- pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
- }
- }
-
- CHECK_HR(hr);
+ int ret = 0;
+ HRESULT hr = S_OK;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(param->value_type == tmedia_pvt_int64) {
+ if(tsk_striequals(param->key, "remote-hwnd")) {
+ HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+ if(hWnd != pSelf->hWindow) {
+ tsk_safeobj_lock(pSelf); // block consumer thread
+ pSelf->hWindow = hWnd;
+ if(pSelf->bPrepared) {
+ hr = ResetDevice(pSelf);
+ }
+ tsk_safeobj_unlock(pSelf); // unblock consumer thread
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "fullscreen")) {
+ BOOL bFullScreen = !!*((int32_t*)param->value);
+ TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
+ CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
+ }
+ else if(tsk_striequals(param->key, "create-on-current-thead")) {
+ // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if(tsk_striequals(param->key, "plugin-firefox")) {
+ pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
+ }
+ else if(tsk_striequals(param->key, "plugin-webrtc4all")) {
+ pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
+ }
+ }
+
+ CHECK_HR(hr);
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!pSelf || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(pSelf->bPrepared){
- TSK_DEBUG_WARN("D3D9 video consumer already prepared");
- return -1;
- }
-
- // FIXME: DirectShow requires flipping but not D3D9
- // The Core library always tries to flip when OSType==Win32. Must be changed
- TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
-
- HRESULT hr = S_OK;
- HWND hWnd = Window(pSelf);
-
- TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
-
- if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
- TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
- }
- if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
- TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
- }
-
- pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
-
- TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
- pSelf->nNegFps,
- pSelf->nNegWidth,
- pSelf->nNegHeight);
-
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- // The window handle is not created until the call is connect (incoming only) - At least on Internet Explorer 10
- if(hWnd && !pSelf->bPluginWebRTC4All)
- {
- CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
- else
- {
- if(hWnd && pSelf->bPluginWebRTC4All)
- {
- TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creating until session get connected");
- }
- else
- {
- TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
- }
- }
-
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    if(!pSelf || !codec || !codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(pSelf->bPrepared) {
+ TSK_DEBUG_WARN("D3D9 video consumer already prepared");
+ return -1;
+ }
+
+ // FIXME: DirectShow requires flipping but not D3D9
+ // The Core library always tries to flip when OSType==Win32. Must be changed
+ TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
+
+ HRESULT hr = S_OK;
+ HWND hWnd = Window(pSelf);
+
+ TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.width) {
+ TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.height) {
+ TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+ }
+
+ pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+ TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+    TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+    // The window handle is not created until the call is connected (incoming only) - At least on Internet Explorer 10
+ if(hWnd && !pSelf->bPluginWebRTC4All) {
+ CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
+ else {
+ if(hWnd && pSelf->bPluginWebRTC4All) {
+            TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creation until the session gets connected");
+ }
+ else {
+ TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
+ }
+ }
+
bail:
- pSelf->bPrepared = SUCCEEDED(hr);
- return pSelf->bPrepared ? 0 : -1;
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return pSelf->bPrepared ? 0 : -1;
}
static int plugin_win_mf_consumer_video_start(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(pSelf->bStarted){
- TSK_DEBUG_INFO("D3D9 video consumer already started");
- return 0;
- }
- if(!pSelf->bPrepared){
- TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
- return -1;
- }
-
- HRESULT hr = S_OK;
-
- pSelf->bPaused = false;
- pSelf->bStarted = true;
-
- return SUCCEEDED(hr) ? 0 : -1;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ TSK_DEBUG_INFO("D3D9 video consumer already started");
+ return 0;
+ }
+ if(!pSelf->bPrepared) {
+ TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
+ return -1;
+ }
+
+ HRESULT hr = S_OK;
+
+ pSelf->bPaused = false;
+ pSelf->bStarted = true;
+
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- HRESULT hr = S_OK;
- HWND hWnd = Window(pSelf);
+ HRESULT hr = S_OK;
+ HWND hWnd = Window(pSelf);
- IDirect3DSurface9 *pSurf = NULL;
+ IDirect3DSurface9 *pSurf = NULL;
IDirect3DSurface9 *pBB = NULL;
- if(!pSelf)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1; // because of the mutex lock do it here
- }
-
- tsk_safeobj_lock(pSelf);
-
- if(!buffer || !size)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_INVALIDARG);
- }
-
- if(!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("D3D9 video consumer not started");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(!hWnd)
- {
- TSK_DEBUG_INFO("Do not draw frame because HWND not set");
- goto bail; // not an error as the application can decide to set the HWND at any time
- }
-
- if (!pSelf->bWindowHooked)
- {
- // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
- CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
- }
-
- if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain)
- {
- if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain)
- {
- CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
- }
-
- if(hWnd)
- {
- // means HWND was not set but defined now
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
-
- CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
- }
-
- if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
- TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
- pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
- pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
- // Update media type
-
- SafeRelease(&pSelf->pSwapChain);
- CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
-
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
-
- // Update Destination will do noting if the window size haven't changed.
- // Force updating the destination rect if negotiated size change
- CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
- }
-
- CHECK_HR(hr = TestCooperativeLevel(pSelf));
-
- CHECK_HR(hr = UpdateDestinationRect(pSelf));
-
- CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
- CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
-
- // Fast copy() using MMX, SSE, or SSE2
- hr = MFCopyImage(
- (BYTE*)pSelf->rcLock.pBits,
- pSelf->rcLock.Pitch,
- (BYTE*)buffer,
- (pSelf->nNegWidth << 2),
- (pSelf->nNegWidth << 2),
- pSelf->nNegHeight
- );
- if(FAILED(hr))
- {
- // unlock() before leaving
- pSurf->UnlockRect();
- CHECK_HR(hr);
- }
-
- CHECK_HR(hr = pSurf->UnlockRect());
-
- // Color fill the back buffer
- CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1; // because of the mutex lock do it here
+ }
+
+ tsk_safeobj_lock(pSelf);
+
+ if(!buffer || !size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("D3D9 video consumer not started");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(!hWnd) {
+ TSK_DEBUG_INFO("Do not draw frame because HWND not set");
+ goto bail; // not an error as the application can decide to set the HWND at any time
+ }
+
+ if (!pSelf->bWindowHooked) {
+ // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
+ CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
+ }
+
+ if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain) {
+ if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain) {
+ CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
+ }
+
+ if(hWnd) {
+ // means HWND was not set but defined now
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+ CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
+ }
+
+ if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height) {
+ TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+ pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+ pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+ // Update media type
+
+ SafeRelease(&pSelf->pSwapChain);
+ CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
+
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+        // UpdateDestinationRect will do nothing if the window size hasn't changed.
+        // Force updating the destination rect if the negotiated size changes
+ CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
+ }
+
+ CHECK_HR(hr = TestCooperativeLevel(pSelf));
+
+ CHECK_HR(hr = UpdateDestinationRect(pSelf));
+
+ CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
+ CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
+
+ // Fast copy() using MMX, SSE, or SSE2
+ hr = MFCopyImage(
+ (BYTE*)pSelf->rcLock.pBits,
+ pSelf->rcLock.Pitch,
+ (BYTE*)buffer,
+ (pSelf->nNegWidth << 2),
+ (pSelf->nNegWidth << 2),
+ pSelf->nNegHeight
+ );
+ if(FAILED(hr)) {
+ // unlock() before leaving
+ pSurf->UnlockRect();
+ CHECK_HR(hr);
+ }
+
+ CHECK_HR(hr = pSurf->UnlockRect());
+
+ // Color fill the back buffer
+ CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
#if METROPOLIS
- CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0x00, 0x00, 0x00)));
+ CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0x00, 0x00, 0x00)));
#else
- CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
+ CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
#endif
-
- // Resize keeping aspect ratio and Blit the frame (required)
- hr = pSelf->pDevice->StretchRect(
- pSurf,
- NULL,
- pBB,
- &pSelf->rcDest/*NULL*/,
- D3DTEXF_LINEAR
- ); // could fail when display is being resized
- if(SUCCEEDED(hr))
- {
- // Present the frame
- CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
- }
- else
- {
- TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
- }
+
+ // Resize keeping aspect ratio and Blit the frame (required)
+ hr = pSelf->pDevice->StretchRect(
+ pSurf,
+ NULL,
+ pBB,
+ &pSelf->rcDest/*NULL*/,
+ D3DTEXF_LINEAR
+ ); // could fail when display is being resized
+ if(SUCCEEDED(hr)) {
+ // Present the frame
+ CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
+ }
+ else {
+ TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
+ }
bail:
- SafeRelease(&pSurf);
- SafeRelease(&pBB);
+ SafeRelease(&pSurf);
+ SafeRelease(&pBB);
- tsk_safeobj_unlock(pSelf);
+ tsk_safeobj_unlock(pSelf);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_pause(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("MF video producer not started");
- return 0;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video producer not started");
+ return 0;
+ }
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- pSelf->bPaused = true;
+ pSelf->bPaused = true;
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_stop(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- if(!pSelf){
+ if(!pSelf) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
pSelf->bStarted = false;
- pSelf->bPaused = false;
+ pSelf->bPaused = false;
- if(pSelf->hWindowFullScreen)
- {
- ::InvalidateRect(pSelf->hWindowFullScreen, NULL, FALSE);
- ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
- }
+ if(pSelf->hWindowFullScreen) {
+ ::InvalidateRect(pSelf->hWindowFullScreen, NULL, FALSE);
+ ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+ }
- // next start() will be called after prepare()
- return _plugin_win_mf_consumer_video_unprepare(pSelf);
+ // next start() will be called after prepare()
+ return _plugin_win_mf_consumer_video_unprepare(pSelf);
}
static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf)
{
- if(!pSelf)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- UnhookWindow(pSelf);
+ UnhookWindow(pSelf);
- if(pSelf->bStarted)
- {
- // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
- return -1;
- }
+ if(pSelf->bStarted) {
+ // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+ return -1;
+ }
- SafeRelease(&pSelf->pDevice);
- SafeRelease(&pSelf->pD3D);
- SafeRelease(&pSelf->pSwapChain);
+ SafeRelease(&pSelf->pDevice);
+ SafeRelease(&pSelf->pD3D);
+ SafeRelease(&pSelf->pSwapChain);
- pSelf->bPrepared = false;
+ pSelf->bPrepared = false;
- return 0;
+ return 0;
}
@@ -476,192 +446,179 @@ static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_
/* constructor */
static tsk_object_t* plugin_win_mf_consumer_video_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
- if(pSelf){
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- /* init self */
- tsk_safeobj_init(pSelf);
- TMEDIA_CONSUMER(pSelf)->video.fps = 15;
- TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
-
- pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1;
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+        TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+ /* init self */
+ tsk_safeobj_init(pSelf);
+ TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+ TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+ pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_consumer_video_dtor(tsk_object_t * self)
-{
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
- if(pSelf){
- /* stop */
- if(pSelf->bStarted)
- {
- plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
- }
-
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
- /* deinit self */
- _plugin_win_mf_consumer_video_unprepare(pSelf);
- tsk_safeobj_deinit(pSelf);
- }
-
- return self;
+{
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf) {
+ /* stop */
+ if(pSelf->bStarted) {
+ plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+ /* deinit self */
+ _plugin_win_mf_consumer_video_unprepare(pSelf);
+ tsk_safeobj_deinit(pSelf);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_consumer_video_def_s =
-{
- sizeof(plugin_win_mf_consumer_video_t),
- plugin_win_mf_consumer_video_ctor,
- plugin_win_mf_consumer_video_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_mf_consumer_video_def_s = {
+ sizeof(plugin_win_mf_consumer_video_t),
+ plugin_win_mf_consumer_video_ctor,
+ plugin_win_mf_consumer_video_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s =
-{
- &plugin_win_mf_consumer_video_def_s,
-
- tmedia_video,
- "D3D9 video consumer",
-
- plugin_win_mf_consumer_video_set,
- plugin_win_mf_consumer_video_prepare,
- plugin_win_mf_consumer_video_start,
- plugin_win_mf_consumer_video_consume,
- plugin_win_mf_consumer_video_pause,
- plugin_win_mf_consumer_video_stop
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s = {
+ &plugin_win_mf_consumer_video_def_s,
+
+ tmedia_video,
+ "D3D9 video consumer",
+
+ plugin_win_mf_consumer_video_set,
+ plugin_win_mf_consumer_video_prepare,
+ plugin_win_mf_consumer_video_start,
+ plugin_win_mf_consumer_video_consume,
+ plugin_win_mf_consumer_video_pause,
+ plugin_win_mf_consumer_video_stop
};
const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t = &plugin_win_mf_consumer_video_plugin_def_s;
// Helper functions
static HRESULT CreateDeviceD3D9(
- HWND hWnd,
- IDirect3DDevice9** ppDevice,
- IDirect3D9 **ppD3D,
- D3DPRESENT_PARAMETERS &d3dpp
- )
+ HWND hWnd,
+ IDirect3DDevice9** ppDevice,
+ IDirect3D9 **ppD3D,
+ D3DPRESENT_PARAMETERS &d3dpp
+)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
D3DDISPLAYMODE mode = { 0 };
- D3DPRESENT_PARAMETERS pp = {0};
-
- if(!ppDevice || *ppDevice || !ppD3D || *ppD3D)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION)))
- {
+ D3DPRESENT_PARAMETERS pp = {0};
+
+ if(!ppDevice || *ppDevice || !ppD3D || *ppD3D) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION))) {
CHECK_HR(hr = E_OUTOFMEMORY);
}
CHECK_HR(hr = (*ppD3D)->GetAdapterDisplayMode(
- D3DADAPTER_DEFAULT,
- &mode
- ));
+ D3DADAPTER_DEFAULT,
+ &mode
+ ));
CHECK_HR(hr = (*ppD3D)->CheckDeviceType(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- mode.Format,
- D3DFMT_X8R8G8B8,
- TRUE // windowed
- ));
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ mode.Format,
+ D3DFMT_X8R8G8B8,
+ TRUE // windowed
+ ));
pp.BackBufferFormat = D3DFMT_X8R8G8B8;
pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
- pp.Windowed = TRUE;
+ pp.Windowed = TRUE;
pp.hDeviceWindow = hWnd;
CHECK_HR(hr = (*ppD3D)->CreateDevice(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- hWnd,
- D3DCREATE_HARDWARE_VERTEXPROCESSING,
- &pp,
- ppDevice
- ));
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ hWnd,
+ D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &pp,
+ ppDevice
+ ));
- d3dpp = pp;
+ d3dpp = pp;
bail:
- if(FAILED(hr))
- {
- SafeRelease(ppD3D);
- SafeRelease(ppDevice);
- }
+ if(FAILED(hr)) {
+ SafeRelease(ppD3D);
+ SafeRelease(ppDevice);
+ }
return hr;
}
static HRESULT TestCooperativeLevel(
- struct plugin_win_mf_consumer_video_s *pSelf
- )
+ struct plugin_win_mf_consumer_video_s *pSelf
+)
{
- HRESULT hr = S_OK;
-
- if (!pSelf || !pSelf->pDevice)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- switch((hr = pSelf->pDevice->TestCooperativeLevel()))
- {
- case D3D_OK:
- {
- break;
- }
-
- case D3DERR_DEVICELOST:
- {
- hr = S_OK;
- break;
- }
-
- case D3DERR_DEVICENOTRESET:
- {
- hr = ResetDevice(pSelf, TRUE);
- break;
- }
-
- default:
- {
- break;
- }
- }
-
- CHECK_HR(hr);
+ HRESULT hr = S_OK;
+
+ if (!pSelf || !pSelf->pDevice) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ switch((hr = pSelf->pDevice->TestCooperativeLevel())) {
+ case D3D_OK: {
+ break;
+ }
+
+ case D3DERR_DEVICELOST: {
+ hr = S_OK;
+ break;
+ }
+
+ case D3DERR_DEVICENOTRESET: {
+ hr = ResetDevice(pSelf, TRUE);
+ break;
+ }
+
+ default: {
+ break;
+ }
+ }
+
+ CHECK_HR(hr);
bail:
return hr;
}
static HRESULT CreateSwapChain(
- HWND hWnd,
- UINT32 nFrameWidth,
- UINT32 nFrameHeight,
- IDirect3DDevice9* pDevice,
- IDirect3DSwapChain9 **ppSwapChain
- )
+ HWND hWnd,
+ UINT32 nFrameWidth,
+ UINT32 nFrameHeight,
+ IDirect3DDevice9* pDevice,
+ IDirect3DSwapChain9 **ppSwapChain
+)
{
HRESULT hr = S_OK;
D3DPRESENT_PARAMETERS pp = { 0 };
- if(!pDevice || !ppSwapChain || *ppSwapChain)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- pp.BackBufferWidth = nFrameWidth;
+ if(!pDevice || !ppSwapChain || *ppSwapChain) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ pp.BackBufferWidth = nFrameWidth;
pp.BackBufferHeight = nFrameHeight;
pp.Windowed = TRUE;
pp.SwapEffect = D3DSWAPEFFECT_FLIP;
@@ -673,7 +630,7 @@ static HRESULT CreateSwapChain(
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
pp.BackBufferCount = NUM_BACK_BUFFERS;
- CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
+ CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
bail:
return hr;
@@ -681,7 +638,7 @@ bail:
static inline HWND Window(struct plugin_win_mf_consumer_video_s *pSelf)
{
- return pSelf ? (pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow) : NULL;
+ return pSelf ? (pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow) : NULL;
}
static inline LONG Width(const RECT& r)
@@ -710,17 +667,14 @@ static inline RECT CorrectAspectRatio(const RECT& src, const MFRatio& srcPAR)
// Start with a rectangle the same size as src, but offset to the origin (0,0).
RECT rc = {0, 0, src.right - src.left, src.bottom - src.top};
- if ((srcPAR.Numerator != 1) || (srcPAR.Denominator != 1))
- {
+ if ((srcPAR.Numerator != 1) || (srcPAR.Denominator != 1)) {
// Correct for the source's PAR.
- if (srcPAR.Numerator > srcPAR.Denominator)
- {
+ if (srcPAR.Numerator > srcPAR.Denominator) {
// The source has "wide" pixels, so stretch the width.
rc.right = MulDiv(rc.right, srcPAR.Numerator, srcPAR.Denominator);
}
- else if (srcPAR.Numerator < srcPAR.Denominator)
- {
+ else if (srcPAR.Numerator < srcPAR.Denominator) {
// The source has "tall" pixels, so stretch the height.
rc.bottom = MulDiv(rc.bottom, srcPAR.Denominator, srcPAR.Numerator);
}
@@ -783,236 +737,214 @@ static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst)
static inline HRESULT UpdateDestinationRect(plugin_win_mf_consumer_video_t *pSelf, BOOL bForce /*= FALSE*/)
{
- HRESULT hr = S_OK;
- HWND hwnd = Window(pSelf);
-
- if(!pSelf)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!hwnd)
- {
- CHECK_HR(hr = E_HANDLE);
- }
+ HRESULT hr = S_OK;
+ HWND hwnd = Window(pSelf);
+
+ if(!pSelf) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!hwnd) {
+ CHECK_HR(hr = E_HANDLE);
+ }
RECT rcClient;
- GetClientRect(hwnd, &rcClient);
+ GetClientRect(hwnd, &rcClient);
- // only update destination if window size changed
- if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top))
- {
- CHECK_HR(hr = ResetDevice(pSelf));
+ // only update destination if window size changed
+ if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top)) {
+ CHECK_HR(hr = ResetDevice(pSelf));
- pSelf->rcWindow = rcClient;
+ pSelf->rcWindow = rcClient;
#if 1
- RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
- rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
- pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
+ RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
+ rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
+ pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
#else
- long w = rcClient.right - rcClient.left;
- long h = rcClient.bottom - rcClient.top;
- float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
- // (w/h)=ratio =>
- // 1) h=w/ratio
- // and
- // 2) w=h*ratio
- pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
- pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
- pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
- pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
+ long w = rcClient.right - rcClient.left;
+ long h = rcClient.bottom - rcClient.top;
+ float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
+ // (w/h)=ratio =>
+ // 1) h=w/ratio
+ // and
+ // 2) w=h*ratio
+ pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
+ pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
+ pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
+ pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
#endif
- //::InvalidateRect(hwnd, NULL, FALSE);
- }
+ //::InvalidateRect(hwnd, NULL, FALSE);
+ }
bail:
- return hr;
+ return hr;
}
static HRESULT ResetDevice(plugin_win_mf_consumer_video_t *pSelf, BOOL bUpdateDestinationRect /*= FALSE*/)
{
HRESULT hr = S_OK;
- tsk_safeobj_lock(pSelf);
+ tsk_safeobj_lock(pSelf);
- HWND hWnd = Window(pSelf);
+ HWND hWnd = Window(pSelf);
- if (pSelf->pDevice)
- {
+ if (pSelf->pDevice) {
D3DPRESENT_PARAMETERS d3dpp = pSelf->d3dpp;
hr = pSelf->pDevice->Reset(&d3dpp);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
SafeRelease(&pSelf->pDevice);
- SafeRelease(&pSelf->pD3D);
- SafeRelease(&pSelf->pSwapChain);
+ SafeRelease(&pSelf->pD3D);
+ SafeRelease(&pSelf->pSwapChain);
}
}
- if (pSelf->pDevice == NULL && hWnd)
- {
+ if (pSelf->pDevice == NULL && hWnd) {
CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
- if(bUpdateDestinationRect) // endless loop guard
- {
- CHECK_HR(hr = UpdateDestinationRect(pSelf));
- }
+ if(bUpdateDestinationRect) { // endless loop guard
+ CHECK_HR(hr = UpdateDestinationRect(pSelf));
+ }
bail:
- tsk_safeobj_unlock(pSelf);
+ tsk_safeobj_unlock(pSelf);
- return hr;
+ return hr;
}
static HRESULT SetFullscreen(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bFullScreen)
{
- HRESULT hr = S_OK;
- if(!pSelf)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(pSelf->bFullScreen != bFullScreen)
- {
- tsk_safeobj_lock(pSelf);
- if(bFullScreen)
- {
- HWND hWnd = CreateFullScreenWindow(pSelf);
- if(hWnd)
- {
- ::ShowWindow(hWnd, SW_SHOWDEFAULT);
- ::UpdateWindow(hWnd);
- }
- }
- else if(pSelf->hWindowFullScreen)
- {
- ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
- }
- pSelf->bFullScreen = bFullScreen;
- if(pSelf->bPrepared)
- {
- hr = ResetDevice(pSelf);
- }
- tsk_safeobj_unlock(pSelf);
-
- CHECK_HR(hr);
- }
+ HRESULT hr = S_OK;
+ if(!pSelf) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(pSelf->bFullScreen != bFullScreen) {
+ tsk_safeobj_lock(pSelf);
+ if(bFullScreen) {
+ HWND hWnd = CreateFullScreenWindow(pSelf);
+ if(hWnd) {
+ ::ShowWindow(hWnd, SW_SHOWDEFAULT);
+ ::UpdateWindow(hWnd);
+ }
+ }
+ else if(pSelf->hWindowFullScreen) {
+ ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+ }
+ pSelf->bFullScreen = bFullScreen;
+ if(pSelf->bPrepared) {
+ hr = ResetDevice(pSelf);
+ }
+ tsk_safeobj_unlock(pSelf);
+
+ CHECK_HR(hr);
+ }
bail:
- return hr;
+ return hr;
}
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- switch(uMsg)
- {
- case WM_CREATE:
- case WM_SIZE:
- case WM_MOVE:
- {
- struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
- if (pSelf)
- {
-
- }
- break;
- }
-
- case WM_ERASEBKGND:
- {
- return TRUE; // avoid background erasing.
- }
-
- case WM_CHAR:
- case WM_KEYUP:
- {
- struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
- if (pSelf)
- {
- SetFullscreen(pSelf, FALSE);
- }
-
- break;
- }
- }
-
- return DefWindowProc(hWnd, uMsg, wParam, lParam);
+ switch(uMsg) {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE: {
+ struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
+ if (pSelf) {
+
+ }
+ break;
+ }
+
+ case WM_ERASEBKGND: {
+ return TRUE; // avoid background erasing.
+ }
+
+ case WM_CHAR:
+ case WM_KEYUP: {
+ struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
+ if (pSelf) {
+ SetFullscreen(pSelf, FALSE);
+ }
+
+ break;
+ }
+ }
+
+ return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
static HWND CreateFullScreenWindow(struct plugin_win_mf_consumer_video_s *pSelf)
{
- HRESULT hr = S_OK;
-
- if(!pSelf)
- {
- return NULL;
- }
-
- if(!pSelf->hWindowFullScreen)
- {
- WNDCLASS wc = {0};
-
- wc.lpfnWndProc = WndProc;
- wc.hInstance = GetModuleHandle(NULL);
- wc.hCursor = LoadCursor(NULL, IDC_ARROW);
- wc.lpszClassName = L"WindowClass";
- RegisterClass(&wc);
- pSelf->hWindowFullScreen = ::CreateWindowEx(
- NULL,
- wc.lpszClassName,
- L"Doubango's Video Consumer Fullscreen",
- WS_EX_TOPMOST | WS_POPUP,
- 0, 0,
- GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
- NULL,
- NULL,
- GetModuleHandle(NULL),
- NULL);
-
- SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
- }
- return pSelf->hWindowFullScreen;
+ HRESULT hr = S_OK;
+
+ if(!pSelf) {
+ return NULL;
+ }
+
+ if(!pSelf->hWindowFullScreen) {
+ WNDCLASS wc = {0};
+
+ wc.lpfnWndProc = WndProc;
+ wc.hInstance = GetModuleHandle(NULL);
+ wc.hCursor = LoadCursor(NULL, IDC_ARROW);
+ wc.lpszClassName = L"WindowClass";
+ RegisterClass(&wc);
+ pSelf->hWindowFullScreen = ::CreateWindowEx(
+ NULL,
+ wc.lpszClassName,
+ L"Doubango's Video Consumer Fullscreen",
+ WS_EX_TOPMOST | WS_POPUP,
+ 0, 0,
+ GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
+ NULL,
+ NULL,
+ GetModuleHandle(NULL),
+ NULL);
+
+ SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
+ }
+ return pSelf->hWindowFullScreen;
}
static HRESULT HookWindow(plugin_win_mf_consumer_video_s *pSelf, HWND hWnd)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- tsk_safeobj_lock(pSelf);
+ tsk_safeobj_lock(pSelf);
- CHECK_HR(hr = UnhookWindow(pSelf));
+ CHECK_HR(hr = UnhookWindow(pSelf));
- if ((pSelf->hWindow = hWnd)) {
- pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
- if (!pSelf->wndProc) {
- TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
- CHECK_HR(hr = E_FAIL);
- }
- pSelf->bWindowHooked = TRUE;
- }
+ if ((pSelf->hWindow = hWnd)) {
+ pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
+ if (!pSelf->wndProc) {
+ TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
+ CHECK_HR(hr = E_FAIL);
+ }
+ pSelf->bWindowHooked = TRUE;
+ }
bail:
- tsk_safeobj_unlock(pSelf);
- return S_OK;
+ tsk_safeobj_unlock(pSelf);
+    return hr;
}
static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf)
{
- tsk_safeobj_lock(pSelf);
- if (pSelf->hWindow && pSelf->wndProc) {
- SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
- pSelf->wndProc = NULL;
- }
- if(pSelf->hWindow)
- {
- ::InvalidateRect(pSelf->hWindow, NULL, FALSE);
- }
- pSelf->bWindowHooked = FALSE;
- tsk_safeobj_unlock(pSelf);
- return S_OK;
+ tsk_safeobj_lock(pSelf);
+ if (pSelf->hWindow && pSelf->wndProc) {
+ SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
+ pSelf->wndProc = NULL;
+ }
+ if(pSelf->hWindow) {
+ ::InvalidateRect(pSelf->hWindow, NULL, FALSE);
+ }
+ pSelf->bWindowHooked = FALSE;
+ tsk_safeobj_unlock(pSelf);
+ return S_OK;
}
@@ -1038,39 +970,38 @@ static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf)
// To avoid chroma conversion (performance issues) we use NV12 when the codec is bundled, as Media Foundation codecs most likely only support this format.
// NV12 is the native format for Media Foundation codecs (e.g. Intel Quick Sync) and the GPU.
// I420 is the native format for FFmpeg, libvpx and libtheora.
-const GUID kDefaultUncompressedType
+const GUID kDefaultUncompressedType
#if PLUGIN_MF_CV_BUNDLE_CODEC
-= MFVideoFormat_NV12;
+ = MFVideoFormat_NV12;
#else
-= MFVideoFormat_I420;
+ = MFVideoFormat_I420;
#endif
DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
-0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
static void* TSK_STDCALL RunSessionThread(void *pArg);
static int _plugin_win_mf_consumer_video_unprepare(struct plugin_win_mf_consumer_video_s* pSelf);
-typedef struct plugin_win_mf_consumer_video_s
-{
- TMEDIA_DECLARE_CONSUMER;
-
- bool bStarted, bPrepared;
- HWND hWindow;
- tsk_thread_handle_t* ppTread[1];
+typedef struct plugin_win_mf_consumer_video_s {
+ TMEDIA_DECLARE_CONSUMER;
+
+ bool bStarted, bPrepared;
+ HWND hWindow;
+ tsk_thread_handle_t* ppTread[1];
- UINT32 nNegWidth;
- UINT32 nNegHeight;
- UINT32 nNegFps;
+ UINT32 nNegWidth;
+ UINT32 nNegHeight;
+ UINT32 nNegFps;
- MFCodecVideo *pDecoder;
+ MFCodecVideo *pDecoder;
IMFMediaSession *pSession;
CMFSource *pSource;
IMFActivate *pSinkActivate;
- DisplayWatcher* pDisplayWatcher;
+ DisplayWatcher* pDisplayWatcher;
IMFTopology *pTopologyFull;
- IMFTopology *pTopologyPartial;
- IMFMediaType *pOutType;
+ IMFTopology *pTopologyPartial;
+ IMFMediaType *pOutType;
}
plugin_win_mf_consumer_video_t;
@@ -1079,437 +1010,437 @@ plugin_win_mf_consumer_video_t;
/* ============ Media Consumer Interface ================= */
static int plugin_win_mf_consumer_video_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- HRESULT hr = S_OK;
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!self || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(param->value_type == tmedia_pvt_int64){
- if(tsk_striequals(param->key, "remote-hwnd")){
- HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
- if(hWnd != pSelf->hWindow)
- {
- pSelf->hWindow = hWnd;
- if(pSelf->pDisplayWatcher)
- {
- CHECK_HR(hr = pSelf->pDisplayWatcher->SetHwnd(hWnd));
- }
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "fullscreen")){
- if(pSelf->pDisplayWatcher)
- {
- CHECK_HR(hr = pSelf->pDisplayWatcher->SetFullscreen(!!*((int32_t*)param->value)));
- }
- }
- else if(tsk_striequals(param->key, "create-on-current-thead")){
- // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if(tsk_striequals(param->key, "plugin-firefox")){
- /*DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
- if(DSCONSUMER(self)->display){
- DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
- }*/
- }
- }
+ int ret = 0;
+ HRESULT hr = S_OK;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(param->value_type == tmedia_pvt_int64) {
+ if(tsk_striequals(param->key, "remote-hwnd")) {
+ HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+ if(hWnd != pSelf->hWindow) {
+ pSelf->hWindow = hWnd;
+ if(pSelf->pDisplayWatcher) {
+ CHECK_HR(hr = pSelf->pDisplayWatcher->SetHwnd(hWnd));
+ }
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "fullscreen")) {
+ if(pSelf->pDisplayWatcher) {
+ CHECK_HR(hr = pSelf->pDisplayWatcher->SetFullscreen(!!*((int32_t*)param->value)));
+ }
+ }
+ else if(tsk_striequals(param->key, "create-on-current-thead")) {
+ // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if(tsk_striequals(param->key, "plugin-firefox")) {
+ /*DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
+ if(DSCONSUMER(self)->display){
+ DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
+ }*/
+ }
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!pSelf || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(pSelf->bPrepared){
- TSK_DEBUG_WARN("MF video consumer already prepared");
- return -1;
- }
-
- // FIXME: DirectShow requires flipping but not MF
- // The Core library always tries to flip when OSType==Win32. Must be changed
- TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
-
- HRESULT hr = S_OK;
-
- TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
-
- if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
- TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
- }
- if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
- TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
- }
-
- pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
- pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.display.width;
- pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.display.height;
-
- TSK_DEBUG_INFO("MF video consumer: fps=%d, width=%d, height=%d",
- pSelf->nNegFps,
- pSelf->nNegWidth,
- pSelf->nNegHeight);
-
- if(kDefaultUncompressedType == MFVideoFormat_NV12) {
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_nv12;
- }
- else if(kDefaultUncompressedType == MFVideoFormat_I420) {
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
- }
- else if(kDefaultUncompressedType == MFVideoFormat_RGB32) {
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
- }
- else if(kDefaultUncompressedType == MFVideoFormat_RGB24) {
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb24;
- }
- else {
- CHECK_HR(hr = E_NOTIMPL);
- }
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- IMFMediaSink* pMediaSink = NULL;
- IMFAttributes* pSessionAttributes = NULL;
-
- // Set session attributes
- CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
- CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
-
- CHECK_HR(hr = MFCreateMediaType(&pSelf->pOutType));
- CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    if(!pSelf || !codec || !codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(pSelf->bPrepared) {
+ TSK_DEBUG_WARN("MF video consumer already prepared");
+ return -1;
+ }
+
+ // FIXME: DirectShow requires flipping but not MF
+ // The Core library always tries to flip when OSType==Win32. Must be changed
+ TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
+
+ HRESULT hr = S_OK;
+
+ TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.width) {
+ TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.height) {
+ TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+ }
+
+ pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
+ pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.display.width;
+ pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+ TSK_DEBUG_INFO("MF video consumer: fps=%d, width=%d, height=%d",
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ if(kDefaultUncompressedType == MFVideoFormat_NV12) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_nv12;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_I420) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_RGB32) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_RGB24) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb24;
+ }
+ else {
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+    TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+ IMFMediaSink* pMediaSink = NULL;
+ IMFAttributes* pSessionAttributes = NULL;
+
+ // Set session attributes
+ CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+ CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pOutType));
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
#if PLUGIN_MF_CV_BUNDLE_CODEC
- if((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
- // both Microsoft and Intel encoders support NV12 only as input
- // static const BOOL kIsEncoder = FALSE;
- // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pSelf->pDecoder);
- pSelf->pDecoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder);
- if(pSelf->pDecoder)
- {
- hr = pSelf->pDecoder->Initialize(
- pSelf->nNegFps,
- pSelf->nNegWidth,
- pSelf->nNegHeight);
-
- if(FAILED(hr))
- {
- SafeRelease(&pSelf->pDecoder);
- hr = S_OK;
- }
- }
- if(SUCCEEDED(hr) && pSelf->pDecoder) {
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = codec->id; // means accept ENCODED fames
- CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
- }
- else {
- SafeRelease(&pSelf->pDecoder);
- TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
- }
- }
+ if((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
+ // both Microsoft and Intel encoders support NV12 only as input
+ // static const BOOL kIsEncoder = FALSE;
+ // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pSelf->pDecoder);
+ pSelf->pDecoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder);
+ if(pSelf->pDecoder) {
+ hr = pSelf->pDecoder->Initialize(
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ if(FAILED(hr)) {
+ SafeRelease(&pSelf->pDecoder);
+ hr = S_OK;
+ }
+ }
+ if(SUCCEEDED(hr) && pSelf->pDecoder) {
+            TMEDIA_CONSUMER(pSelf)->decoder.codec_id = codec->id; // means accept ENCODED frames
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
+ }
+ else {
+ SafeRelease(&pSelf->pDecoder);
+            TSK_DEBUG_WARN("Failed to find H.264 HW decoder...fallback to SW implementation");
+ }
+ }
#endif
- if(!pSelf->pDecoder){
- CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, kDefaultUncompressedType));
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = kDefaultUncompressedType == MFVideoFormat_NV12 ? tmedia_chroma_nv12 : tmedia_chroma_yuv420p;
- }
+ if(!pSelf->pDecoder) {
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, kDefaultUncompressedType));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = kDefaultUncompressedType == MFVideoFormat_NV12 ? tmedia_chroma_nv12 : tmedia_chroma_yuv420p;
+ }
CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
- CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
+ CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, pSelf->nNegWidth, pSelf->nNegHeight));
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, pSelf->nNegFps, 1));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, pSelf->nNegFps, 1));
CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
- CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&pSelf->pSource, pSelf->pOutType));
+ CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&pSelf->pSource, pSelf->pOutType));
- // Apply Encoder output type (must be called before SetInputType)
- //if(pSelf->pDecoder) {
- // CHECK_HR(hr = pSelf->pDecoder->SetOutputType(0, pSelf->pOutType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
- //}
+    // Apply Decoder output type (must be called before SetInputType)
+ //if(pSelf->pDecoder) {
+ // CHECK_HR(hr = pSelf->pDecoder->SetOutputType(0, pSelf->pOutType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+ //}
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
- // Create the EVR activation object.
- CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSelf->pSinkActivate));
+ // Create the EVR activation object.
+ CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSelf->pSinkActivate));
- // Create the topology.
- CHECK_HR(hr = MFUtils::CreateTopology(
- pSelf->pSource,
- pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
- pSelf->pSinkActivate,
- NULL/*Preview*/,
- pSelf->pOutType,
- &pSelf->pTopologyPartial));
- // Resolve topology (adds video processors if needed).
- CHECK_HR(hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pSelf->pTopologyFull));
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(
+ pSelf->pSource,
+ pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
+ pSelf->pSinkActivate,
+ NULL/*Preview*/,
+ pSelf->pOutType,
+ &pSelf->pTopologyPartial));
+ // Resolve topology (adds video processors if needed).
+ CHECK_HR(hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pSelf->pTopologyFull));
- // Find EVR
- CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink));
+ // Find EVR
+ CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink));
- // Create EVR watcher
- pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
- CHECK_HR(hr);
+ // Create EVR watcher
+ pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
+ CHECK_HR(hr);
bail:
- SafeRelease(&pMediaSink);
- SafeRelease(&pSessionAttributes);
-
- pSelf->bPrepared = SUCCEEDED(hr);
- return pSelf->bPrepared ? 0 : -1;
+ SafeRelease(&pMediaSink);
+ SafeRelease(&pSessionAttributes);
+
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return pSelf->bPrepared ? 0 : -1;
}
static int plugin_win_mf_consumer_video_start(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(pSelf->bStarted){
- TSK_DEBUG_INFO("MF video consumer already started");
- return 0;
- }
- if(!pSelf->bPrepared){
- TSK_DEBUG_ERROR("MF video consumer not prepared");
- return -1;
- }
-
- HRESULT hr = S_OK;
-
- // Run EVR watcher
- if(pSelf->pDisplayWatcher) {
- CHECK_HR(hr = pSelf->pDisplayWatcher->Start());
- }
-
- // Run the media session.
- CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopologyFull));
-
- // Start asynchronous watcher thread
- pSelf->bStarted = true;
- int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
- if(ret != 0) {
- TSK_DEBUG_ERROR("Failed to create thread");
- hr = E_FAIL;
- pSelf->bStarted = false;
- if(pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
- CHECK_HR(hr = E_FAIL);
- }
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video consumer already started");
+ return 0;
+ }
+ if(!pSelf->bPrepared) {
+ TSK_DEBUG_ERROR("MF video consumer not prepared");
+ return -1;
+ }
+
+ HRESULT hr = S_OK;
+
+ // Run EVR watcher
+ if(pSelf->pDisplayWatcher) {
+ CHECK_HR(hr = pSelf->pDisplayWatcher->Start());
+ }
+
+ // Run the media session.
+ CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopologyFull));
+
+ // Start asynchronous watcher thread
+ pSelf->bStarted = true;
+ int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+ if(ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ hr = E_FAIL;
+ pSelf->bStarted = false;
+ if(pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
+ CHECK_HR(hr = E_FAIL);
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- HRESULT hr = S_OK;
-
- if(!pSelf || !buffer || !size) {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_INVALIDARG);
- }
-
- if(!pSelf->bStarted) {
- TSK_DEBUG_INFO("MF video consumer not started");
- CHECK_HR(hr = E_FAIL);
- }
- if(!pSelf->pSource) {
- TSK_DEBUG_ERROR("No video custom source");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
- TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
- pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
- pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
- // Update media type
- CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, TMEDIA_CONSUMER(pSelf)->video.in.width, TMEDIA_CONSUMER(pSelf)->video.in.height));
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, TMEDIA_CONSUMER(pSelf)->video.fps, 1));
-
- CHECK_HR(hr = pSelf->pSession->ClearTopologies());
-
- //
- // FIXME: Using same EVR when the size is just swapped (e.g. [640, 480] -> [480, 640]) doesn't work while other changes does (e.g. [352, 288] -> [640, 480])
- // /!\This look like a bug in Media Foundation
- //
- if(pSelf->nNegWidth == TMEDIA_CONSUMER(pSelf)->video.in.height && pSelf->nNegHeight == TMEDIA_CONSUMER(pSelf)->video.in.width) // swapped?
- {
- TSK_DEBUG_INFO("/!\\ Size swapped");
-
- IMFActivate* pSinkActivate = NULL;
- IMFTopology* pTopologyPartial = NULL;
- hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSinkActivate);
- if(FAILED(hr)) goto end_of_swapping;
- hr = MFUtils::CreateTopology(
- pSelf->pSource,
- pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
- pSinkActivate,
- NULL/*Preview*/,
- pSelf->pOutType,
- &pTopologyPartial);
- if(FAILED(hr)) goto end_of_swapping;
-
- if(SUCCEEDED(hr)) {
- SafeRelease(&pSelf->pSinkActivate);
- SafeRelease(&pSelf->pTopologyPartial);
- pSelf->pSinkActivate = pSinkActivate; pSinkActivate = NULL;
- pSelf->pTopologyPartial = pTopologyPartial; pTopologyPartial = NULL;
-
- }
-
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ HRESULT hr = S_OK;
+
+ if(!pSelf || !buffer || !size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video consumer not started");
+ CHECK_HR(hr = E_FAIL);
+ }
+ if(!pSelf->pSource) {
+ TSK_DEBUG_ERROR("No video custom source");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height) {
+ TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+ pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+ pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+ // Update media type
+ CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, TMEDIA_CONSUMER(pSelf)->video.in.width, TMEDIA_CONSUMER(pSelf)->video.in.height));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, TMEDIA_CONSUMER(pSelf)->video.fps, 1));
+
+ CHECK_HR(hr = pSelf->pSession->ClearTopologies());
+
+ //
+        // FIXME: Using same EVR when the size is just swapped (e.g. [640, 480] -> [480, 640]) doesn't work while other changes do (e.g. [352, 288] -> [640, 480])
+        // /!\ This looks like a bug in Media Foundation
+ //
+ if(pSelf->nNegWidth == TMEDIA_CONSUMER(pSelf)->video.in.height && pSelf->nNegHeight == TMEDIA_CONSUMER(pSelf)->video.in.width) { // swapped?
+ TSK_DEBUG_INFO("/!\\ Size swapped");
+
+ IMFActivate* pSinkActivate = NULL;
+ IMFTopology* pTopologyPartial = NULL;
+ hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSinkActivate);
+ if(FAILED(hr)) {
+ goto end_of_swapping;
+ }
+ hr = MFUtils::CreateTopology(
+ pSelf->pSource,
+ pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
+ pSinkActivate,
+ NULL/*Preview*/,
+ pSelf->pOutType,
+ &pTopologyPartial);
+ if(FAILED(hr)) {
+ goto end_of_swapping;
+ }
+
+ if(SUCCEEDED(hr)) {
+ SafeRelease(&pSelf->pSinkActivate);
+ SafeRelease(&pSelf->pTopologyPartial);
+ pSelf->pSinkActivate = pSinkActivate;
+ pSinkActivate = NULL;
+ pSelf->pTopologyPartial = pTopologyPartial;
+ pTopologyPartial = NULL;
+
+ }
+
end_of_swapping:
- SafeRelease(&pSinkActivate);
- SafeRelease(&pTopologyPartial);
- CHECK_HR(hr);
- }
-
- // Set media type again (not required but who know)
- CHECK_HR(hr = MFUtils::SetMediaType(pSelf->pSource, pSelf->pOutType));
-
- // Rebuild topology using the partial one
- IMFTopology* pTopologyFull = NULL;
- hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pTopologyFull);
- if(SUCCEEDED(hr)){
- SafeRelease(&pSelf->pTopologyFull);
- pSelf->pTopologyFull = pTopologyFull; pTopologyFull = NULL;
- }
- SafeRelease(&pTopologyFull);
- CHECK_HR(hr);
-
- // Find Main Sink
- IMFMediaSink* pMediaSink = NULL;
- hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink);
- if(SUCCEEDED(hr)) {
- if(pSelf->pDisplayWatcher){
- delete pSelf->pDisplayWatcher, pSelf->pDisplayWatcher = NULL;
- }
- pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
- if(SUCCEEDED(hr) && pSelf->bStarted) {
- hr = pSelf->pDisplayWatcher->Start();
- }
- }
- SafeRelease(&pMediaSink);
- CHECK_HR(hr);
-
- // Update the topology associated to the media session
- CHECK_HR(hr = pSelf->pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pSelf->pTopologyFull));
-
- // Update negotiated width and height
- pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.in.width;
- pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.in.height;
- }
-
- // Deliver buffer
- CHECK_HR(hr = pSelf->pSource->CopyVideoBuffer(pSelf->nNegWidth, pSelf->nNegHeight, buffer, size));
+ SafeRelease(&pSinkActivate);
+ SafeRelease(&pTopologyPartial);
+ CHECK_HR(hr);
+ }
+
+        // Set media type again (not required but who knows)
+ CHECK_HR(hr = MFUtils::SetMediaType(pSelf->pSource, pSelf->pOutType));
+
+ // Rebuild topology using the partial one
+ IMFTopology* pTopologyFull = NULL;
+ hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pTopologyFull);
+ if(SUCCEEDED(hr)) {
+ SafeRelease(&pSelf->pTopologyFull);
+ pSelf->pTopologyFull = pTopologyFull;
+ pTopologyFull = NULL;
+ }
+ SafeRelease(&pTopologyFull);
+ CHECK_HR(hr);
+
+ // Find Main Sink
+ IMFMediaSink* pMediaSink = NULL;
+ hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink);
+ if(SUCCEEDED(hr)) {
+ if(pSelf->pDisplayWatcher) {
+ delete pSelf->pDisplayWatcher, pSelf->pDisplayWatcher = NULL;
+ }
+ pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
+ if(SUCCEEDED(hr) && pSelf->bStarted) {
+ hr = pSelf->pDisplayWatcher->Start();
+ }
+ }
+ SafeRelease(&pMediaSink);
+ CHECK_HR(hr);
+
+ // Update the topology associated to the media session
+ CHECK_HR(hr = pSelf->pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pSelf->pTopologyFull));
+
+ // Update negotiated width and height
+ pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.in.width;
+ pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.in.height;
+ }
+
+ // Deliver buffer
+ CHECK_HR(hr = pSelf->pSource->CopyVideoBuffer(pSelf->nNegWidth, pSelf->nNegHeight, buffer, size));
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_pause(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("MF video producer not started");
- return 0;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(!pSelf->bStarted) {
+        TSK_DEBUG_INFO("MF video consumer not started");
+ return 0;
+ }
- HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+ HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_stop(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- if(!pSelf){
+ if(!pSelf) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
HRESULT hr = S_OK;
- // stop EVR watcher
- if(pSelf->pDisplayWatcher) {
- hr = pSelf->pDisplayWatcher->Stop();
- }
+ // stop EVR watcher
+ if(pSelf->pDisplayWatcher) {
+ hr = pSelf->pDisplayWatcher->Stop();
+ }
// for the thread
pSelf->bStarted = false;
hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
- if(pSelf->ppTread[0]){
+ if(pSelf->ppTread[0]) {
tsk_thread_join(&pSelf->ppTread[0]);
}
hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
- // next start() will be called after prepare()
- return _plugin_win_mf_consumer_video_unprepare(pSelf);
+ // next start() will be called after prepare()
+ return _plugin_win_mf_consumer_video_unprepare(pSelf);
}
static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf)
{
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(pSelf->bStarted) {
- // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
- }
-
- if(pSelf->pDisplayWatcher) {
- pSelf->pDisplayWatcher->Stop();
- }
- if(pSelf->pSource){
- pSelf->pSource->Shutdown();
- pSelf->pSource = NULL;
- }
- if(pSelf->pSession){
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+ }
+
+ if(pSelf->pDisplayWatcher) {
+ pSelf->pDisplayWatcher->Stop();
+ }
+ if(pSelf->pSource) {
+ pSelf->pSource->Shutdown();
+ pSelf->pSource = NULL;
+ }
+ if(pSelf->pSession) {
pSelf->pSession->Shutdown();
- pSelf->pSession = NULL;
+ pSelf->pSession = NULL;
}
- SafeRelease(&pSelf->pDecoder);
+ SafeRelease(&pSelf->pDecoder);
SafeRelease(&pSelf->pSession);
SafeRelease(&pSelf->pSource);
SafeRelease(&pSelf->pSinkActivate);
SafeRelease(&pSelf->pTopologyFull);
- SafeRelease(&pSelf->pTopologyPartial);
- SafeRelease(&pSelf->pOutType);
+ SafeRelease(&pSelf->pTopologyPartial);
+ SafeRelease(&pSelf->pOutType);
- if(pSelf->pDisplayWatcher) {
- delete pSelf->pDisplayWatcher;
- pSelf->pDisplayWatcher = NULL;
- }
+ if(pSelf->pDisplayWatcher) {
+ delete pSelf->pDisplayWatcher;
+ pSelf->pDisplayWatcher = NULL;
+ }
- pSelf->bPrepared = false;
+ pSelf->bPrepared = false;
- return 0;
+ return 0;
}
@@ -1519,102 +1450,98 @@ static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_
/* constructor */
static tsk_object_t* plugin_win_mf_consumer_video_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
- if(pSelf){
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- /* init self */
- // consumer->create_on_ui_thread = tsk_true;
- TMEDIA_CONSUMER(pSelf)->video.fps = 15;
- TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
-
- TSK_DEBUG_INFO("Create WinMF video consumer");
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
+        TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+ /* init self */
+ // consumer->create_on_ui_thread = tsk_true;
+ TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+ TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+ TSK_DEBUG_INFO("Create WinMF video consumer");
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_consumer_video_dtor(tsk_object_t * self)
-{
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
- if(pSelf){
- /* stop */
- if(pSelf->bStarted){
- plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
- }
-
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
- /* deinit self */
- _plugin_win_mf_consumer_video_unprepare(pSelf);
- }
-
- return self;
+{
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf) {
+ /* stop */
+ if(pSelf->bStarted) {
+ plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+ /* deinit self */
+ _plugin_win_mf_consumer_video_unprepare(pSelf);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_consumer_video_def_s =
-{
- sizeof(plugin_win_mf_consumer_video_t),
- plugin_win_mf_consumer_video_ctor,
- plugin_win_mf_consumer_video_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_mf_consumer_video_def_s = {
+ sizeof(plugin_win_mf_consumer_video_t),
+ plugin_win_mf_consumer_video_ctor,
+ plugin_win_mf_consumer_video_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s =
-{
- &plugin_win_mf_consumer_video_def_s,
-
- tmedia_video,
- "Media Foundation video consumer",
-
- plugin_win_mf_consumer_video_set,
- plugin_win_mf_consumer_video_prepare,
- plugin_win_mf_consumer_video_start,
- plugin_win_mf_consumer_video_consume,
- plugin_win_mf_consumer_video_pause,
- plugin_win_mf_consumer_video_stop
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s = {
+ &plugin_win_mf_consumer_video_def_s,
+
+ tmedia_video,
+ "Media Foundation video consumer",
+
+ plugin_win_mf_consumer_video_set,
+ plugin_win_mf_consumer_video_prepare,
+ plugin_win_mf_consumer_video_start,
+ plugin_win_mf_consumer_video_consume,
+ plugin_win_mf_consumer_video_pause,
+ plugin_win_mf_consumer_video_stop
};
const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t = &plugin_win_mf_consumer_video_plugin_def_s;
// Run session async thread
static void* TSK_STDCALL RunSessionThread(void *pArg)
{
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
- MediaEventType met;
-
- TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - ENTER");
-
- while(pSelf->bStarted){
- CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- CHECK_HR(hr = pEvent->GetType(&met));
-
- if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
- if (met == MESessionEnded)
- {
- break;
- }
- SafeRelease(&pEvent);
- }
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - ENTER");
+
+ while(pSelf->bStarted) {
+ CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded) {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - EXIT");
- return NULL;
+ return NULL;
}
#endif /* PLUGIN_MF_CV_USE_D3D9 */ \ No newline at end of file
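Aside for reviewers: the destination-rectangle code re-indented above (CorrectAspectRatio/LetterBoxRect as used by UpdateDestinationRect) boils down to fitting the negotiated frame into the window's client area while preserving the aspect ratio, then centering it. The standalone sketch below illustrates that math with the same MulDiv integer arithmetic; it is only an illustration assuming non-zero source dimensions, the name LetterBoxSketch is made up for the example, and it is not the plugin's actual helper.

#include <windows.h>

// Fit rcSrc into rcDst, preserving the source aspect ratio, and center the result.
static RECT LetterBoxSketch(const RECT& rcSrc, const RECT& rcDst)
{
    const LONG srcW = rcSrc.right - rcSrc.left, srcH = rcSrc.bottom - rcSrc.top;
    const LONG dstW = rcDst.right - rcDst.left, dstH = rcDst.bottom - rcDst.top;
    LONG w, h;

    if (MulDiv(srcW, dstH, srcH) <= dstW) {
        // Destination is relatively wider: use full height, bars on the sides.
        w = MulDiv(srcW, dstH, srcH);
        h = dstH;
    }
    else {
        // Destination is relatively taller: use full width, bars top and bottom.
        w = dstW;
        h = MulDiv(srcH, dstW, srcW);
    }

    RECT rc;
    rc.left   = rcDst.left + ((dstW - w) / 2);
    rc.top    = rcDst.top  + ((dstH - h) / 2);
    rc.right  = rc.left + w;
    rc.bottom = rc.top  + h;
    return rc;
}

For example, a 640x480 frame letter-boxed into a 1280x600 client area comes out as an 800x600 rectangle spanning x = 240..1040.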
diff --git a/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx b/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
index 0e6abcb..70a1b4c 100755
--- a/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013-2015 Mamadou DIOP
* Copyright (C) 2013-2015 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -53,9 +53,9 @@ EXTERN_C const GUID CLSID_VideoProcessorMFT; // defined in mf_utils.cxx
#define _WIN32_WINNT_WIN8 0x0602
#endif /* _WIN32_WINNT_WIN8 */
-#if (WINVER < _WIN32_WINNT_WIN8)
+#if (WINVER < _WIN32_WINNT_WIN8)
DEFINE_GUID(MF_SA_D3D11_AWARE,
- 0x206b4fc8, 0xfcf9, 0x4c51, 0xaf, 0xe3, 0x97, 0x64, 0x36, 0x9e, 0x33, 0xa0);
+ 0x206b4fc8, 0xfcf9, 0x4c51, 0xaf, 0xe3, 0x97, 0x64, 0x36, 0x9e, 0x33, 0xa0);
#endif /* MF_SA_D3D11_AWARE */
#if !defined(HAVE_IMFVideoProcessorControl)
@@ -72,34 +72,33 @@ DEFINE_GUID(MF_SA_D3D11_AWARE,
#define PLUGIN_MF_VC_FPS 120 // Samples require a timestamp
#endif /* PLUGIN_MF_VC_FPS */
-typedef struct plugin_win_mf_converter_video_ms_s
-{
- TMEDIA_DECLARE_CONVERTER_VIDEO;
-
- GUID fmtSrc;
- tsk_size_t widthSrc;
- tsk_size_t heightSrc;
-
- GUID fmtDst;
- tsk_size_t widthDst;
- tsk_size_t heightDst;
-
- UINT32 rotation;
- UINT32 xOutputSize;
- UINT32 xInputSize;
- BOOL flip;
-
- IMFSample* pSampleOut;
- IMFSample* pSampleIn;
-
- LONGLONG rtStart;
+typedef struct plugin_win_mf_converter_video_ms_s {
+ TMEDIA_DECLARE_CONVERTER_VIDEO;
+
+ GUID fmtSrc;
+ tsk_size_t widthSrc;
+ tsk_size_t heightSrc;
+
+ GUID fmtDst;
+ tsk_size_t widthDst;
+ tsk_size_t heightDst;
+
+ UINT32 rotation;
+ UINT32 xOutputSize;
+ UINT32 xInputSize;
+ BOOL flip;
+
+ IMFSample* pSampleOut;
+ IMFSample* pSampleIn;
+
+ LONGLONG rtStart;
UINT64 rtDuration;
- IMFTransform* pMFT; // "CLSID_VideoProcessorMFT" or "CLSID_CColorConvertDMO"
+ IMFTransform* pMFT; // "CLSID_VideoProcessorMFT" or "CLSID_CColorConvertDMO"
#if HAVE_IMFVideoProcessorControl
- IMFVideoProcessorControl* pVPC;
+ IMFVideoProcessorControl* pVPC;
#endif
- BOOL isVideoProcessor;
+ BOOL isVideoProcessor;
}
plugin_win_mf_converter_video_ms_t;
@@ -110,324 +109,299 @@ static inline HRESULT _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
const BYTE* pSrc,
INT dwWidthInPixels,
INT dwHeightInPixels
- );
+);
static HRESULT _plugin_win_mf_converter_video_ms_process_input(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample* pSample);
static HRESULT _plugin_win_mf_converter_video_ms_process_output(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample **ppSample);
static HRESULT _plugin_win_mf_converter_video_ms_process(plugin_win_mf_converter_video_ms_t* pSelf, const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
static int plugin_win_mf_converter_video_ms_init(tmedia_converter_video_t* self, tsk_size_t srcWidth, tsk_size_t srcHeight, tmedia_chroma_t srcChroma, tsk_size_t dstWidth, tsk_size_t dstHeight, tmedia_chroma_t dstChroma)
{
- plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)self;
- TSK_DEBUG_INFO("Initializing new MF Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", srcWidth, srcHeight, srcChroma, dstWidth, dstHeight, dstChroma);
-
- if((pSelf->fmtSrc = _plugin_win_mf_converter_video_ms_get_pixfmt(srcChroma)) == GUID_NULL)
- {
- TSK_DEBUG_ERROR("Invalid source chroma");
- return -2;
- }
- if((pSelf->fmtDst = _plugin_win_mf_converter_video_ms_get_pixfmt(dstChroma)) == GUID_NULL)
- {
- TSK_DEBUG_ERROR("Invalid destination chroma");
- return -3;
- }
-
- pSelf->rtStart = 0;
-
- pSelf->widthSrc = srcWidth;
- pSelf->heightSrc = srcHeight;
- pSelf->widthDst = dstWidth;
- pSelf->heightDst = dstHeight;
- pSelf->rotation = 0;
- pSelf->xOutputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(dstChroma, dstWidth, dstHeight);
- pSelf->xInputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(srcChroma, srcWidth, srcHeight);
-
- SafeRelease(&pSelf->pSampleOut);
- SafeRelease(&pSelf->pSampleIn);
- SafeRelease(&pSelf->pMFT);
+ plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)self;
+ TSK_DEBUG_INFO("Initializing new MF Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", srcWidth, srcHeight, srcChroma, dstWidth, dstHeight, dstChroma);
+
+ if((pSelf->fmtSrc = _plugin_win_mf_converter_video_ms_get_pixfmt(srcChroma)) == GUID_NULL) {
+ TSK_DEBUG_ERROR("Invalid source chroma");
+ return -2;
+ }
+ if((pSelf->fmtDst = _plugin_win_mf_converter_video_ms_get_pixfmt(dstChroma)) == GUID_NULL) {
+ TSK_DEBUG_ERROR("Invalid destination chroma");
+ return -3;
+ }
+
+ pSelf->rtStart = 0;
+
+ pSelf->widthSrc = srcWidth;
+ pSelf->heightSrc = srcHeight;
+ pSelf->widthDst = dstWidth;
+ pSelf->heightDst = dstHeight;
+ pSelf->rotation = 0;
+ pSelf->xOutputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(dstChroma, dstWidth, dstHeight);
+ pSelf->xInputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(srcChroma, srcWidth, srcHeight);
+
+ SafeRelease(&pSelf->pSampleOut);
+ SafeRelease(&pSelf->pSampleIn);
+ SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
- SafeRelease(&pSelf->pVPC);
+ SafeRelease(&pSelf->pVPC);
#endif
- HRESULT hr = S_OK;
-
- IMFMediaType* pTypeSrc = NULL;
- IMFMediaType* pTypeDst = NULL;
-
- // Get video processor or Color convertor
- hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT));
- pSelf->isVideoProcessor = SUCCEEDED(hr);
- if(FAILED(hr))
- {
- TSK_DEBUG_INFO("CoCreateInstance(CLSID_VideoProcessorMFT) failed");
- if(pSelf->widthSrc == pSelf->widthDst && pSelf->heightSrc == pSelf->heightDst)
- {
- TSK_DEBUG_INFO("No video scaling is required...perform CoCreateInstance(CLSID_CColorConvertDMO)");
- CHECK_HR(hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT)));
- }
- else
- {
- CHECK_HR(hr);
- }
- }
-
-
-
- if(pSelf->isVideoProcessor)
- {
- IMFAttributes* pAttributes = NULL;
- UINT32 GPU = 0;
- hr = pSelf->pMFT->GetAttributes(&pAttributes);
- if (SUCCEEDED(hr)) {
- hr = pAttributes->GetUINT32(MF_SA_D3D11_AWARE, &GPU);
- }
- SafeRelease(&pAttributes);
- TSK_DEBUG_INFO("MF_SA_D3D11_AWARE = %d", GPU);
+ HRESULT hr = S_OK;
+
+ IMFMediaType* pTypeSrc = NULL;
+ IMFMediaType* pTypeDst = NULL;
+
+    // Get video processor or color converter
+ hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT));
+ pSelf->isVideoProcessor = SUCCEEDED(hr);
+ if(FAILED(hr)) {
+ TSK_DEBUG_INFO("CoCreateInstance(CLSID_VideoProcessorMFT) failed");
+ if(pSelf->widthSrc == pSelf->widthDst && pSelf->heightSrc == pSelf->heightDst) {
+ TSK_DEBUG_INFO("No video scaling is required...perform CoCreateInstance(CLSID_CColorConvertDMO)");
+ CHECK_HR(hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT)));
+ }
+ else {
+ CHECK_HR(hr);
+ }
+ }
+
+
+
+ if(pSelf->isVideoProcessor) {
+ IMFAttributes* pAttributes = NULL;
+ UINT32 GPU = 0;
+ hr = pSelf->pMFT->GetAttributes(&pAttributes);
+ if (SUCCEEDED(hr)) {
+ hr = pAttributes->GetUINT32(MF_SA_D3D11_AWARE, &GPU);
+ }
+ SafeRelease(&pAttributes);
+ TSK_DEBUG_INFO("MF_SA_D3D11_AWARE = %d", GPU);
#if HAVE_IMFVideoProcessorControl
- CHECK_HR(hr = pSelf->pMFT->QueryInterface(IID_PPV_ARGS(&pSelf->pVPC)));
+ CHECK_HR(hr = pSelf->pMFT->QueryInterface(IID_PPV_ARGS(&pSelf->pVPC)));
#endif
- }
+ }
- CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtSrc, &pTypeSrc, (UINT32)pSelf->widthSrc, (UINT32)pSelf->heightSrc));
- CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtDst, &pTypeDst, (UINT32)pSelf->widthDst, (UINT32)pSelf->heightDst));
+ CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtSrc, &pTypeSrc, (UINT32)pSelf->widthSrc, (UINT32)pSelf->heightSrc));
+ CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtDst, &pTypeDst, (UINT32)pSelf->widthDst, (UINT32)pSelf->heightDst));
- CHECK_HR(hr = pSelf->pMFT->SetInputType(0, pTypeSrc, 0));
- CHECK_HR(hr = pSelf->pMFT->SetOutputType(0, pTypeDst, 0));
+ CHECK_HR(hr = pSelf->pMFT->SetInputType(0, pTypeSrc, 0));
+ CHECK_HR(hr = pSelf->pMFT->SetOutputType(0, pTypeDst, 0));
bail:
- SafeRelease(&pTypeSrc);
- SafeRelease(&pTypeDst);
+ SafeRelease(&pTypeSrc);
+ SafeRelease(&pTypeDst);
- if(FAILED(hr))
- {
- SafeRelease(&pSelf->pMFT);
+ if(FAILED(hr)) {
+ SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
- SafeRelease(&pSelf->pVPC);
+ SafeRelease(&pSelf->pVPC);
#endif
- return -4;
- }
+ return -4;
+ }
- return 0;
+ return 0;
}
static tsk_size_t plugin_win_mf_converter_video_ms_process(tmedia_converter_video_t* _self, const void* buffer, tsk_size_t buffer_size, void** output, tsk_size_t* output_max_size)
{
- plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)_self;
+ plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)_self;
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- IMFSample *pSampleOut = NULL;
- IMFMediaBuffer* pBufferOut = NULL;
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
- if(!pSelf || !buffer || !output || !output_max_size)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!pSelf || !buffer || !output || !output_max_size) {
+ CHECK_HR(hr = E_POINTER);
+ }
- if(!pSelf->pMFT)
- {
- TSK_DEBUG_ERROR("Not initialized");
- CHECK_HR(hr = E_FAIL);
- }
+ if(!pSelf->pMFT) {
+ TSK_DEBUG_ERROR("Not initialized");
+ CHECK_HR(hr = E_FAIL);
+ }
#if HAVE_IMFVideoProcessorControl
- if(!pSelf->pVPC && pSelf->isVideoProcessor)
- {
- TSK_DEBUG_ERROR("Not initialized");
- CHECK_HR(hr = E_FAIL);
- }
+ if(!pSelf->pVPC && pSelf->isVideoProcessor) {
+ TSK_DEBUG_ERROR("Not initialized");
+ CHECK_HR(hr = E_FAIL);
+ }
#endif
- if(*output_max_size < pSelf->xOutputSize)
- {
- if(!(*output = tsk_realloc(*output, pSelf->xOutputSize)))
- {
- *output_max_size = 0;
- TSK_DEBUG_ERROR("Failed to allocate buffer with size = %u", pSelf->xOutputSize);
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- *output_max_size = pSelf->xOutputSize;
- }
+ if(*output_max_size < pSelf->xOutputSize) {
+ if(!(*output = tsk_realloc(*output, pSelf->xOutputSize))) {
+ *output_max_size = 0;
+ TSK_DEBUG_ERROR("Failed to allocate buffer with size = %u", pSelf->xOutputSize);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ *output_max_size = pSelf->xOutputSize;
+ }
#if HAVE_IMFVideoProcessorControl
- if(pSelf->pVPC && !!_self->flip != !!pSelf->flip)
- {
- pSelf->flip = !!_self->flip;
- CHECK_HR(hr = pSelf->pVPC->SetMirror(pSelf->flip ? MIRROR_NONE : MIRROR_VERTICAL));
- }
- if(pSelf->pVPC && _self->rotation != pSelf->rotation)
- {
- _self->rotation = pSelf->rotation;
- CHECK_HR(hr = pSelf->pVPC->SetRotation(pSelf->rotation == 0 ? ROTATION_NONE : ROTATION_NORMAL));
-
- }
+ if(pSelf->pVPC && !!_self->flip != !!pSelf->flip) {
+ pSelf->flip = !!_self->flip;
+ CHECK_HR(hr = pSelf->pVPC->SetMirror(pSelf->flip ? MIRROR_NONE : MIRROR_VERTICAL));
+ }
+ if(pSelf->pVPC && _self->rotation != pSelf->rotation) {
+ _self->rotation = pSelf->rotation;
+ CHECK_HR(hr = pSelf->pVPC->SetRotation(pSelf->rotation == 0 ? ROTATION_NONE : ROTATION_NORMAL));
+
+ }
#endif
- CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process(
- pSelf, buffer, pSelf->xInputSize, &pSampleOut));
-
- if(pSampleOut)
- {
- CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
-
- BYTE* pBufferPtr = NULL;
- DWORD dwDataLength = 0;
- CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
- if(dwDataLength > 0)
- {
- if(dwDataLength != pSelf->xOutputSize)
- {
- TSK_DEBUG_ERROR("Output size mismatch");
- CHECK_HR(hr = E_BOUNDS);
- }
- CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
-
- // MFCopyImage() is optimized: MMX, SSE, or SSE2
- switch(_self->dstChroma)
- {
- // Don't waste your time guessing which parameter to use: The consumer will always request RGB32. If not used for consumer then, just memcpy()
- case tmedia_chroma_rgb32:
- {
- if(pSelf->isVideoProcessor)
- {
- hr = _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
- (BYTE*)*output,
- (const BYTE*)pBufferPtr,
- (INT)pSelf->widthDst,
- (INT)pSelf->heightDst
- );
- }
- else
- {
- hr = MFCopyImage(
- (BYTE*)*output,
- (LONG)(pSelf->widthDst << 2),
- (BYTE*)pBufferPtr,
- (LONG)(pSelf->widthDst << 2),
- (DWORD)(pSelf->widthDst << 2),
- (DWORD)pSelf->heightDst
- );
- }
-
-
- if(FAILED(hr))
- {
- // unlock() before leaving
- pBufferOut->Unlock();
- CHECK_HR(hr);
- }
- break;
- }
- default:
- {
- memcpy(*output, pBufferPtr, dwDataLength);
- }
- }
- CHECK_HR(hr = pBufferOut->Unlock());
- }
- }
-
- pSelf->rtStart += pSelf->rtDuration;
+ CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process(
+ pSelf, buffer, pSelf->xInputSize, &pSampleOut));
+
+ if(pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if(dwDataLength > 0) {
+ if(dwDataLength != pSelf->xOutputSize) {
+ TSK_DEBUG_ERROR("Output size mismatch");
+ CHECK_HR(hr = E_BOUNDS);
+ }
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+
+ // MFCopyImage() is optimized: MMX, SSE, or SSE2
+ switch(_self->dstChroma) {
+            // Don't waste your time guessing which parameter to use: the consumer will always request RGB32. If not used by the consumer, just memcpy()
+ case tmedia_chroma_rgb32: {
+ if(pSelf->isVideoProcessor) {
+ hr = _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
+ (BYTE*)*output,
+ (const BYTE*)pBufferPtr,
+ (INT)pSelf->widthDst,
+ (INT)pSelf->heightDst
+ );
+ }
+ else {
+ hr = MFCopyImage(
+ (BYTE*)*output,
+ (LONG)(pSelf->widthDst << 2),
+ (BYTE*)pBufferPtr,
+ (LONG)(pSelf->widthDst << 2),
+ (DWORD)(pSelf->widthDst << 2),
+ (DWORD)pSelf->heightDst
+ );
+ }
+
+
+ if(FAILED(hr)) {
+ // unlock() before leaving
+ pBufferOut->Unlock();
+ CHECK_HR(hr);
+ }
+ break;
+ }
+ default: {
+ memcpy(*output, pBufferPtr, dwDataLength);
+ }
+ }
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+
+ pSelf->rtStart += pSelf->rtDuration;
bail:
- SafeRelease(&pSampleOut);
- SafeRelease(&pBufferOut);
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
- return SUCCEEDED(hr) ? pSelf->xOutputSize : 0;
+ return SUCCEEDED(hr) ? pSelf->xOutputSize : 0;
}
static tsk_object_t* plugin_win_mf_converter_video_ms_ctor(tsk_object_t * self, va_list * app)
{
- plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
- if(pSelf){
- HRESULT hr = MFFrameRateToAverageTimePerFrame(PLUGIN_MF_VC_FPS, 1, &pSelf->rtDuration);
- if(FAILED(hr)){
- pSelf->rtDuration = 83333; // 120 FPS
- }
- }
- return self;
+ plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
+ if(pSelf) {
+ HRESULT hr = MFFrameRateToAverageTimePerFrame(PLUGIN_MF_VC_FPS, 1, &pSelf->rtDuration);
+ if(FAILED(hr)) {
+ pSelf->rtDuration = 83333; // 120 FPS
+ }
+ }
+ return self;
}
static tsk_object_t* plugin_win_mf_converter_video_ms_dtor(tsk_object_t * self)
-{
- plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
- if(pSelf){
- SafeRelease(&pSelf->pSampleOut);
- SafeRelease(&pSelf->pSampleIn);
- SafeRelease(&pSelf->pMFT);
+{
+ plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
+ if(pSelf) {
+ SafeRelease(&pSelf->pSampleOut);
+ SafeRelease(&pSelf->pSampleIn);
+ SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
- SafeRelease(&pSelf->pVPC);
+ SafeRelease(&pSelf->pVPC);
#endif
- }
+ }
- return self;
+ return self;
}
-static const tsk_object_def_t plugin_win_mf_converter_video_ms_def_s =
-{
- sizeof(plugin_win_mf_converter_video_ms_t),
- plugin_win_mf_converter_video_ms_ctor,
- plugin_win_mf_converter_video_ms_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_mf_converter_video_ms_def_s = {
+ sizeof(plugin_win_mf_converter_video_ms_t),
+ plugin_win_mf_converter_video_ms_ctor,
+ plugin_win_mf_converter_video_ms_dtor,
+ tsk_null,
};
const tsk_object_def_t *plugin_win_mf_converter_video_ms_def_t = &plugin_win_mf_converter_video_ms_def_s;
-static const tmedia_converter_video_plugin_def_t plugin_win_mf_converter_video_ms_plugin_def_s =
-{
- &plugin_win_mf_converter_video_ms_def_s,
-
- plugin_win_mf_converter_video_ms_init,
- plugin_win_mf_converter_video_ms_process
+static const tmedia_converter_video_plugin_def_t plugin_win_mf_converter_video_ms_plugin_def_s = {
+ &plugin_win_mf_converter_video_ms_def_s,
+
+ plugin_win_mf_converter_video_ms_init,
+ plugin_win_mf_converter_video_ms_process
};
const tmedia_converter_video_plugin_def_t *plugin_win_mf_converter_video_ms_plugin_def_t = &plugin_win_mf_converter_video_ms_plugin_def_s;
static inline tsk_size_t _plugin_win_mf_converter_video_ms_get_size(tmedia_chroma_t chroma, tsk_size_t w, tsk_size_t h)
{
- switch(chroma){
- case tmedia_chroma_rgb24:
- case tmedia_chroma_bgr24:
- return (w * h * 3);
- case tmedia_chroma_rgb565le:
- return ((w * h) << 1);
- case tmedia_chroma_rgb32:
- return ((w * h) << 2);
- case tmedia_chroma_nv21:
- return ((w * h * 3) >> 1);
- case tmedia_chroma_nv12:
- return ((w * h * 3) >> 1);
- case tmedia_chroma_yuv422p:
- return ((w * h) << 1);
- case tmedia_chroma_uyvy422:
- case tmedia_chroma_yuyv422:
- return ((w * h) << 1);
- case tmedia_chroma_yuv420p:
- return ((w * h * 3) >> 1);
- default:
- TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
- return 0;
- }
+ switch(chroma) {
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return (w * h * 3);
+ case tmedia_chroma_rgb565le:
+ return ((w * h) << 1);
+ case tmedia_chroma_rgb32:
+ return ((w * h) << 2);
+ case tmedia_chroma_nv21:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_nv12:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_yuv422p:
+ return ((w * h) << 1);
+ case tmedia_chroma_uyvy422:
+ case tmedia_chroma_yuyv422:
+ return ((w * h) << 1);
+ case tmedia_chroma_yuv420p:
+ return ((w * h * 3) >> 1);
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return 0;
+ }
}
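For reference, the size table above reduces to the usual bytes-per-pixel factors: 1.5 for 4:2:0 formats, 2 for packed 4:2:2 and RGB565, 3 for RGB24 and 4 for RGB32. A minimal standalone sketch with worked numbers for a 640x480 frame (illustrative only, not part of the patch):

// Illustrative sketch: byte counts produced by the size table above for 640x480.
#include <cstdio>

int main()
{
    const unsigned w = 640, h = 480;
    const unsigned yuv420p = (w * h * 3) >> 1; // 460800 bytes (planar 4:2:0; NV12/NV21 are the same size)
    const unsigned yuyv422 = (w * h) << 1;     // 614400 bytes (packed 4:2:2)
    const unsigned rgb32   = (w * h) << 2;     // 1228800 bytes
    printf("yuv420p=%u yuyv422=%u rgb32=%u\n", yuv420p, yuyv422, rgb32);
    return 0;
}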
static inline const GUID& _plugin_win_mf_converter_video_ms_get_pixfmt(tmedia_chroma_t chroma)
{
- switch(chroma){
- case tmedia_chroma_rgb24:
- case tmedia_chroma_bgr24:
- return MFVideoFormat_RGB24;
- case tmedia_chroma_rgb565le:
- return MFVideoFormat_RGB565;
- case tmedia_chroma_rgb32:
- return MFVideoFormat_RGB32;
- case tmedia_chroma_nv12:
- return MFVideoFormat_NV12;
- case tmedia_chroma_yuv420p:
- return MFVideoFormat_I420;
- case tmedia_chroma_yuyv422:
- return MFVideoFormat_YUY2;
- case tmedia_chroma_uyvy422:
- return MFVideoFormat_UYVY;
- default:
- TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
- return GUID_NULL;
- }
+ switch(chroma) {
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return MFVideoFormat_RGB24;
+ case tmedia_chroma_rgb565le:
+ return MFVideoFormat_RGB565;
+ case tmedia_chroma_rgb32:
+ return MFVideoFormat_RGB32;
+ case tmedia_chroma_nv12:
+ return MFVideoFormat_NV12;
+ case tmedia_chroma_yuv420p:
+ return MFVideoFormat_I420;
+ case tmedia_chroma_yuyv422:
+ return MFVideoFormat_YUY2;
+ case tmedia_chroma_uyvy422:
+ return MFVideoFormat_UYVY;
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return GUID_NULL;
+ }
}
// For RGB32:
@@ -438,72 +412,66 @@ static inline HRESULT _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
const BYTE* pSrc,
INT dwWidthInPixels,
INT dwHeightInPixels
- )
+)
{
- RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthInPixels * dwHeightInPixels) - dwWidthInPixels];
+ RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthInPixels * dwHeightInPixels) - dwWidthInPixels];
RGBQUAD *pDestPixel = &((RGBQUAD*)pDst)[0];
- register INT x;
- register INT y;
+ register INT x;
+ register INT y;
- for (y = dwHeightInPixels; y > 0 ; --y)
- {
- for (x = 0; x < dwWidthInPixels; ++x)
- {
+ for (y = dwHeightInPixels; y > 0 ; --y) {
+ for (x = 0; x < dwWidthInPixels; ++x) {
pDestPixel[x] = pSrcPixel[x];
}
pDestPixel += dwWidthInPixels;
pSrcPixel -= dwWidthInPixels;
}
- return S_OK;
+ return S_OK;
}
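The helper above un-flips a bottom-up RGB32 frame: the source pointer starts at the first pixel of the last row and moves up one row per iteration while the destination pointer moves down. A compact standalone sketch of the same row-reversal idea (illustrative only, not part of the patch):

// Illustrative sketch: reverse the row order of a tightly packed 32-bit image.
#include <cstdint>
#include <cstring>

static void flip_rows_rgb32(uint32_t* dst, const uint32_t* src, int width, int height)
{
    for (int y = 0; y < height; ++y) {
        // Destination row y comes from source row (height - 1 - y).
        std::memcpy(dst + (size_t)y * width,
                    src + (size_t)(height - 1 - y) * width,
                    (size_t)width * sizeof(uint32_t));
    }
}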
static HRESULT _plugin_win_mf_converter_video_ms_process_input(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample* pSample)
{
- return pSelf->pMFT->ProcessInput(0, pSample, 0);
+ return pSelf->pMFT->ProcessInput(0, pSample, 0);
}
static HRESULT _plugin_win_mf_converter_video_ms_process_output(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample **ppSample)
{
- *ppSample = NULL;
+ *ppSample = NULL;
IMFMediaBuffer* pBufferOut = NULL;
DWORD dwStatus;
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
MFT_OUTPUT_STREAM_INFO mftStreamInfo = { 0 };
MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
- CHECK_HR(hr = pSelf->pMFT->GetOutputStreamInfo(0, &mftStreamInfo));
-
- if(!pSelf->pSampleOut)
- {
- CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &pSelf->pSampleOut));
- hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut);
- if(FAILED(hr))
- {
- SafeRelease(&pSelf->pSampleOut);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut));
- CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < mftStreamInfo.cbSize)
- {
- CHECK_HR(hr = pSelf->pSampleOut->RemoveAllBuffers());
- SafeRelease(&pBufferOut);
- CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
- CHECK_HR(hr = pSelf->pSampleOut->AddBuffer(pBufferOut));
- }
- }
-
- CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
-
+ CHECK_HR(hr = pSelf->pMFT->GetOutputStreamInfo(0, &mftStreamInfo));
+
+ if(!pSelf->pSampleOut) {
+ CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &pSelf->pSampleOut));
+ hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut);
+ if(FAILED(hr)) {
+ SafeRelease(&pSelf->pSampleOut);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut));
+ CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < mftStreamInfo.cbSize) {
+ CHECK_HR(hr = pSelf->pSampleOut->RemoveAllBuffers());
+ SafeRelease(&pBufferOut);
+ CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
+ CHECK_HR(hr = pSelf->pSampleOut->AddBuffer(pBufferOut));
+ }
+ }
+
+ CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
+
//Set the output sample
mftOutputData.pSample = pSelf->pSampleOut;
//Set the output id
@@ -529,72 +497,64 @@ bail:
static HRESULT _plugin_win_mf_converter_video_ms_process(plugin_win_mf_converter_video_ms_t* pSelf, const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut)
{
- if(!pcInputPtr || !nInputSize || !ppSampleOut)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return E_INVALIDARG;
- }
-
- *ppSampleOut = NULL;
-
- HRESULT hr = S_OK;
-
- IMFMediaBuffer* pBufferIn = NULL;
- BYTE* pBufferPtr = NULL;
-
- if(!pSelf->pSampleIn)
- {
- CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &pSelf->pSampleIn));
- hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn);
- if(FAILED(hr))
- {
- SafeRelease(&pSelf->pSampleIn);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn));
- CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < nInputSize)
- {
- CHECK_HR(hr = pSelf->pSampleIn->RemoveAllBuffers());
- SafeRelease(&pBufferIn);
- CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
- CHECK_HR(hr = pSelf->pSampleIn->AddBuffer(pBufferIn));
- }
- }
-
- CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
- memcpy(pBufferPtr, pcInputPtr, nInputSize);
- CHECK_HR(hr = pBufferIn->Unlock());
- CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
-
- CHECK_HR(hr = pSelf->pSampleIn->SetSampleDuration(pSelf->rtDuration));
- CHECK_HR(hr = pSelf->pSampleIn->SetSampleTime(pSelf->rtStart));
-
- hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
- while(hr == MF_E_NOTACCEPTING)
- {
- TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
- IMFSample* pSample = NULL;
- hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, &pSample);
- if(SUCCEEDED(hr) && pSample)
- {
- SafeRelease(ppSampleOut);
- *ppSampleOut = pSample, pSample = NULL;
-
- hr = pSelf->pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
- hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
- }
- }
- if(!*ppSampleOut)
- {
- CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, ppSampleOut));
- }
-
+ if(!pcInputPtr || !nInputSize || !ppSampleOut) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return E_INVALIDARG;
+ }
+
+ *ppSampleOut = NULL;
+
+ HRESULT hr = S_OK;
+
+ IMFMediaBuffer* pBufferIn = NULL;
+ BYTE* pBufferPtr = NULL;
+
+ if(!pSelf->pSampleIn) {
+ CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &pSelf->pSampleIn));
+ hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn);
+ if(FAILED(hr)) {
+ SafeRelease(&pSelf->pSampleIn);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn));
+ CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < nInputSize) {
+ CHECK_HR(hr = pSelf->pSampleIn->RemoveAllBuffers());
+ SafeRelease(&pBufferIn);
+ CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
+ CHECK_HR(hr = pSelf->pSampleIn->AddBuffer(pBufferIn));
+ }
+ }
+
+ CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
+ memcpy(pBufferPtr, pcInputPtr, nInputSize);
+ CHECK_HR(hr = pBufferIn->Unlock());
+ CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
+
+ CHECK_HR(hr = pSelf->pSampleIn->SetSampleDuration(pSelf->rtDuration));
+ CHECK_HR(hr = pSelf->pSampleIn->SetSampleTime(pSelf->rtStart));
+
+ hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
+ while(hr == MF_E_NOTACCEPTING) {
+ TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
+ IMFSample* pSample = NULL;
+ hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, &pSample);
+ if(SUCCEEDED(hr) && pSample) {
+ SafeRelease(ppSampleOut);
+ *ppSampleOut = pSample, pSample = NULL;
+
+ hr = pSelf->pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
+ hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
+ }
+ }
+ if(!*ppSampleOut) {
+ CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, ppSampleOut));
+ }
+
bail:
- SafeRelease(&pBufferIn);
- return hr;
+ SafeRelease(&pBufferIn);
+ return hr;
}
\ No newline at end of file
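The _plugin_win_mf_converter_video_ms_process() helper above follows the standard synchronous MFT pattern: submit the input sample and, while ProcessInput() reports MF_E_NOTACCEPTING, drain pending frames with ProcessOutput() before retrying. A minimal standalone sketch of that control flow with a stand-in transform (illustrative only, not part of the patch; the stand-in returns bool instead of the real HRESULT codes):

// Illustrative sketch: the submit/drain loop used above, with a stand-in transform.
#include <cstdio>
#include <queue>

struct FakeTransform {                    // stand-in for the wrapped IMFTransform (assumption)
    std::queue<int> pending;
    bool ProcessInput(int sample) {       // 'false' plays the role of MF_E_NOTACCEPTING
        if (!pending.empty()) return false;
        pending.push(sample);
        return true;
    }
    bool ProcessOutput(int* sample) {     // 'true' means an output frame is ready
        if (pending.empty()) return false;
        *sample = pending.front();
        pending.pop();
        return true;
    }
};

int main()
{
    FakeTransform mft;
    int out = -1;
    for (int in = 0; in < 3; ++in) {
        while (!mft.ProcessInput(in)) {   // input refused: drain output, then retry
            int drained;
            if (mft.ProcessOutput(&drained)) {
                out = drained;            // keep only the most recent frame
            }
        }
    }
    if (out < 0) {
        mft.ProcessOutput(&out);          // nothing drained yet: pull one output explicitly
    }
    printf("last output sample id = %d\n", out);
    return 0;
}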
diff --git a/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx b/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
index 2a3c314..5745b24 100755
--- a/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -28,12 +28,11 @@
static void* TSK_STDCALL RunSessionThread(void *pArg);
-typedef struct plugin_win_mf_producer_audio_s
-{
- TDAV_DECLARE_PRODUCER_AUDIO;
+typedef struct plugin_win_mf_producer_audio_s {
+ TDAV_DECLARE_PRODUCER_AUDIO;
- bool bStarted;
- tsk_thread_handle_t* ppTread[1];
+ bool bStarted;
+ tsk_thread_handle_t* ppTread[1];
DeviceListAudio* pDeviceList;
@@ -48,162 +47,162 @@ plugin_win_mf_producer_audio_t;
/* ============ Media Producer Interface ================= */
static int plugin_win_mf_producer_audio_set(tmedia_producer_t* self, const tmedia_param_t* param)
-{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
- if(param->plugin_type == tmedia_ppt_producer){
- }
- return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(pSelf), param);
+{
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+ if(param->plugin_type == tmedia_ppt_producer) {
+ }
+ return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(pSelf), param);
}
static int plugin_win_mf_producer_audio_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
-
- if(!pSelf || !codec){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- TMEDIA_PRODUCER(pSelf)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
- TMEDIA_PRODUCER(pSelf)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
- TMEDIA_PRODUCER(pSelf)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
-
- TSK_DEBUG_INFO("MF audio producer: channels=%d, rate=%d, ptime=%d",
- TMEDIA_PRODUCER(pSelf)->audio.channels,
- TMEDIA_PRODUCER(pSelf)->audio.rate,
- TMEDIA_PRODUCER(pSelf)->audio.ptime
- );
-
- HRESULT hr = S_OK;
-
- // create device list object
- if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListAudio())){
- TSK_DEBUG_ERROR("Failed to create device list");
- hr = E_OUTOFMEMORY;
- goto bail;
- }
- // enumerate devices
- hr = pSelf->pDeviceList->EnumerateDevices();
- if(!SUCCEEDED(hr)){
- goto bail;
- }
-
- // check if we have at least one MF video source connected to the PC
- if(pSelf->pDeviceList->Count() == 0){
- TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
- // do not break the negotiation as one-way video connection is a valid use-case
- }
- else{
- IMFActivate* pActivate = NULL;
- // Get best MF audio source
- hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
- if(!SUCCEEDED(hr) || !pActivate){
- TSK_DEBUG_ERROR("Failed to get best MF audio source");
- if(!pActivate){
- hr = E_OUTOFMEMORY;
- }
- goto bail;
- }
-
- // Create the media source for the device.
- hr = pActivate->ActivateObject(
- __uuidof(IMFMediaSource),
- (void**)&pSelf->pSource
- );
- SafeRelease(&pActivate);
- if(!SUCCEEDED(hr)){
- TSK_DEBUG_ERROR("ActivateObject(MF audio source) failed");
- goto bail;
- }
-
- // Create and configure the media type
- CHECK_HR(hr = MFCreateMediaType(&pSelf->pType));
- CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
- CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, TMEDIA_PRODUCER(pSelf)->audio.channels));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, TMEDIA_PRODUCER(pSelf)->audio.rate));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // because uncompressed media type
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
- UINT32 nBlockAlign = TMEDIA_PRODUCER(pSelf)->audio.channels * (TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample >> 3);
- UINT32 nAvgBytesPerSec = (nBlockAlign * TMEDIA_PRODUCER(pSelf)->audio.rate);
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, nBlockAlign));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, nAvgBytesPerSec));
-
- // Create the sample grabber sink.
- CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
- CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pType, pSelf->pCallback, &pSelf->pSinkActivate));
-
- // To run as fast as possible, set this attribute (requires Windows 7):
- CHECK_HR(hr = pSelf->pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
-
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(NULL, &pSelf->pSession));
-
- // Create the topology.
- CHECK_HR(hr = MFUtils::CreateTopology(pSelf->pSource, NULL/*NO ENCODER*/, pSelf->pSinkActivate, NULL/*Preview*/, pSelf->pType, &pSelf->pTopology));
- }
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+ if(!pSelf || !codec) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_PRODUCER(pSelf)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
+ TMEDIA_PRODUCER(pSelf)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
+ TMEDIA_PRODUCER(pSelf)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
+
+ TSK_DEBUG_INFO("MF audio producer: channels=%d, rate=%d, ptime=%d",
+ TMEDIA_PRODUCER(pSelf)->audio.channels,
+ TMEDIA_PRODUCER(pSelf)->audio.rate,
+ TMEDIA_PRODUCER(pSelf)->audio.ptime
+ );
+
+ HRESULT hr = S_OK;
+
+ // create device list object
+ if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListAudio())) {
+ TSK_DEBUG_ERROR("Failed to create device list");
+ hr = E_OUTOFMEMORY;
+ goto bail;
+ }
+ // enumerate devices
+ hr = pSelf->pDeviceList->EnumerateDevices();
+ if(!SUCCEEDED(hr)) {
+ goto bail;
+ }
+
+    // check if we have at least one MF audio source connected to the PC
+    if(pSelf->pDeviceList->Count() == 0) {
+        TSK_DEBUG_WARN("No MF audio source could be found...no audio will be sent");
+        // do not break the negotiation as a one-way audio connection is a valid use-case
+ }
+ else {
+ IMFActivate* pActivate = NULL;
+ // Get best MF audio source
+ hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+ if(!SUCCEEDED(hr) || !pActivate) {
+ TSK_DEBUG_ERROR("Failed to get best MF audio source");
+ if(!pActivate) {
+ hr = E_OUTOFMEMORY;
+ }
+ goto bail;
+ }
+
+ // Create the media source for the device.
+ hr = pActivate->ActivateObject(
+ __uuidof(IMFMediaSource),
+ (void**)&pSelf->pSource
+ );
+ SafeRelease(&pActivate);
+ if(!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("ActivateObject(MF audio source) failed");
+ goto bail;
+ }
+
+ // Create and configure the media type
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pType));
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, TMEDIA_PRODUCER(pSelf)->audio.channels));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, TMEDIA_PRODUCER(pSelf)->audio.rate));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // because uncompressed media type
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
+ UINT32 nBlockAlign = TMEDIA_PRODUCER(pSelf)->audio.channels * (TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample >> 3);
+ UINT32 nAvgBytesPerSec = (nBlockAlign * TMEDIA_PRODUCER(pSelf)->audio.rate);
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, nBlockAlign));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, nAvgBytesPerSec));
+
+ // Create the sample grabber sink.
+ CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
+ CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pType, pSelf->pCallback, &pSelf->pSinkActivate));
+
+ // To run as fast as possible, set this attribute (requires Windows 7):
+ CHECK_HR(hr = pSelf->pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(NULL, &pSelf->pSession));
+
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(pSelf->pSource, NULL/*NO ENCODER*/, pSelf->pSinkActivate, NULL/*Preview*/, pSelf->pType, &pSelf->pTopology));
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
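The block alignment and average byte rate set above follow the standard PCM arithmetic: blockAlign = channels * (bitsPerSample / 8) and avgBytesPerSec = blockAlign * sampleRate. A minimal standalone sketch with example values (illustrative only, not part of the patch; 16-bit stereo at 48 kHz is an assumed example, the real values come from the negotiated codec):

// Illustrative sketch: PCM block alignment and byte-rate arithmetic used above.
#include <cstdio>

int main()
{
    const unsigned channels = 2, rate = 48000, bitsPerSample = 16; // example values only
    const unsigned blockAlign = channels * (bitsPerSample >> 3);   // 4 bytes per PCM frame
    const unsigned avgBytesPerSec = blockAlign * rate;             // 192000 bytes per second
    printf("blockAlign=%u avgBytesPerSec=%u\n", blockAlign, avgBytesPerSec);
    return 0;
}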
static int plugin_win_mf_producer_audio_start(tmedia_producer_t* self)
{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(pSelf->bStarted){
- TSK_DEBUG_INFO("MF audio producer already started");
- return 0;
- }
-
- HRESULT hr = S_OK;
-
- // Run the media session.
- CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
-
- // Start asynchronous watcher thread
- pSelf->bStarted = true;
- int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
- if(ret != 0) {
- TSK_DEBUG_ERROR("Failed to create thread");
- hr = E_FAIL;
- pSelf->bStarted = false;
- if(pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
- goto bail;
- }
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF audio producer already started");
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+
+ // Run the media session.
+ CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+
+ // Start asynchronous watcher thread
+ pSelf->bStarted = true;
+ int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+ if(ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ hr = E_FAIL;
+ pSelf->bStarted = false;
+ if(pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
+ goto bail;
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_producer_audio_pause(tmedia_producer_t* self)
{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+ HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_producer_audio_stop(tmedia_producer_t* self)
{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
-
- if(!pSelf){
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+ if(!pSelf) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
@@ -213,7 +212,7 @@ static int plugin_win_mf_producer_audio_stop(tmedia_producer_t* self)
// for the thread
pSelf->bStarted = false;
hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
- if(pSelf->ppTread[0]){
+ if(pSelf->ppTread[0]) {
tsk_thread_join(&pSelf->ppTread[0]);
}
hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
@@ -228,37 +227,37 @@ static int plugin_win_mf_producer_audio_stop(tmedia_producer_t* self)
/* constructor */
static tsk_object_t* plugin_win_mf_producer_audio_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t*)self;
- if(pSelf){
- /* init base */
- tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(pSelf));
- /* init self */
-
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t*)self;
+ if(pSelf) {
+ /* init base */
+ tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(pSelf));
+ /* init self */
+
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_producer_audio_dtor(tsk_object_t * self)
-{
- plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)self;
- if(pSelf){
- /* stop */
- if(pSelf->bStarted){
- plugin_win_mf_producer_audio_stop(TMEDIA_PRODUCER(pSelf));
- }
-
- /* deinit base */
- tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(pSelf));
- /* deinit self */
- if(pSelf->pDeviceList){
- delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+{
+ plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)self;
+ if(pSelf) {
+ /* stop */
+ if(pSelf->bStarted) {
+ plugin_win_mf_producer_audio_stop(TMEDIA_PRODUCER(pSelf));
}
- if(pSelf->pSource){
- pSelf->pSource->Shutdown();
+
+ /* deinit base */
+ tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(pSelf));
+ /* deinit self */
+ if(pSelf->pDeviceList) {
+ delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+ }
+ if(pSelf->pSource) {
+ pSelf->pSource->Shutdown();
}
- if(pSelf->pSession){
+ if(pSelf->pSession) {
pSelf->pSession->Shutdown();
}
@@ -268,31 +267,29 @@ static tsk_object_t* plugin_win_mf_producer_audio_dtor(tsk_object_t * self)
SafeRelease(&pSelf->pSinkActivate);
SafeRelease(&pSelf->pTopology);
SafeRelease(&pSelf->pType);
- }
+ }
- return self;
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_producer_audio_def_s =
-{
- sizeof(plugin_win_mf_producer_audio_t),
- plugin_win_mf_producer_audio_ctor,
- plugin_win_mf_producer_audio_dtor,
- tdav_producer_audio_cmp,
+static const tsk_object_def_t plugin_win_mf_producer_audio_def_s = {
+ sizeof(plugin_win_mf_producer_audio_t),
+ plugin_win_mf_producer_audio_ctor,
+ plugin_win_mf_producer_audio_dtor,
+ tdav_producer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t plugin_win_mf_producer_audio_plugin_def_s =
-{
- &plugin_win_mf_producer_audio_def_s,
-
- tmedia_audio,
- "Media Foundation audio producer",
-
- plugin_win_mf_producer_audio_set,
- plugin_win_mf_producer_audio_prepare,
- plugin_win_mf_producer_audio_start,
- plugin_win_mf_producer_audio_pause,
- plugin_win_mf_producer_audio_stop
+static const tmedia_producer_plugin_def_t plugin_win_mf_producer_audio_plugin_def_s = {
+ &plugin_win_mf_producer_audio_def_s,
+
+ tmedia_audio,
+ "Media Foundation audio producer",
+
+ plugin_win_mf_producer_audio_set,
+ plugin_win_mf_producer_audio_prepare,
+ plugin_win_mf_producer_audio_start,
+ plugin_win_mf_producer_audio_pause,
+ plugin_win_mf_producer_audio_stop
};
const tmedia_producer_plugin_def_t *plugin_win_mf_producer_audio_plugin_def_t = &plugin_win_mf_producer_audio_plugin_def_s;
@@ -300,34 +297,32 @@ const tmedia_producer_plugin_def_t *plugin_win_mf_producer_audio_plugin_def_t =
// Run session async thread
static void* TSK_STDCALL RunSessionThread(void *pArg)
{
- plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
- MediaEventType met;
-
- TSK_DEBUG_INFO("RunSessionThread (audio) - ENTER");
-
- while(pSelf->bStarted){
- CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- CHECK_HR(hr = pEvent->GetType(&met));
-
- if (FAILED(hrStatus))
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
- if (met == MESessionEnded)
- {
- break;
- }
- SafeRelease(&pEvent);
- }
+ plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (audio) - ENTER");
+
+ while(pSelf->bStarted) {
+ CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus)) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded) {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread (audio) - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread (audio) - EXIT");
- return NULL;
+ return NULL;
}
diff --git a/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx b/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
index f20f9e4..e4bf690 100755
--- a/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
@@ -50,7 +50,7 @@
#endif /* PLUGIN_MF_GOP_SIZE_IN_SECONDS */
DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
- 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
extern const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t;
extern const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t;
@@ -61,548 +61,527 @@ static int _plugin_win_mf_producer_video_unprepare(struct plugin_win_mf_producer
//
// plugin_win_mf_producer_video_t
//
-typedef struct plugin_win_mf_producer_video_s
-{
- TMEDIA_DECLARE_PRODUCER;
-
- bool bStarted, bPrepared, bMuted;
- tsk_thread_handle_t* ppTread[1];
- HWND hWndPreview;
-
- int32_t bitrate_bps; // used when encoder bundled only
-
- DeviceListVideo* pDeviceList;
-
- MFCodecVideo *pEncoder;
- IMFMediaSession *pSession;
- IMFMediaSource *pSource;
- SampleGrabberCB *pCallback;
- IMFActivate *pSinkGrabber;
- IMFActivate *pSinkActivatePreview;
- DisplayWatcher* pWatcherPreview;
- IMFTopology *pTopology;
- IMFMediaType *pGrabberInputType;
+typedef struct plugin_win_mf_producer_video_s {
+ TMEDIA_DECLARE_PRODUCER;
+
+ bool bStarted, bPrepared, bMuted;
+ tsk_thread_handle_t* ppTread[1];
+ HWND hWndPreview;
+
+ int32_t bitrate_bps; // used when encoder bundled only
+
+ DeviceListVideo* pDeviceList;
+
+ MFCodecVideo *pEncoder;
+ IMFMediaSession *pSession;
+ IMFMediaSource *pSource;
+ SampleGrabberCB *pCallback;
+ IMFActivate *pSinkGrabber;
+ IMFActivate *pSinkActivatePreview;
+ DisplayWatcher* pWatcherPreview;
+ IMFTopology *pTopology;
+ IMFMediaType *pGrabberInputType;
}
plugin_win_mf_producer_video_t;
/* ============ Video MF Producer Interface ================= */
static int plugin_win_mf_producer_video_set(tmedia_producer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- HRESULT hr = S_OK;
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
-
- if (!pSelf || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (tsk_striequals(param->key, "action")){
- tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
- HRESULT hr = S_OK;
- switch (action){
- case tmedia_codec_action_encode_idr:
- {
- if (pSelf->pEncoder)
- {
- CHECK_HR(hr = pSelf->pEncoder->RequestKeyFrame());
- }
- break;
- }
- case tmedia_codec_action_bw_down:
- {
- pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps << 1) / 3), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
- TSK_DEBUG_INFO("New target bitrate = %d kbps", pSelf->bitrate_bps);
- if (pSelf->pEncoder)
- {
- CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
- }
- break;
- }
- case tmedia_codec_action_bw_up:
- {
- pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps * 3) >> 1), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
- TSK_DEBUG_INFO("New target bitrate = %d kbps", pSelf->bitrate_bps);
- if (pSelf->pEncoder)
- {
- CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
- }
- break;
- }
- }
- }
- else if (param->value_type == tmedia_pvt_int64){
- if (tsk_striequals(param->key, "local-hwnd")){
- HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
- if (hWnd != pSelf->hWndPreview)
- {
- pSelf->hWndPreview = hWnd;
- if (pSelf->pWatcherPreview)
- {
- CHECK_HR(hr = pSelf->pWatcherPreview->SetHwnd(hWnd));
- }
- }
- }
- }
- else if (param->value_type == tmedia_pvt_int32){
- if (tsk_striequals(param->key, "mute")){
- pSelf->bMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
- if (pSelf->pCallback) {
- pSelf->pCallback->SetMute(pSelf->bMuted);
- }
+ int ret = 0;
+ HRESULT hr = S_OK;
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+ if (!pSelf || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (tsk_striequals(param->key, "action")) {
+ tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+ HRESULT hr = S_OK;
+ switch (action) {
+ case tmedia_codec_action_encode_idr: {
+ if (pSelf->pEncoder) {
+ CHECK_HR(hr = pSelf->pEncoder->RequestKeyFrame());
+ }
+ break;
+ }
+ case tmedia_codec_action_bw_down: {
+ pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps << 1) / 3), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
+            TSK_DEBUG_INFO("New target bitrate = %d bps", pSelf->bitrate_bps);
+ if (pSelf->pEncoder) {
+ CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
+ }
+ break;
+ }
+ case tmedia_codec_action_bw_up: {
+ pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps * 3) >> 1), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
+            TSK_DEBUG_INFO("New target bitrate = %d bps", pSelf->bitrate_bps);
+ if (pSelf->pEncoder) {
+ CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
+ }
+ break;
+ }
+ }
+ }
+ else if (param->value_type == tmedia_pvt_int64) {
+ if (tsk_striequals(param->key, "local-hwnd")) {
+ HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+ if (hWnd != pSelf->hWndPreview) {
+ pSelf->hWndPreview = hWnd;
+ if (pSelf->pWatcherPreview) {
+ CHECK_HR(hr = pSelf->pWatcherPreview->SetHwnd(hWnd));
+ }
+ }
+ }
+ }
+ else if (param->value_type == tmedia_pvt_int32) {
+ if (tsk_striequals(param->key, "mute")) {
+ pSelf->bMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+ if (pSelf->pCallback) {
+ pSelf->pCallback->SetMute(pSelf->bMuted);
+ }
#if 0
- if (pSelf->bStarted && pSelf->pSession) {
- if (pSelf->bMuted) {
- pSelf->pSession->Pause();
- }
- else {
- CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
- }
- }
+ if (pSelf->bStarted && pSelf->pSession) {
+ if (pSelf->bMuted) {
+ pSelf->pSession->Pause();
+ }
+ else {
+ CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+ }
+ }
#endif
- }
- else if (tsk_striequals(param->key, "create-on-current-thead")){
- //producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if (tsk_striequals(param->key, "plugin-firefox")){
- //producer->plugin_firefox = (*((int32_t*)param->value) != 0);
- //if(producer->grabber){
- // producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
- //}
- }
- }
+ }
+ else if (tsk_striequals(param->key, "create-on-current-thead")) {
+ //producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if (tsk_striequals(param->key, "plugin-firefox")) {
+ //producer->plugin_firefox = (*((int32_t*)param->value) != 0);
+ //if(producer->grabber){
+ // producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
+ //}
+ }
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
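The bw_down/bw_up actions above apply a simple multiplicative rate adaptation: scale the current target by 2/3 or 3/2 and clamp it to [0, bandwidth_max_upload]. A minimal standalone sketch of that step (illustrative only, not part of the patch; TSK_CLAMP is written out as min/max here):

// Illustrative sketch: multiplicative bitrate adaptation as used in the "action" handler.
#include <cstdio>
#include <cstdint>
#include <algorithm>

static int32_t adapt_bitrate(int32_t current_bps, bool up, int32_t max_bps)
{
    const int32_t next = up ? (int32_t)((current_bps * 3) >> 1)  // bw_up: x1.5
                            : (int32_t)((current_bps << 1) / 3); // bw_down: x0.66
    return std::min<int32_t>(std::max<int32_t>(next, 0), max_bps); // clamp to [0, max_bps]
}

int main()
{
    int32_t bps = 512000;
    const int32_t max_bps = 1000000;
    bps = adapt_bitrate(bps, false, max_bps); // congestion detected: 341333 bps
    printf("after bw_down: %d bps\n", bps);
    bps = adapt_bitrate(bps, true, max_bps);  // recovery: 511999 bps
    printf("after bw_up:   %d bps\n", bps);
    return 0;
}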
static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
-
- if (!pSelf || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if (pSelf->bPrepared){
- TSK_DEBUG_WARN("MF video producer already prepared");
- return -1;
- }
-
- // FIXME: DirectShow requires flipping but not MF
- // The Core library always tries to flip when OSType==Win32. Must be changed
- TMEDIA_CODEC_VIDEO(codec)->out.flip = tsk_false;
-
- TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
- TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
- TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
- TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
-
- TSK_DEBUG_INFO("MF video producer: fps=%d, width=%d, height=%d",
- TMEDIA_PRODUCER(pSelf)->video.fps,
- TMEDIA_PRODUCER(pSelf)->video.width,
- TMEDIA_PRODUCER(pSelf)->video.height);
-
- HRESULT hr = S_OK;
- IMFAttributes* pSessionAttributes = NULL;
- IMFTopology *pTopology = NULL;
- IMFMediaSink* pEvr = NULL;
- IMFMediaType* pEncoderInputType = NULL;
- IMFTopologyNode *pNodeGrabber = NULL;
- IMFMediaType* pGrabberNegotiatedInputMedia = NULL;
- BOOL bVideoProcessorIsSupported = FALSE;
- const VideoSubTypeGuidPair *pcPreferredSubTypeGuidPair = NULL;
-
- // create device list object
- if (!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListVideo())){
- TSK_DEBUG_ERROR("Failed to create device list");
- hr = E_OUTOFMEMORY;
- goto bail;
- }
- // enumerate devices
- hr = pSelf->pDeviceList->EnumerateDevices();
- if (!SUCCEEDED(hr)){
- goto bail;
- }
-
- // check if we have at least one MF video source connected to the PC
- if (pSelf->pDeviceList->Count() == 0){
- TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
- // do not break the negotiation as one-way video connection is a valid use-case
- }
- else{
- // Get best MF video source
- IMFActivate* pActivate = NULL;
- const char* pczSrcFriendlyName = tmedia_producer_get_friendly_name(tmedia_video);
- if (!tsk_strnullORempty(pczSrcFriendlyName)) {
- TSK_DEBUG_INFO("MF pref. video source = %s", pczSrcFriendlyName);
- wchar_t pczwSrcFriendlyName[MAX_PATH] = { 0 };
- mbstowcs(pczwSrcFriendlyName, pczSrcFriendlyName, sizeof(pczwSrcFriendlyName) / sizeof(pczwSrcFriendlyName[0]));
- hr = pSelf->pDeviceList->GetDeviceBest(&pActivate, pczwSrcFriendlyName);
- }
- else {
- hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
- }
- if (!SUCCEEDED(hr) || !pActivate){
- TSK_DEBUG_ERROR("Failed to get best MF video source");
- if (!pActivate){
- hr = E_OUTOFMEMORY;
- }
- goto bail;
- }
-
- // Create the media source for the device.
- hr = pActivate->ActivateObject(
- __uuidof(IMFMediaSource),
- (void**)&pSelf->pSource
- );
- SafeRelease(&pActivate);
- if (!SUCCEEDED(hr)){
- TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
- goto bail;
- }
-
- // Check whether video processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) is supported
- CHECK_HR(hr = MFUtils::IsVideoProcessorSupported(&bVideoProcessorIsSupported));
-
- // Must not be set because not supported by Frame Rate Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx).aspx) because of color (neither I420 nor NV12)
- // Video Processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) supports both NV12 and I420
- if (!bVideoProcessorIsSupported) {
- UINT32 nWidth, nHeight, nFps;
- hr = MFUtils::GetBestFormat(
- pSelf->pSource,
- &MFVideoFormat_I420,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
- &nWidth,
- &nHeight,
- &nFps,
- &pcPreferredSubTypeGuidPair
- );
- if (SUCCEEDED(hr))
- {
- TSK_DEBUG_INFO("Video processor not supported...using source fps=%u, width=%u, height=%u", nFps, nWidth, nHeight);
- TMEDIA_PRODUCER(pSelf)->video.width = nWidth;
- TMEDIA_PRODUCER(pSelf)->video.height = nHeight;
- TMEDIA_PRODUCER(pSelf)->video.fps = nFps;
- }
- }
-
- // If H.264 is negotiated for this session then, try to find hardware encoder
- // If no HW encoder is found will fallback to SW implementation from x264
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+    if (!pSelf || !codec || !codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if (pSelf->bPrepared) {
+ TSK_DEBUG_WARN("MF video producer already prepared");
+ return -1;
+ }
+
+ // FIXME: DirectShow requires flipping but not MF
+ // The Core library always tries to flip when OSType==Win32. Must be changed
+ TMEDIA_CODEC_VIDEO(codec)->out.flip = tsk_false;
+
+ TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+ TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+ TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+
+ TSK_DEBUG_INFO("MF video producer: fps=%d, width=%d, height=%d",
+ TMEDIA_PRODUCER(pSelf)->video.fps,
+ TMEDIA_PRODUCER(pSelf)->video.width,
+ TMEDIA_PRODUCER(pSelf)->video.height);
+
+ HRESULT hr = S_OK;
+ IMFAttributes* pSessionAttributes = NULL;
+ IMFTopology *pTopology = NULL;
+ IMFMediaSink* pEvr = NULL;
+ IMFMediaType* pEncoderInputType = NULL;
+ IMFTopologyNode *pNodeGrabber = NULL;
+ IMFMediaType* pGrabberNegotiatedInputMedia = NULL;
+ BOOL bVideoProcessorIsSupported = FALSE;
+ const VideoSubTypeGuidPair *pcPreferredSubTypeGuidPair = NULL;
+
+ // create device list object
+ if (!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListVideo())) {
+ TSK_DEBUG_ERROR("Failed to create device list");
+ hr = E_OUTOFMEMORY;
+ goto bail;
+ }
+ // enumerate devices
+ hr = pSelf->pDeviceList->EnumerateDevices();
+ if (!SUCCEEDED(hr)) {
+ goto bail;
+ }
+
+ // check if we have at least one MF video source connected to the PC
+ if (pSelf->pDeviceList->Count() == 0) {
+ TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
+ // do not break the negotiation as one-way video connection is a valid use-case
+ }
+ else {
+ // Get best MF video source
+ IMFActivate* pActivate = NULL;
+ const char* pczSrcFriendlyName = tmedia_producer_get_friendly_name(tmedia_video);
+ if (!tsk_strnullORempty(pczSrcFriendlyName)) {
+ TSK_DEBUG_INFO("MF pref. video source = %s", pczSrcFriendlyName);
+ wchar_t pczwSrcFriendlyName[MAX_PATH] = { 0 };
+ mbstowcs(pczwSrcFriendlyName, pczSrcFriendlyName, sizeof(pczwSrcFriendlyName) / sizeof(pczwSrcFriendlyName[0]));
+ hr = pSelf->pDeviceList->GetDeviceBest(&pActivate, pczwSrcFriendlyName);
+ }
+ else {
+ hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+ }
+ if (!SUCCEEDED(hr) || !pActivate) {
+ TSK_DEBUG_ERROR("Failed to get best MF video source");
+ if (!pActivate) {
+ hr = E_OUTOFMEMORY;
+ }
+ goto bail;
+ }
+
+ // Create the media source for the device.
+ hr = pActivate->ActivateObject(
+ __uuidof(IMFMediaSource),
+ (void**)&pSelf->pSource
+ );
+ SafeRelease(&pActivate);
+ if (!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
+ goto bail;
+ }
+
+ // Check whether video processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) is supported
+ CHECK_HR(hr = MFUtils::IsVideoProcessorSupported(&bVideoProcessorIsSupported));
+
+        // Must not be set because the Frame Rate Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) supports neither I420 nor NV12
+ // Video Processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) supports both NV12 and I420
+ if (!bVideoProcessorIsSupported) {
+ UINT32 nWidth, nHeight, nFps;
+ hr = MFUtils::GetBestFormat(
+ pSelf->pSource,
+ &MFVideoFormat_I420,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
+ &nWidth,
+ &nHeight,
+ &nFps,
+ &pcPreferredSubTypeGuidPair
+ );
+ if (SUCCEEDED(hr)) {
+ TSK_DEBUG_INFO("Video processor not supported...using source fps=%u, width=%u, height=%u", nFps, nWidth, nHeight);
+ TMEDIA_PRODUCER(pSelf)->video.width = nWidth;
+ TMEDIA_PRODUCER(pSelf)->video.height = nHeight;
+ TMEDIA_PRODUCER(pSelf)->video.fps = nFps;
+ }
+ }
+
+    // If H.264 is negotiated for this session, try to find a hardware encoder
+    // If no HW encoder is found, fall back to the x264 SW implementation
#if PLUGIN_MF_PV_BUNDLE_CODEC
- // Before embedding a H.264 encoder we have to be sure that:
- // - Low latency is supported
- // - The user decided to use MF encoder (Microsoft, Intel Quick Sync or any other)
- if ((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
- BOOL bMFEncoderIsRegistered =
- (codec->id == tmedia_codec_id_h264_mp && codec->plugin == mf_codec_h264_main_plugin_def_t)
- || (codec->id == tmedia_codec_id_h264_bp && codec->plugin == mf_codec_h264_base_plugin_def_t);
- if (bMFEncoderIsRegistered)
- {
- // both Microsoft and Intel encoders support NV12 only as input
- // static const BOOL kIsEncoder = TRUE;
- // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pSelf->pEncoder);
- pSelf->pEncoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder);
- if (pSelf->pEncoder)
- {
- pSelf->pEncoder->setBundled(TRUE);
- int32_t avg_bitrate_kbps = tmedia_get_video_bandwidth_kbps_2((unsigned int)TMEDIA_PRODUCER(pSelf)->video.width, (unsigned int)TMEDIA_PRODUCER(pSelf)->video.height, TMEDIA_PRODUCER(pSelf)->video.fps);
- TSK_DEBUG_INFO("MF_MT_AVG_BITRATE defined with value = %d kbps", avg_bitrate_kbps);
- pSelf->bitrate_bps = (avg_bitrate_kbps * 1024);
-
- hr = pSelf->pEncoder->Initialize(
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
- (UINT32)pSelf->bitrate_bps);
- if (SUCCEEDED(hr))
- {
- /*hr =*/ pSelf->pEncoder->SetGOPSize((PLUGIN_MF_GOP_SIZE_IN_SECONDS * TMEDIA_PRODUCER(pSelf)->video.fps));
- }
- if (FAILED(hr))
- {
- SafeRelease(&pSelf->pEncoder);
- hr = S_OK;
- }
- }
- if (SUCCEEDED(hr) && pSelf->pEncoder)
- {
- TMEDIA_PRODUCER(pSelf)->encoder.codec_id = codec->id; // means encoded frames as input
- }
- else
- {
- SafeRelease(&pSelf->pEncoder);
- TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
- }
- }
- else /* if(!bMFEncoderIsRegistered) */
- {
- TSK_DEBUG_INFO("Not bundling MF H.264 encoder even if low latency is supported because another implementation is registered: %s", codec->plugin->desc);
- }
- }
+    // Before embedding an H.264 encoder we have to be sure that:
+ // - Low latency is supported
+ // - The user decided to use MF encoder (Microsoft, Intel Quick Sync or any other)
+ if ((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
+ BOOL bMFEncoderIsRegistered =
+ (codec->id == tmedia_codec_id_h264_mp && codec->plugin == mf_codec_h264_main_plugin_def_t)
+ || (codec->id == tmedia_codec_id_h264_bp && codec->plugin == mf_codec_h264_base_plugin_def_t);
+ if (bMFEncoderIsRegistered) {
+ // both Microsoft and Intel encoders support NV12 only as input
+ // static const BOOL kIsEncoder = TRUE;
+ // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pSelf->pEncoder);
+ pSelf->pEncoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder);
+ if (pSelf->pEncoder) {
+ pSelf->pEncoder->setBundled(TRUE);
+ int32_t avg_bitrate_kbps = tmedia_get_video_bandwidth_kbps_2((unsigned int)TMEDIA_PRODUCER(pSelf)->video.width, (unsigned int)TMEDIA_PRODUCER(pSelf)->video.height, TMEDIA_PRODUCER(pSelf)->video.fps);
+ TSK_DEBUG_INFO("MF_MT_AVG_BITRATE defined with value = %d kbps", avg_bitrate_kbps);
+ pSelf->bitrate_bps = (avg_bitrate_kbps * 1024);
+
+ hr = pSelf->pEncoder->Initialize(
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
+ (UINT32)pSelf->bitrate_bps);
+ if (SUCCEEDED(hr)) {
+ /*hr =*/ pSelf->pEncoder->SetGOPSize((PLUGIN_MF_GOP_SIZE_IN_SECONDS * TMEDIA_PRODUCER(pSelf)->video.fps));
+ }
+ if (FAILED(hr)) {
+ SafeRelease(&pSelf->pEncoder);
+ hr = S_OK;
+ }
+ }
+ if (SUCCEEDED(hr) && pSelf->pEncoder) {
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = codec->id; // means encoded frames as input
+ }
+ else {
+ SafeRelease(&pSelf->pEncoder);
+ TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
+ }
+ }
+ else { /* if(!bMFEncoderIsRegistered) */
+ TSK_DEBUG_INFO("Not bundling MF H.264 encoder even if low latency is supported because another implementation is registered: %s", codec->plugin->desc);
+ }
+ }
#endif
- // Set session attributes
- CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
- CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
-
- // Configure the media type that the Sample Grabber will receive.
- // Setting the major and subtype is usually enough for the topology loader
- // to resolve the topology.
-
- CHECK_HR(hr = MFCreateMediaType(&pSelf->pGrabberInputType));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
-
- CHECK_HR(hr = MFSetAttributeSize(pSelf->pGrabberInputType, MF_MT_FRAME_SIZE, (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, (UINT32)TMEDIA_PRODUCER(pSelf)->video.height));
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_FRAME_RATE, TMEDIA_PRODUCER(pSelf)->video.fps, 1));
-
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, pSelf->pEncoder ? FALSE : TRUE));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, pSelf->pEncoder ? FALSE : TRUE));
- if (pSelf->pEncoder) {
- switch (codec->id){
- case tmedia_codec_id_h264_bp: case tmedia_codec_id_h264_mp:
- {
- CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_MPEG2_PROFILE, (codec->id == tmedia_codec_id_h264_bp) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_AVG_BITRATE, pSelf->bitrate_bps));
- break;
- }
- default:
- {
- TSK_DEBUG_ERROR("HW encoder with id = %d not expected", codec->id);
- assert(false);
- }
- }
- TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
- TSK_DEBUG_INFO("MF video producer chroma = NV12 (because of HW encoder)");
- }
- else {
- // Video Processors will be inserted in the topology if the source cannot produce I420 frames
- // IMPORTANT: Must not be NV12 because not supported by Video Resizer DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx)
- CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->fourcc : MFVideoFormat_I420));
- TMEDIA_PRODUCER(pSelf)->video.chroma = pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->chroma : tmedia_chroma_yuv420p;
- TSK_DEBUG_INFO("MF video producer chroma = %d", TMEDIA_PRODUCER(pSelf)->video.chroma);
- }
-
- if (pSelf->pEncoder) {
- // Unlock the encoder
- //BOOL bIsAsyncMFT = FALSE;
- //CHECK_HR(hr = MFUtils::IsAsyncMFT(pSelf->pEncoder->GetMFT(), &bIsAsyncMFT));
- //if(bIsAsyncMFT)
- //{
- // CHECK_HR(hr = MFUtils::UnlockAsyncMFT(pSelf->pEncoderpSelf->pEncoder->GetMFT()));
- //}
- // Apply Encoder output type (must be called before SetInputType)
- //CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetOutputType(0, pSelf->pGrabberInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
-
- // HW encoders support only NV12
- //CHECK_HR(hr = MFUtils::ConvertVideoTypeToUncompressedType(pSelf->pGrabberInputType, MFVideoFormat_NV12, &pEncoderInputType));
- //CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetInputType(0, pEncoderInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
- }
- // Create the sample grabber sink.
- CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
- CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pGrabberInputType, pSelf->pCallback, &pSelf->pSinkGrabber));
-
- // To run as fast as possible, set this attribute (requires Windows 7):
- CHECK_HR(hr = pSelf->pSinkGrabber->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
-
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
-
- // Create the EVR activation object for the preview.
- CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWndPreview, &pSelf->pSinkActivatePreview));
-
- // Create the topology.
- CHECK_HR(hr = MFUtils::CreateTopology(
- pSelf->pSource,
- pSelf->pEncoder ? pSelf->pEncoder->GetMFT() : NULL,
- pSelf->pSinkGrabber,
- pSelf->pSinkActivatePreview,
- pSelf->pGrabberInputType,
- &pTopology));
- // Resolve topology (adds video processors if needed).
- CHECK_HR(hr = MFUtils::ResolveTopology(pTopology, &pSelf->pTopology));
-
- // Find EVR for the preview.
- CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopology, MFUtils::g_ullTopoIdSinkPreview, (void**)&pEvr));
-
- // Find negotiated media and update producer
- UINT32 nNegWidth = (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, nNegHeight = (UINT32)TMEDIA_PRODUCER(pSelf)->video.height, nNegNumeratorFps = (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps, nNegDenominatorFps = 1;
- CHECK_HR(hr = pSelf->pTopology->GetNodeByID(MFUtils::g_ullTopoIdSinkMain, &pNodeGrabber));
- CHECK_HR(hr = pNodeGrabber->GetInputPrefType(0, &pGrabberNegotiatedInputMedia));
- hr = MFGetAttributeSize(pGrabberNegotiatedInputMedia, MF_MT_FRAME_SIZE, &nNegWidth, &nNegHeight);
- if (SUCCEEDED(hr))
- {
- TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: width(%u/%u), height(%u/%u)",
- TMEDIA_PRODUCER(pSelf)->video.width, nNegWidth,
- TMEDIA_PRODUCER(pSelf)->video.height, nNegHeight
- );
- TMEDIA_PRODUCER(pSelf)->video.width = nNegWidth;
- TMEDIA_PRODUCER(pSelf)->video.height = nNegHeight;
- }
- hr = MFGetAttributeRatio(pGrabberNegotiatedInputMedia, MF_MT_FRAME_RATE, &nNegNumeratorFps, &nNegDenominatorFps);
- if (SUCCEEDED(hr))
- {
- TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: fps(%u/%u)",
- TMEDIA_PRODUCER(pSelf)->video.fps, (nNegNumeratorFps / nNegDenominatorFps)
- );
- TMEDIA_PRODUCER(pSelf)->video.fps = (nNegNumeratorFps / nNegDenominatorFps);
- }
-
- // Create EVR watcher for the preview.
- pSelf->pWatcherPreview = new DisplayWatcher(pSelf->hWndPreview, pEvr, hr);
- CHECK_HR(hr);
- }
+ // Set session attributes
+ CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+ CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+ // Configure the media type that the Sample Grabber will receive.
+ // Setting the major and subtype is usually enough for the topology loader
+ // to resolve the topology.
+
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pGrabberInputType));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+ CHECK_HR(hr = MFSetAttributeSize(pSelf->pGrabberInputType, MF_MT_FRAME_SIZE, (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, (UINT32)TMEDIA_PRODUCER(pSelf)->video.height));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_FRAME_RATE, TMEDIA_PRODUCER(pSelf)->video.fps, 1));
+
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, pSelf->pEncoder ? FALSE : TRUE));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, pSelf->pEncoder ? FALSE : TRUE));
+ if (pSelf->pEncoder) {
+ switch (codec->id) {
+ case tmedia_codec_id_h264_bp:
+ case tmedia_codec_id_h264_mp: {
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_MPEG2_PROFILE, (codec->id == tmedia_codec_id_h264_bp) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_AVG_BITRATE, pSelf->bitrate_bps));
+ break;
+ }
+ default: {
+ TSK_DEBUG_ERROR("HW encoder with id = %d not expected", codec->id);
+ assert(false);
+ }
+ }
+ TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
+ TSK_DEBUG_INFO("MF video producer chroma = NV12 (because of HW encoder)");
+ }
+ else {
+ // Video Processors will be inserted in the topology if the source cannot produce I420 frames
+ // IMPORTANT: Must not be NV12 because not supported by Video Resizer DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx)
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->fourcc : MFVideoFormat_I420));
+ TMEDIA_PRODUCER(pSelf)->video.chroma = pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->chroma : tmedia_chroma_yuv420p;
+ TSK_DEBUG_INFO("MF video producer chroma = %d", TMEDIA_PRODUCER(pSelf)->video.chroma);
+ }
+
+ if (pSelf->pEncoder) {
+ // Unlock the encoder
+ //BOOL bIsAsyncMFT = FALSE;
+ //CHECK_HR(hr = MFUtils::IsAsyncMFT(pSelf->pEncoder->GetMFT(), &bIsAsyncMFT));
+ //if(bIsAsyncMFT)
+ //{
+ // CHECK_HR(hr = MFUtils::UnlockAsyncMFT(pSelf->pEncoderpSelf->pEncoder->GetMFT()));
+ //}
+ // Apply Encoder output type (must be called before SetInputType)
+ //CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetOutputType(0, pSelf->pGrabberInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+
+ // HW encoders support only NV12
+ //CHECK_HR(hr = MFUtils::ConvertVideoTypeToUncompressedType(pSelf->pGrabberInputType, MFVideoFormat_NV12, &pEncoderInputType));
+ //CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetInputType(0, pEncoderInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+ }
+ // Create the sample grabber sink.
+ CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
+ CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pGrabberInputType, pSelf->pCallback, &pSelf->pSinkGrabber));
+
+ // To run as fast as possible, set this attribute (requires Windows 7):
+ CHECK_HR(hr = pSelf->pSinkGrabber->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
+
+ // Create the EVR activation object for the preview.
+ CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWndPreview, &pSelf->pSinkActivatePreview));
+
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(
+ pSelf->pSource,
+ pSelf->pEncoder ? pSelf->pEncoder->GetMFT() : NULL,
+ pSelf->pSinkGrabber,
+ pSelf->pSinkActivatePreview,
+ pSelf->pGrabberInputType,
+ &pTopology));
+ // Resolve topology (adds video processors if needed).
+ CHECK_HR(hr = MFUtils::ResolveTopology(pTopology, &pSelf->pTopology));
+
+ // Find EVR for the preview.
+ CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopology, MFUtils::g_ullTopoIdSinkPreview, (void**)&pEvr));
+
+ // Find negotiated media and update producer
+ UINT32 nNegWidth = (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, nNegHeight = (UINT32)TMEDIA_PRODUCER(pSelf)->video.height, nNegNumeratorFps = (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps, nNegDenominatorFps = 1;
+ CHECK_HR(hr = pSelf->pTopology->GetNodeByID(MFUtils::g_ullTopoIdSinkMain, &pNodeGrabber));
+ CHECK_HR(hr = pNodeGrabber->GetInputPrefType(0, &pGrabberNegotiatedInputMedia));
+ hr = MFGetAttributeSize(pGrabberNegotiatedInputMedia, MF_MT_FRAME_SIZE, &nNegWidth, &nNegHeight);
+ if (SUCCEEDED(hr)) {
+ TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: width(%u/%u), height(%u/%u)",
+ TMEDIA_PRODUCER(pSelf)->video.width, nNegWidth,
+ TMEDIA_PRODUCER(pSelf)->video.height, nNegHeight
+ );
+ TMEDIA_PRODUCER(pSelf)->video.width = nNegWidth;
+ TMEDIA_PRODUCER(pSelf)->video.height = nNegHeight;
+ }
+ hr = MFGetAttributeRatio(pGrabberNegotiatedInputMedia, MF_MT_FRAME_RATE, &nNegNumeratorFps, &nNegDenominatorFps);
+ if (SUCCEEDED(hr)) {
+ TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: fps(%u/%u)",
+ TMEDIA_PRODUCER(pSelf)->video.fps, (nNegNumeratorFps / nNegDenominatorFps)
+ );
+ TMEDIA_PRODUCER(pSelf)->video.fps = (nNegNumeratorFps / nNegDenominatorFps);
+ }
+
+ // Create EVR watcher for the preview.
+ pSelf->pWatcherPreview = new DisplayWatcher(pSelf->hWndPreview, pEvr, hr);
+ CHECK_HR(hr);
+ }
bail:
- SafeRelease(&pSessionAttributes);
- SafeRelease(&pTopology);
- SafeRelease(&pEvr);
- SafeRelease(&pEncoderInputType);
- SafeRelease(&pNodeGrabber);
- SafeRelease(&pGrabberNegotiatedInputMedia);
-
- pSelf->bPrepared = SUCCEEDED(hr);
- return pSelf->bPrepared ? 0 : -1;
+ SafeRelease(&pSessionAttributes);
+ SafeRelease(&pTopology);
+ SafeRelease(&pEvr);
+ SafeRelease(&pEncoderInputType);
+ SafeRelease(&pNodeGrabber);
+ SafeRelease(&pGrabberNegotiatedInputMedia);
+
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return pSelf->bPrepared ? 0 : -1;
}
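
For context on the sample-grabber path prepared above: apart from SampleGrabberCB and the MFUtils helpers, everything is stock Media Foundation API (MFCreateMediaType, MFSetAttributeSize/Ratio, MFCreateSampleGrabberSinkActivate, MFCreateMediaSession). The standalone sketch below shows how a grabber input type and sink activate are built from such calls; the "cb" parameter, the I420 subtype and the width/height/fps arguments are illustrative assumptions, not the plugin's negotiated values.

#include <mfapi.h>
#include <mfidl.h>

// Sketch only: build a progressive video media type and wrap it, together with a
// caller-supplied IMFSampleGrabberSinkCallback, in a sample-grabber sink activate.
// The plugin uses its own SampleGrabberCB class in the "cb" role.
static HRESULT CreateGrabberActivateSketch(UINT32 width, UINT32 height, UINT32 fps,
        IMFSampleGrabberSinkCallback* cb, IMFActivate** ppActivate)
{
    IMFMediaType* pType = NULL;
    HRESULT hr = MFCreateMediaType(&pType);
    if (SUCCEEDED(hr)) hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    if (SUCCEEDED(hr)) hr = pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_I420); // raw path; H.264 when a HW encoder is present
    if (SUCCEEDED(hr)) hr = pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    if (SUCCEEDED(hr)) hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, width, height);
    if (SUCCEEDED(hr)) hr = MFSetAttributeRatio(pType, MF_MT_FRAME_RATE, fps, 1);
    if (SUCCEEDED(hr)) hr = MFCreateSampleGrabberSinkActivate(pType, cb, ppActivate);
    if (pType) pType->Release();
    return hr;
}
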
static int plugin_win_mf_producer_video_start(tmedia_producer_t* self)
{
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
-
- if (!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pSelf->bStarted){
- TSK_DEBUG_INFO("MF video producer already started");
- return 0;
- }
- if (!pSelf->bPrepared){
- TSK_DEBUG_ERROR("MF video producer not prepared");
- return -1;
- }
-
- HRESULT hr = S_OK;
-
- // Run preview watcher
- if (pSelf->pWatcherPreview) {
- CHECK_HR(hr = pSelf->pWatcherPreview->Start());
- }
-
- // Run the media session.
- CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
-
- // Start asynchronous watcher thread
- pSelf->bStarted = true;
- int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
- if (ret != 0) {
- TSK_DEBUG_ERROR("Failed to create thread");
- hr = E_FAIL;
- pSelf->bStarted = false;
- if (pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
- goto bail;
- }
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+ if (!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video producer already started");
+ return 0;
+ }
+ if (!pSelf->bPrepared) {
+ TSK_DEBUG_ERROR("MF video producer not prepared");
+ return -1;
+ }
+
+ HRESULT hr = S_OK;
+
+ // Run preview watcher
+ if (pSelf->pWatcherPreview) {
+ CHECK_HR(hr = pSelf->pWatcherPreview->Start());
+ }
+
+ // Run the media session.
+ CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+
+ // Start asynchronous watcher thread
+ pSelf->bStarted = true;
+ int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+ if (ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ hr = E_FAIL;
+ pSelf->bStarted = false;
+ if (pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
+ goto bail;
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
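
MFUtils::RunSession above is a plugin helper; in terms of the stock IMFMediaSession API it amounts to queuing the resolved topology and starting the presentation, roughly as sketched below. This is an illustration of the pattern only (the real helper additionally waits for the corresponding session events), not the plugin's implementation.

#include <mfidl.h>
#include <propidl.h>

// Sketch: what "run the session" means at the Media Foundation level.
static HRESULT RunSessionSketch(IMFMediaSession* pSession, IMFTopology* pTopology)
{
    HRESULT hr = pSession->SetTopology(0, pTopology); // queue the already-resolved topology
    if (SUCCEEDED(hr)) {
        PROPVARIANT varStart;
        PropVariantInit(&varStart);                   // VT_EMPTY => start from the current position
        hr = pSession->Start(&GUID_NULL, &varStart);
        PropVariantClear(&varStart);
    }
    return hr;
}
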
static int plugin_win_mf_producer_video_pause(tmedia_producer_t* self)
{
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
- if (!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if (!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("MF video producer not started");
- return 0;
- }
+ if (!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if (!pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video producer not started");
+ return 0;
+ }
- HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+ HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_producer_video_stop(tmedia_producer_t* self)
{
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
- if (!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if (pSelf->pWatcherPreview){
- hr = pSelf->pWatcherPreview->Stop();
- }
+ if (pSelf->pWatcherPreview) {
+ hr = pSelf->pWatcherPreview->Stop();
+ }
- // for the thread
- pSelf->bStarted = false;
- hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
- if (pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
+ // for the thread
+ pSelf->bStarted = false;
+ hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
+ if (pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
- // next start() will be called after prepare()
- return _plugin_win_mf_producer_video_unprepare(pSelf);
+ // next start() will be called after prepare()
+ return _plugin_win_mf_producer_video_unprepare(pSelf);
}
static int _plugin_win_mf_producer_video_unprepare(plugin_win_mf_producer_video_t* pSelf)
{
- if (!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pSelf->bStarted) {
- // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- TSK_DEBUG_ERROR("Producer must be stopped before calling unprepare");
- }
- if (pSelf->pDeviceList){
- delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
- }
- if (pSelf->pWatcherPreview){
- pSelf->pWatcherPreview->Stop();
- }
- if (pSelf->pSource){
- pSelf->pSource->Shutdown();
- }
- if (pSelf->pSession){
- pSelf->pSession->Shutdown();
- }
-
- SafeRelease(&pSelf->pEncoder);
- SafeRelease(&pSelf->pSession);
- SafeRelease(&pSelf->pSource);
- SafeRelease(&pSelf->pSinkActivatePreview);
- SafeRelease(&pSelf->pCallback);
- SafeRelease(&pSelf->pSinkGrabber);
- SafeRelease(&pSelf->pTopology);
- SafeRelease(&pSelf->pGrabberInputType);
-
- if (pSelf->pWatcherPreview){
- delete pSelf->pWatcherPreview;
- pSelf->pWatcherPreview = NULL;
- }
-
- pSelf->bPrepared = false;
-
- return 0;
+ if (!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pSelf->bStarted) {
+ // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ TSK_DEBUG_ERROR("Producer must be stopped before calling unprepare");
+ }
+ if (pSelf->pDeviceList) {
+ delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+ }
+ if (pSelf->pWatcherPreview) {
+ pSelf->pWatcherPreview->Stop();
+ }
+ if (pSelf->pSource) {
+ pSelf->pSource->Shutdown();
+ }
+ if (pSelf->pSession) {
+ pSelf->pSession->Shutdown();
+ }
+
+ SafeRelease(&pSelf->pEncoder);
+ SafeRelease(&pSelf->pSession);
+ SafeRelease(&pSelf->pSource);
+ SafeRelease(&pSelf->pSinkActivatePreview);
+ SafeRelease(&pSelf->pCallback);
+ SafeRelease(&pSelf->pSinkGrabber);
+ SafeRelease(&pSelf->pTopology);
+ SafeRelease(&pSelf->pGrabberInputType);
+
+ if (pSelf->pWatcherPreview) {
+ delete pSelf->pWatcherPreview;
+ pSelf->pWatcherPreview = NULL;
+ }
+
+ pSelf->bPrepared = false;
+
+ return 0;
}
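
Two details of the teardown above are worth spelling out. Shutdown() is called on the session and the source before the SafeRelease chain because Media Foundation objects keep internal references that only Shutdown() breaks; releasing them without shutting down can leak the underlying objects and keep the camera in use. SafeRelease itself is the conventional Media Foundation sample helper; its usual definition is sketched below (the plugin's own header may differ slightly).

// Conventional SafeRelease helper from the Media Foundation samples: release a COM
// interface if the pointer is non-NULL and clear it so later releases are harmless.
template <class T> void SafeRelease(T** ppT)
{
    if (*ppT) {
        (*ppT)->Release();
        *ppT = NULL;
    }
}
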
//
@@ -611,63 +590,61 @@ static int _plugin_win_mf_producer_video_unprepare(plugin_win_mf_producer_video_
/* constructor */
static tsk_object_t* plugin_win_mf_producer_video_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
- if (pSelf){
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
-
- /* init self with default values*/
- TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
- TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
- TMEDIA_PRODUCER(pSelf)->video.fps = 15;
- TMEDIA_PRODUCER(pSelf)->video.width = 352;
- TMEDIA_PRODUCER(pSelf)->video.height = 288;
-
- TSK_DEBUG_INFO("Create WinMF video producer");
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
+ if (pSelf) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
+
+ /* init self with default values*/
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+ TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
+ TMEDIA_PRODUCER(pSelf)->video.fps = 15;
+ TMEDIA_PRODUCER(pSelf)->video.width = 352;
+ TMEDIA_PRODUCER(pSelf)->video.height = 288;
+
+ TSK_DEBUG_INFO("Create WinMF video producer");
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_producer_video_dtor(tsk_object_t * self)
{
- plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
- if (pSelf){
- /* stop */
- if (pSelf->bStarted){
- plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
- /* deinit self */
- _plugin_win_mf_producer_video_unprepare(pSelf);
- }
-
- return self;
+ plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
+ if (pSelf) {
+ /* stop */
+ if (pSelf->bStarted) {
+ plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
+ /* deinit self */
+ _plugin_win_mf_producer_video_unprepare(pSelf);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_producer_video_def_s =
-{
- sizeof(plugin_win_mf_producer_video_t),
- plugin_win_mf_producer_video_ctor,
- plugin_win_mf_producer_video_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_mf_producer_video_def_s = {
+ sizeof(plugin_win_mf_producer_video_t),
+ plugin_win_mf_producer_video_ctor,
+ plugin_win_mf_producer_video_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t plugin_win_mf_producer_video_plugin_def_s =
-{
- &plugin_win_mf_producer_video_def_s,
+static const tmedia_producer_plugin_def_t plugin_win_mf_producer_video_plugin_def_s = {
+ &plugin_win_mf_producer_video_def_s,
- tmedia_video,
- "Microsoft Windows Media Foundation producer (Video)",
+ tmedia_video,
+ "Microsoft Windows Media Foundation producer (Video)",
- plugin_win_mf_producer_video_set,
- plugin_win_mf_producer_video_prepare,
- plugin_win_mf_producer_video_start,
- plugin_win_mf_producer_video_pause,
- plugin_win_mf_producer_video_stop
+ plugin_win_mf_producer_video_set,
+ plugin_win_mf_producer_video_prepare,
+ plugin_win_mf_producer_video_start,
+ plugin_win_mf_producer_video_pause,
+ plugin_win_mf_producer_video_stop
};
const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t = &plugin_win_mf_producer_video_plugin_def_s;
@@ -675,40 +652,38 @@ const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t =
// Run session async thread
static void* TSK_STDCALL RunSessionThread(void *pArg)
{
- plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
- MediaEventType met;
-
- TSK_DEBUG_INFO("RunSessionThread (MF video producer) - ENTER");
-
- while (pSelf->bStarted){
- hr = pSelf->pSession->GetEvent(0, &pEvent);
- if (hr == MF_E_SHUTDOWN) {
- if (pSelf->bStarted) {
- CHECK_HR(hr); // Shutdown called but "bStarted" not equal to false
- }
- break; // Shutdown called and "bStarted" is equal to false => break the loop
- }
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- CHECK_HR(hr = pEvent->GetType(&met));
-
- if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
- if (met == MESessionEnded)
- {
- break;
- }
- SafeRelease(&pEvent);
- }
+ plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (MF video producer) - ENTER");
+
+ while (pSelf->bStarted) {
+ hr = pSelf->pSession->GetEvent(0, &pEvent);
+ if (hr == MF_E_SHUTDOWN) {
+ if (pSelf->bStarted) {
+ CHECK_HR(hr); // Shutdown called but "bStarted" not equal to false
+ }
+ break; // Shutdown called and "bStarted" is equal to false => break the loop
+ }
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded) {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread (MF video producer) - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread (MF video producer) - EXIT");
- return NULL;
+ return NULL;
}
\ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_tdav.cxx b/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
index d08bcfc..1885617 100755
--- a/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/