summaryrefslogtreecommitdiffstats
path: root/plugins/pluginWinMF
diff options
context:
space:
mode:
Diffstat (limited to 'plugins/pluginWinMF')
-rw-r--r--plugins/pluginWinMF/dllmain_mf.cxx244
-rw-r--r--plugins/pluginWinMF/internals/mf_codec.cxx888
-rw-r--r--plugins/pluginWinMF/internals/mf_codec.h158
-rw-r--r--plugins/pluginWinMF/internals/mf_codec_topology.cxx473
-rw-r--r--plugins/pluginWinMF/internals/mf_codec_topology.h87
-rw-r--r--plugins/pluginWinMF/internals/mf_custom_src.cxx1722
-rw-r--r--plugins/pluginWinMF/internals/mf_custom_src.h340
-rw-r--r--plugins/pluginWinMF/internals/mf_devices.cxx151
-rw-r--r--plugins/pluginWinMF/internals/mf_devices.h64
-rw-r--r--plugins/pluginWinMF/internals/mf_display_watcher.cxx160
-rw-r--r--plugins/pluginWinMF/internals/mf_display_watcher.h55
-rw-r--r--plugins/pluginWinMF/internals/mf_sample_grabber.cxx135
-rw-r--r--plugins/pluginWinMF/internals/mf_sample_grabber.h68
-rw-r--r--plugins/pluginWinMF/internals/mf_sample_queue.cxx158
-rw-r--r--plugins/pluginWinMF/internals/mf_sample_queue.h81
-rw-r--r--plugins/pluginWinMF/internals/mf_utils.cxx2104
-rw-r--r--plugins/pluginWinMF/internals/mf_utils.h260
-rw-r--r--plugins/pluginWinMF/pluginWinMF.vcproj319
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx750
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_config.h75
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx163
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx1620
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_converter_video.cxx600
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx333
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_producer_video.cxx708
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_tdav.cxx22
-rw-r--r--plugins/pluginWinMF/version.rc102
27 files changed, 11840 insertions, 0 deletions
diff --git a/plugins/pluginWinMF/dllmain_mf.cxx b/plugins/pluginWinMF/dllmain_mf.cxx
new file mode 100644
index 0000000..aeeb863
--- /dev/null
+++ b/plugins/pluginWinMF/dllmain_mf.cxx
@@ -0,0 +1,244 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+
+#include "tinymedia/tmedia_producer.h"
+#include "tinymedia/tmedia_consumer.h"
+#include "tinymedia/tmedia_converter_video.h"
+
+#include "tsk_plugin.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+
+#if defined(_MSC_VER)
+# pragma comment(lib, "mfplat")
+# pragma comment(lib, "mf")
+# pragma comment(lib, "mfuuid")
+# pragma comment(lib, "shlwapi")
+# pragma comment(lib, "Strmiids")
+#endif
+
+#if !defined(PLUGIN_MF_ENABLE_AUDIO_IO)
+# define PLUGIN_MF_ENABLE_AUDIO_IO 0 /* audio not good as DirectSound */
+#endif
+#if !defined(PLUGIN_MF_ENABLE_VIDEO_CONVERTER)
+# define PLUGIN_MF_ENABLE_VIDEO_CONVERTER 1
+#endif
+#if !defined(PLUGIN_MF_ENABLE_VIDEO_IO)
+# define PLUGIN_MF_ENABLE_VIDEO_IO 1
+#endif
+
+extern const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t;
+extern const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t;
+
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+extern const tmedia_converter_video_plugin_def_t *plugin_win_mf_converter_video_ms_plugin_def_t;
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+extern const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t;
+extern const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t;
+#endif
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+extern const tmedia_producer_plugin_def_t *plugin_win_mf_producer_audio_plugin_def_t;
+extern const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_audio_plugin_def_t;
+#endif
+
+PLUGIN_WIN_MF_BEGIN_DECLS /* BEGIN */
+PLUGIN_WIN_MFP_API int __plugin_get_def_count();
+PLUGIN_WIN_MFP_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+PLUGIN_WIN_MFP_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+PLUGIN_WIN_MFP_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+PLUGIN_WIN_MF_END_DECLS /* END */
+
+// Standard DLL entry point. This plugin performs no per-process or
+// per-thread initialization here (Media Foundation startup is done lazily
+// via MFUtils::Startup()), so every notification is a no-op.
+BOOL APIENTRY DllMain( HMODULE hModule,
+                       DWORD  ul_reason_for_call,
+                       LPVOID lpReserved
+					 )
+{
+	(void)hModule;
+	(void)ul_reason_for_call;
+	(void)lpReserved;
+	return TRUE;
+}
+
+
+// Zero-based registry of the plugins exported by this DLL.
+// NOTE(review): the order matters — the two H.264 codec entries are kept
+// last so __plugin_get_def_count() can drop exactly those two when
+// low-latency H.264 MFTs are unavailable; keep them at the end.
+typedef enum PLUGIN_INDEX_E
+{
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+	PLUGIN_INDEX_AUDIO_CONSUMER,   // WASAPI/MF audio renderer
+	PLUGIN_INDEX_AUDIO_PRODUCER,   // MF audio capture
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+	PLUGIN_INDEX_VIDEO_PRODUCER,   // MF camera capture
+	PLUGIN_INDEX_VIDEO_CONSUMER,   // MF video renderer (requires D3D9)
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+	PLUGIN_INDEX_VIDEO_CONVERTER,  // MF video chroma/size converter
+#endif
+
+	PLUGIN_INDEX_CODEC_H264_MAIN,  // H.264 Main profile (hardware MFT)
+	PLUGIN_INDEX_CODEC_H264_BASE,  // H.264 Baseline profile (hardware MFT)
+
+	PLUGIN_INDEX_COUNT             // number of entries above
+}
+PLUGIN_INDEX_T;
+
+
+// Number of plugin definitions exported by this DLL.
+// The two H.264 codec entries (always the last two indexes of
+// PLUGIN_INDEX_E) are only counted when the OS provides low-latency
+// H.264 encoder/decoder MFTs.
+int __plugin_get_def_count()
+{
+	return MFUtils::IsLowLatencyH264Supported()
+		? PLUGIN_INDEX_COUNT
+		: (PLUGIN_INDEX_COUNT - 2);
+}
+
+// Returns the plugin kind (consumer/producer/converter/codec) registered at
+// 'index', or tsk_plugin_def_type_none when the runtime requirement
+// (D3D9, low-latency H.264 MFT) is missing for that entry.
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+	switch(index){
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+		case PLUGIN_INDEX_AUDIO_CONSUMER:
+		case PLUGIN_INDEX_AUDIO_PRODUCER:
+			{
+				return (index == PLUGIN_INDEX_AUDIO_CONSUMER) ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_producer;
+			}
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+		case PLUGIN_INDEX_VIDEO_CONSUMER:
+			{
+				// Video rendering needs Direct3D 9 support.
+				return MFUtils::IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
+			}
+		case PLUGIN_INDEX_VIDEO_PRODUCER:
+			{
+				return tsk_plugin_def_type_producer;
+			}
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+		case PLUGIN_INDEX_VIDEO_CONVERTER:
+			{
+				return tsk_plugin_def_type_converter;
+			}
+#endif
+		case PLUGIN_INDEX_CODEC_H264_MAIN:
+		case PLUGIN_INDEX_CODEC_H264_BASE:
+			{
+				return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
+			}
+		default:
+			{
+				TSK_DEBUG_ERROR("No plugin at index %d", index);
+				return tsk_plugin_def_type_none;
+			}
+	}
+}
+
+// Returns the media type (audio/video) of the plugin registered at 'index',
+// or tsk_plugin_def_media_type_none when the corresponding runtime support
+// is missing (mirrors __plugin_get_def_type_at()).
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+	switch(index){
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+		case PLUGIN_INDEX_AUDIO_CONSUMER:
+		case PLUGIN_INDEX_AUDIO_PRODUCER:
+			{
+				return tsk_plugin_def_media_type_audio;
+			}
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+		case PLUGIN_INDEX_VIDEO_CONSUMER:
+			{
+				// Video rendering needs Direct3D 9 support.
+				return MFUtils::IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+			}
+		case PLUGIN_INDEX_VIDEO_PRODUCER:
+			{
+				return tsk_plugin_def_media_type_video;
+			}
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+		case PLUGIN_INDEX_VIDEO_CONVERTER:
+			{
+				return tsk_plugin_def_media_type_video;
+			}
+#endif
+		case PLUGIN_INDEX_CODEC_H264_MAIN:
+		case PLUGIN_INDEX_CODEC_H264_BASE:
+			{
+				return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+			}
+		default:
+			{
+				TSK_DEBUG_ERROR("No plugin at index %d", index);
+				return tsk_plugin_def_media_type_none;
+			}
+	}
+}
+
+// Returns the plugin definition structure registered at 'index' (declared
+// extern at the top of this file), or tsk_null when its runtime requirement
+// is missing. Must stay consistent with __plugin_get_def_type_at().
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+	switch(index){
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+		case PLUGIN_INDEX_VIDEO_PRODUCER:
+			{
+				return plugin_win_mf_producer_video_plugin_def_t;
+			}
+		case PLUGIN_INDEX_VIDEO_CONSUMER:
+			{
+				// Video rendering needs Direct3D 9 support.
+				return MFUtils::IsD3D9Supported() ? plugin_win_mf_consumer_video_plugin_def_t : tsk_null;
+			}
+#endif
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+		case PLUGIN_INDEX_AUDIO_PRODUCER:
+			{
+				return plugin_win_mf_producer_audio_plugin_def_t;
+			}
+		case PLUGIN_INDEX_AUDIO_CONSUMER:
+			{
+				return plugin_win_mf_consumer_audio_plugin_def_t;
+			}
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+		case PLUGIN_INDEX_VIDEO_CONVERTER:
+			{
+				return plugin_win_mf_converter_video_ms_plugin_def_t;
+			}
+#endif
+		case PLUGIN_INDEX_CODEC_H264_MAIN:
+			{
+				return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_main_plugin_def_t : tsk_null;
+			}
+		case PLUGIN_INDEX_CODEC_H264_BASE:
+			{
+				return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_base_plugin_def_t : tsk_null;
+			}
+		default:
+			{
+				TSK_DEBUG_ERROR("No plugin at index %d", index);
+				return tsk_null;
+			}
+	}
+}
diff --git a/plugins/pluginWinMF/internals/mf_codec.cxx b/plugins/pluginWinMF/internals/mf_codec.cxx
new file mode 100644
index 0000000..e2968f4
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_codec.cxx
@@ -0,0 +1,888 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_codec.h"
+#include "mf_utils.h"
+#include "mf_sample_queue.h"
+
+#include "tinymedia/tmedia_common.h"
+
+#include "tsk_debug.h"
+
+#include <KS.h>
+#include <Codecapi.h>
+#include <assert.h>
+#include <initguid.h>
+
+// NV12 is the only format supported by all HW encoders and decoders
+#if !defined(kMFCodecUncompressedFormat)
+# define kMFCodecUncompressedFormat MFVideoFormat_NV12
+#endif
+
+// Max frames allowed in the queue
+#if !defined(kMFCodecQueuedFramesMax)
+# define kMFCodecQueuedFramesMax (30 << 1)
+#endif
+
+// Make sure usable on Win7 SDK targeting Win8 OS
+#if !defined(CODECAPI_AVLowLatencyMode)
+DEFINE_GUID(CODECAPI_AVLowLatencyMode,
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+#endif
+#if !defined(CODECAPI_AVDecVideoH264ErrorConcealment)
+DEFINE_GUID(CODECAPI_AVDecVideoH264ErrorConcealment,
+0xececace8, 0x3436, 0x462c, 0x92, 0x94, 0xcd, 0x7b, 0xac, 0xd7, 0x58, 0xa9);
+#endif
+
+//
+// MFCodec
+//
+
+// Builds a codec wrapper around an IMFTransform.
+// eId   : which codec (H.264 Base/Main or AAC) — decides media type and
+//         compressed format GUID.
+// eType : encoder or decoder.
+// pMFT  : optional caller-supplied transform (AddRef'd as-is); when NULL the
+//         best available MFT is looked up via MFUtils::GetBestCodec().
+// On any failure m_pMFT/m_pCodecAPI are released so IsValid() reports false;
+// the constructor itself never throws.
+MFCodec::MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+: m_nRefCount(1)
+, m_eId(eId)
+, m_eType(eType)
+, m_pMFT(NULL)
+, m_pCodecAPI(NULL)
+, m_pOutputType(NULL)
+, m_pInputType(NULL)
+, m_dwInputID(0)
+, m_dwOutputID(0)
+, m_rtStart(0)
+, m_rtDuration(0)
+, m_pSampleIn(NULL)
+, m_pSampleOut(NULL)
+, m_pEventGenerator(NULL)
+, m_bIsAsync(FALSE)
+, m_bIsFirstFrame(TRUE)
+, m_bIsBundled(FALSE)
+, m_nMETransformNeedInputCount(0)
+, m_nMETransformHaveOutputCount(0)
+, m_pSampleQueueAsyncInput(NULL)
+{
+	// Lazy, process-wide Media Foundation initialization.
+	MFUtils::Startup();
+
+	HRESULT hr = S_OK;
+
+	switch(eId)
+	{
+		case MFCodecId_H264Base:
+		case MFCodecId_H264Main:
+			{
+				m_eMediaType = MFCodecMediaType_Video;
+				m_guidCompressedFormat = MFVideoFormat_H264;
+				break;
+			}
+		case MFCodecId_AAC:
+			{
+				m_eMediaType = MFCodecMediaType_Audio;
+				m_guidCompressedFormat = MFAudioFormat_AAC;
+				break;
+			}
+		default:
+			{
+				assert(false);
+				break;
+			}
+	}
+	CHECK_HR(hr = MFCreateMediaType(&m_pOutputType));
+	CHECK_HR(hr = MFCreateMediaType(&m_pInputType));
+	if(pMFT) // up to the caller to make sure all parameters are corrrect
+	{
+		m_pMFT = pMFT;
+		m_pMFT->AddRef();
+	}
+	else
+	{
+		// Pick the best MFT: encoders take uncompressed (NV12) in and the
+		// compressed format out; decoders the reverse.
+		CHECK_HR(hr = MFUtils::GetBestCodec(
+			(m_eType == MFCodecType_Encoder) ? TRUE : FALSE, // Encoder ?
+			(m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio, // Media Type
+			(m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat/*GUID_NULL*/, // Input
+			(m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat, // Output
+			&m_pMFT));
+	}
+	// ICodecAPI is mandatory for encoders (bitrate/GOP/latency tuning);
+	// decoders may legitimately not expose it.
+	hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pCodecAPI));
+	if(FAILED(hr) && m_eType == MFCodecType_Encoder) // Required only for Encoders
+	{
+		CHECK_HR(hr);
+	}
+
+
+	// Async (hardware) MFTs need an input queue plus event-driven pumping.
+	CHECK_HR(hr = MFUtils::IsAsyncMFT(m_pMFT, &m_bIsAsync));
+	if(m_bIsAsync)
+	{
+		m_pSampleQueueAsyncInput = new MFSampleQueue();
+		if(!m_pSampleQueueAsyncInput)
+		{
+			CHECK_HR(hr = E_OUTOFMEMORY);
+		}
+		CHECK_HR(hr = MFUtils::UnlockAsyncMFT(m_pMFT));
+		CHECK_HR(hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pEventGenerator)));
+	}
+
+bail:
+	if(FAILED(hr))
+	{
+		SafeRelease(&m_pMFT);
+		SafeRelease(&m_pCodecAPI);
+	}
+	if(!IsValid())
+	{
+		TSK_DEBUG_ERROR("Failed to create codec with id = %d", m_eId);
+	}
+}
+
+// Drains an async MFT (end-of-stream + drain messages) before releasing all
+// COM members. Only ever reached through Release() when the refcount hits 0.
+MFCodec::~MFCodec()
+{
+	assert(m_nRefCount == 0);
+
+	if(m_bIsAsync && m_pMFT)
+	{
+		m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
+		m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
+	}
+
+	SafeRelease(&m_pMFT);
+	SafeRelease(&m_pCodecAPI);
+	SafeRelease(&m_pOutputType);
+	SafeRelease(&m_pInputType);
+	SafeRelease(&m_pSampleIn);
+	SafeRelease(&m_pSampleOut);
+	SafeRelease(&m_pEventGenerator);
+	SafeRelease(&m_pSampleQueueAsyncInput);
+}
+
+// IUnknown: thread-safe reference increment.
+ULONG MFCodec::AddRef()
+{
+	return InterlockedIncrement(&m_nRefCount);
+}
+
+// IUnknown: thread-safe reference decrement; self-deletes at zero.
+ULONG MFCodec::Release()
+{
+	ULONG uCount = InterlockedDecrement(&m_nRefCount);
+	if (uCount == 0)
+	{
+		delete this;
+	}
+	// For thread safety, return a temporary variable.
+	return uCount;
+}
+
+// IUnknown: interface queries are forwarded to the wrapped MFT; without a
+// valid transform there is nothing to query.
+HRESULT MFCodec::QueryInterface(REFIID iid, void** ppv)
+{
+	return IsValid() ? m_pMFT->QueryInterface(iid, ppv) : E_FAIL;
+}
+
+// IMFAsyncCallback
+// IMFAsyncCallback: optional; returning E_NOTIMPL selects default
+// scheduling behavior for Invoke().
+STDMETHODIMP MFCodec::GetParameters(DWORD *pdwFlags, DWORD *pdwQueue)
+{
+	return E_NOTIMPL;
+}
+
+// IMFAsyncCallback: event pump for async MFTs. Counts the transform's
+// METransformNeedInput / METransformHaveOutput events into atomic counters
+// consumed by ProcessInput()/ProcessOutput(), then re-arms BeginGetEvent so
+// the pump keeps running.
+STDMETHODIMP MFCodec::Invoke(IMFAsyncResult *pAsyncResult)
+{
+	HRESULT hr = S_OK, hrStatus = S_OK;
+	IMFMediaEvent* pEvent = NULL;
+	MediaEventType meType = MEUnknown;
+
+	CHECK_HR(hr = m_pEventGenerator->EndGetEvent(pAsyncResult, &pEvent));
+	CHECK_HR(hr = pEvent->GetType(&meType));
+	CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+
+	if (SUCCEEDED(hrStatus))
+	{
+		switch(meType)
+		{
+			case METransformNeedInput:
+				{
+					InterlockedIncrement(&m_nMETransformNeedInputCount);
+					break;
+				}
+
+			case METransformHaveOutput:
+				{
+					InterlockedIncrement(&m_nMETransformHaveOutputCount);
+					break;
+				}
+		}
+	}
+
+	// Request the next event (keeps the pump alive).
+	CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
+
+bail:
+	SafeRelease(&pEvent);
+	return hr;
+}
+
+// Feeds one input sample to the MFT.
+// Sync MFTs: forward directly. Async MFTs: forward immediately when exactly
+// one METransformNeedInput is pending and nothing is queued; otherwise
+// deep-copy the sample (its media buffer is shared memory reused by the
+// caller) into the pending-input queue, then drain the queue against the
+// outstanding need-input count.
+// Returns an HRESULT; E_UNEXPECTED when the async queue overflows
+// (kMFCodecQueuedFramesMax), in which case the queue is flushed.
+HRESULT MFCodec::ProcessInput(IMFSample* pSample)
+{
+	assert(IsReady());
+
+	HRESULT hr = S_OK;
+
+	if(m_bIsFirstFrame)
+	{
+		if(m_bIsAsync && !m_bIsBundled)
+		{
+			// Async MFTs must see start-of-stream before the first sample,
+			// and the event pump must be started.
+			CHECK_HR(hr = m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL));
+			CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
+		}
+		m_bIsFirstFrame = FALSE;
+	}
+
+	if(m_bIsAsync)
+	{
+		// Fast path: transform is waiting for exactly this sample.
+		if(m_nMETransformNeedInputCount == 1 && m_pSampleQueueAsyncInput->IsEmpty())
+		{
+			InterlockedDecrement(&m_nMETransformNeedInputCount);
+			return m_pMFT->ProcessInput(m_dwInputID, pSample, 0);
+		}
+
+		// Transform is falling behind: drop everything rather than grow
+		// the queue (and latency) without bound.
+		if(m_pSampleQueueAsyncInput->Count() > kMFCodecQueuedFramesMax)
+		{
+			m_pSampleQueueAsyncInput->Clear();
+			CHECK_HR(hr = E_UNEXPECTED);
+		}
+
+		// Input sample holds shared memory (also used by other samples)
+		// -> deep-copy payload and timestamps before queueing.
+		IMFSample *pSampleCopy = NULL;
+		IMFMediaBuffer *pMediaBuffer = NULL, *pMediaBufferCopy = NULL;
+		BYTE *pBufferPtr = NULL, *pBufferPtrCopy = NULL;
+		DWORD dwDataLength = 0;
+		BOOL bMediaBufferLocked = FALSE, bMediaBufferLockedCopy = FALSE;
+
+		CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+		hr = pMediaBuffer->GetCurrentLength(&dwDataLength);
+		if(FAILED(hr))
+		{
+			goto endofcopy;
+		}
+		hr = pMediaBuffer->Lock(&pBufferPtr, NULL, NULL);
+		if(FAILED(hr))
+		{
+			goto endofcopy;
+		}
+		bMediaBufferLocked = TRUE;
+
+		hr = MFUtils::CreateMediaSample(dwDataLength, &pSampleCopy);
+		if(FAILED(hr))
+		{
+			goto endofcopy;
+		}
+		hr = pSampleCopy->GetBufferByIndex(0, &pMediaBufferCopy);
+		if(FAILED(hr))
+		{
+			goto endofcopy;
+		}
+		hr = pMediaBufferCopy->Lock(&pBufferPtrCopy, NULL, NULL);
+		if(FAILED(hr))
+		{
+			goto endofcopy;
+		}
+		bMediaBufferLockedCopy = TRUE;
+
+		memcpy(pBufferPtrCopy, pBufferPtr, dwDataLength);
+		hr = pMediaBufferCopy->SetCurrentLength(dwDataLength);
+		if(FAILED(hr))
+		{
+			goto endofcopy;
+		}
+
+		// Timestamps are optional on the source sample: copy best-effort.
+		LONGLONG hnsSampleTime = 0;
+		LONGLONG hnsSampleDuration = 0;
+		hr = pSample->GetSampleTime(&hnsSampleTime);
+		if(SUCCEEDED(hr))
+		{
+			hr = pSampleCopy->SetSampleTime(hnsSampleTime);
+		}
+		hr = pSample->GetSampleDuration(&hnsSampleDuration);
+		if(SUCCEEDED(hr))
+		{
+			hr = pSampleCopy->SetSampleDuration(hnsSampleDuration);
+		}
+
+		// EnQueue (the queue AddRefs the sample)
+		hr = m_pSampleQueueAsyncInput->Queue(pSampleCopy);
+endofcopy:
+		if(pMediaBuffer && bMediaBufferLocked)
+		{
+			pMediaBuffer->Unlock();
+		}
+		if(pMediaBufferCopy && bMediaBufferLockedCopy)
+		{
+			pMediaBufferCopy->Unlock();
+		}
+		SafeRelease(&pSampleCopy);
+		SafeRelease(&pMediaBuffer);
+		SafeRelease(&pMediaBufferCopy); // FIX: was never released -> one IMFMediaBuffer ref leaked per queued sample
+		CHECK_HR(hr);
+
+		// Hand as many queued samples to the MFT as it has asked for.
+		while(m_nMETransformNeedInputCount > 0)
+		{
+			if(m_pSampleQueueAsyncInput->IsEmpty())
+			{
+				break;
+			}
+			IMFSample *_pSample = NULL;
+			hr = m_pSampleQueueAsyncInput->Dequeue(&_pSample);
+			if(SUCCEEDED(hr))
+			{
+				InterlockedDecrement(&m_nMETransformNeedInputCount);
+				hr = m_pMFT->ProcessInput(m_dwInputID, _pSample, 0);
+			}
+			SafeRelease(&_pSample);
+			CHECK_HR(hr);
+		}
+	}
+	else
+	{
+		CHECK_HR(hr = m_pMFT->ProcessInput(m_dwInputID, pSample, 0));
+	}
+
+bail:
+	return hr;
+}
+
+// Pulls one output sample from the MFT into *ppSample (AddRef'd for the
+// caller; NULL when no output is available).
+// Async MFTs: returns S_OK immediately unless a METransformHaveOutput event
+// is pending. When the output stream does not provide its own samples, a
+// cached sample (m_pSampleOut) is reused/grown to the stream's cbSize.
+// MF_E_TRANSFORM_NEED_MORE_INPUT is mapped to S_OK with *ppSample == NULL.
+HRESULT MFCodec::ProcessOutput(IMFSample **ppSample)
+{
+	assert(IsReady());
+
+	if(m_bIsAsync)
+	{
+		if(m_nMETransformHaveOutputCount == 0)
+		{
+			return S_OK;
+		}
+		InterlockedDecrement(&m_nMETransformHaveOutputCount);
+	}
+
+	*ppSample = NULL;
+
+	IMFMediaBuffer* pBufferOut = NULL;
+
+	DWORD dwStatus;
+
+	HRESULT hr = S_OK;
+
+	MFT_OUTPUT_STREAM_INFO mftStreamInfo = { 0 };
+	MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
+
+	CHECK_HR(hr = m_pMFT->GetOutputStreamInfo(m_dwOutputID, &mftStreamInfo));
+
+	BOOL bOutputStreamProvidesSamples = (mftStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES;
+
+	if(!bOutputStreamProvidesSamples)
+	{
+		if(!m_pSampleOut)
+		{
+			// First call: allocate the reusable output sample.
+			CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &m_pSampleOut));
+			hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut);
+			if(FAILED(hr))
+			{
+				SafeRelease(&m_pSampleOut);
+				CHECK_HR(hr);
+			}
+		}
+		else
+		{
+			// Grow the cached buffer if the stream now needs more room.
+			DWORD dwMaxLength = 0;
+			CHECK_HR(hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut));
+			CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
+			if(dwMaxLength < mftStreamInfo.cbSize)
+			{
+				CHECK_HR(hr = m_pSampleOut->RemoveAllBuffers());
+				SafeRelease(&pBufferOut);
+				CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
+				CHECK_HR(hr = m_pSampleOut->AddBuffer(pBufferOut));
+			}
+		}
+	}
+
+	if(pBufferOut)
+	{
+		CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
+	}
+
+	//Set the output sample
+	mftOutputData.pSample = bOutputStreamProvidesSamples ? NULL : m_pSampleOut;
+	//Set the output id
+	mftOutputData.dwStreamID = m_dwOutputID;
+
+	//Generate the output sample
+	hr = m_pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus);
+	if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
+	{
+		// Not an error: transform simply wants more input first.
+		hr = S_OK;
+		goto bail;
+	}
+
+	if (FAILED(hr))
+	{
+		goto bail;
+	}
+
+	*ppSample = mftOutputData.pSample;
+	if(*ppSample)
+	{
+		(*ppSample)->AddRef();
+	}
+
+bail:
+	if(bOutputStreamProvidesSamples)
+	{
+		// Sample was allocated by the MFT itself: release our reference.
+		SafeRelease(&mftOutputData.pSample);
+	}
+	SafeRelease(&pBufferOut);
+	return hr;
+}
+
+// True when construction succeeded: a transform exists, and for encoders
+// the (mandatory) ICodecAPI interface was obtained.
+bool MFCodec::IsValid()
+{
+	return (m_pMFT && (m_eType == MFCodecType_Decoder || m_pCodecAPI));
+}
+
+// True once the codec is valid and both media types have been created
+// (i.e. Initialize() can be / has been driven).
+bool MFCodec::IsReady()
+{
+	return (IsValid() && m_pOutputType && m_pInputType);
+}
+
+// High-level one-shot transform: copies the raw input bytes into a reusable
+// input sample, pushes it through ProcessInput(), and pulls any available
+// result into *ppSampleOut (AddRef'd; NULL if nothing produced yet).
+// Handles MF_E_NOTACCEPTING by draining output first, and handles
+// MF_E_TRANSFORM_STREAM_CHANGE (decoder resolution change) by renegotiating
+// the output type and retrying the input exactly once (bMediaChangeHandled
+// guards against an endless loop).
+HRESULT MFCodec::Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut)
+{
+	if(!pcInputPtr || !nInputSize || !ppSampleOut)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return E_INVALIDARG;
+	}
+
+	*ppSampleOut = NULL;
+
+	HRESULT hr = S_OK;
+
+	IMFMediaBuffer* pBufferIn = NULL;
+	BYTE* pBufferPtr = NULL;
+	BOOL bMediaChangeHandled = FALSE; // Endless loop guard
+
+	if(!m_pSampleIn)
+	{
+		// First call: allocate the reusable input sample.
+		CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &m_pSampleIn));
+		hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn);
+		if(FAILED(hr))
+		{
+			SafeRelease(&m_pSampleIn);
+			CHECK_HR(hr);
+		}
+	}
+	else
+	{
+		// Grow the cached buffer when the new payload is larger.
+		DWORD dwMaxLength = 0;
+		CHECK_HR(hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn));
+		CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
+		if(dwMaxLength < nInputSize)
+		{
+			CHECK_HR(hr = m_pSampleIn->RemoveAllBuffers());
+			SafeRelease(&pBufferIn);
+			CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
+			CHECK_HR(hr = m_pSampleIn->AddBuffer(pBufferIn));
+		}
+	}
+
+	CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
+	memcpy(pBufferPtr, pcInputPtr, nInputSize);
+	CHECK_HR(hr = pBufferIn->Unlock());
+	CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
+
+	if(m_eType == MFCodecType_Encoder)
+	{
+		CHECK_HR(hr = m_pSampleIn->SetSampleDuration(m_rtDuration));
+		CHECK_HR(hr = m_pSampleIn->SetSampleTime(m_rtStart)); // FIXME: use clock(), Same for custom source
+	}
+Label_ProcessInput:
+	hr = ProcessInput(m_pSampleIn);
+	while(hr == MF_E_NOTACCEPTING)
+	{
+		// Transform is full: drain one output, flag a discontinuity on the
+		// input, then retry.
+		TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
+		IMFSample* pSample = NULL;
+		hr = ProcessOutput(&pSample);
+		if(SUCCEEDED(hr) && pSample)
+		{
+			SafeRelease(ppSampleOut);
+			*ppSampleOut = pSample, pSample = NULL;
+
+			hr = m_pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
+			hr = ProcessInput(m_pSampleIn);
+		}
+	}
+	if(!*ppSampleOut)
+	{
+		hr = ProcessOutput(ppSampleOut);
+		if(hr == MF_E_TRANSFORM_STREAM_CHANGE) /* Handling Stream Changes: http://msdn.microsoft.com/en-us/library/windows/desktop/ee663587(v=vs.85).aspx */
+		{
+			TSK_DEBUG_INFO("[MF Codec] Stream changed");
+			if(m_eType == MFCodecType_Decoder)
+			{
+				// Adopt the decoder's newly advertised output type (new
+				// frame size) and cache the dimensions on the video codec.
+				IMFMediaType *pTypeOut = NULL;
+				hr = m_pMFT->GetOutputAvailableType(m_dwOutputID, 0, &pTypeOut);
+				if(SUCCEEDED(hr))
+				{
+					UINT32 uWidth = 0, uHeight = 0;
+					hr = MFGetAttributeSize(pTypeOut, MF_MT_FRAME_SIZE, &uWidth, &uHeight);
+					if(SUCCEEDED(hr))
+					{
+						TSK_DEBUG_INFO("[MF Decoder] New size: width=%u, height=%u", uWidth, uHeight);
+						hr = m_pMFT->SetOutputType(m_dwOutputID, pTypeOut, 0);
+						if(SUCCEEDED(hr))
+						{
+							SafeRelease(&m_pOutputType);
+							pTypeOut->AddRef();
+							m_pOutputType = pTypeOut;
+							if(m_eMediaType == MFCodecMediaType_Video)
+							{
+								dynamic_cast<MFCodecVideo*>(this)->m_nWidth = uWidth;
+								dynamic_cast<MFCodecVideo*>(this)->m_nHeight = uHeight;
+							}
+						}
+					}
+				}
+				SafeRelease(&pTypeOut);
+				if(SUCCEEDED(hr))
+				{
+					if(!bMediaChangeHandled)
+					{
+						bMediaChangeHandled = TRUE;
+						goto Label_ProcessInput;
+					}
+				}
+			}
+		}
+	}
+
+	// Advance the synthetic encoder timestamp by one frame duration.
+	m_rtStart += m_rtDuration;
+
+bail:
+	SafeRelease(&pBufferIn);
+	return hr;
+}
+
+// Maps the (compile-time) MF uncompressed pixel format to the tinymedia
+// chroma identifier. Only NV12 is supported.
+enum tmedia_chroma_e MFCodec::GetUncompressedChroma()
+{
+	if(kMFCodecUncompressedFormat != MFVideoFormat_NV12)
+	{
+		assert(false); // unsupported uncompressed format
+		return tmedia_chroma_none;
+	}
+	return tmedia_chroma_nv12;
+}
+
+//
+// MFCodecVideo
+//
+
+// Video specialization: forwards to the base constructor and zeroes the
+// cached frame-rate/size (filled in by Initialize() or a stream change).
+MFCodecVideo::MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+: MFCodec(eId, eType, pMFT)
+, m_nFrameRate(0)
+, m_nWidth(0)
+, m_nHeight(0)
+{
+	assert(m_eMediaType == MFCodecMediaType_Video);
+}
+
+// Nothing to release beyond what the base destructor handles.
+MFCodecVideo::~MFCodecVideo()
+{
+
+}
+
+// Negotiates input/output media types with the MFT and applies H.264
+// encoder/decoder tuning (low latency, no B-frames, CBR).
+// nFrameRate         : frames per second (also used to derive the per-frame
+//                      duration m_rtDuration).
+// nWidth/nHeight     : frame size in pixels.
+// nOutputBitRateInBps: target bitrate; 0 leaves the bitrate unset.
+// On success the frame rate and size are cached on the instance.
+HRESULT MFCodecVideo::Initialize(
+			UINT32 nFrameRate,
+			UINT32 nWidth,
+			UINT32 nHeight,
+			UINT32 nOutputBitRateInBps /*= 0*/
+		)
+{
+	assert(IsValid());
+
+	HRESULT hr = S_OK;
+
+	VARIANT var = {0};
+
+	// make sure identifiers are zero-based (other layouts not supported yet)
+	hr = m_pMFT->GetStreamIDs(1, &m_dwInputID, 1, &m_dwOutputID);
+	if (hr == E_NOTIMPL)
+	{
+		// E_NOTIMPL means the MFT uses the default zero-based IDs.
+		m_dwInputID = 0;
+		m_dwOutputID = 0;
+		hr = S_OK;
+	}
+	else if (FAILED(hr))
+	{
+		TSK_DEBUG_ERROR("The stream identifiers are not zero-based");
+		return hr;
+	}
+
+	m_rtStart = 0;
+	CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(nFrameRate, 1, &m_rtDuration));
+
+	CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
+	CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
+
+	// Encoders take uncompressed in / compressed out; decoders the reverse.
+	CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat));
+	CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat));
+
+	CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
+	CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+
+	CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
+	CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+
+	// Set bitrate
+	// Set (MF_MT_AVG_BITRATE) for MediaType
+	// Set (CODECAPI_AVEncCommonMeanBitRate) for H.264
+	hr = SetBitRate(nOutputBitRateInBps);
+
+	CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+	CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+	CHECK_HR(hr = MFSetAttributeSize(m_pOutputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
+	CHECK_HR(hr = MFSetAttributeSize(m_pInputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
+
+	CHECK_HR(hr = MFSetAttributeRatio(m_pOutputType, MF_MT_FRAME_RATE, nFrameRate, 1));
+	CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_FRAME_RATE, nFrameRate, 1));
+
+	CHECK_HR(hr = MFSetAttributeRatio(m_pOutputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+	CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+
+	// Encoder: Output format must be set before input
+	// Decoder: Input format must be set before output
+	if(m_eType == MFCodecType_Encoder)
+	{
+		CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
+		CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
+	}
+	else
+	{
+		CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
+		CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
+	}
+
+	if(m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)
+	{
+		if(m_eType == MFCodecType_Decoder)
+		{
+			// Only decoder support GetAttributes()
+			IMFAttributes* pAttributes = NULL;
+			hr = m_pMFT->GetAttributes(&pAttributes);
+			if(SUCCEEDED(hr))
+			{
+				// FIXME: Very strange that "CODECAPI_AVLowLatencyMode" only works with "IMFAttributes->" and not "ICodecAPI->SetValue()"
+				hr = pAttributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
+			}
+			SafeRelease(&pAttributes);
+		}
+		else
+		{
+			var.vt = VT_BOOL;
+			var.boolVal = VARIANT_TRUE;
+			hr = m_pCodecAPI->SetValue(&CODECAPI_AVLowLatencyMode, &var);
+
+			var.vt = VT_BOOL;
+			var.boolVal = VARIANT_TRUE;
+			hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonLowLatency, &var); // Correct for the decoder
+
+			// Disable B-Frames
+			var.vt = VT_UI4;
+			var.ulVal = 0;
+			hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVDefaultBPictureCount, &var);
+
+			// Constant bitrate (updated using RTCP)
+			var.vt = VT_UI4;
+			var.ulVal = eAVEncCommonRateControlMode_CBR;
+			hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
+		}
+
+		// Latency/B-frame/CBR tuning is best-effort: ignore failures.
+		hr = S_OK; // Not mandatory features
+	}
+
+bail:
+
+	if(SUCCEEDED(hr))
+	{
+		m_nFrameRate = nFrameRate;
+		m_nWidth = nWidth;
+		m_nHeight = nHeight;
+	}
+
+	return hr;
+}
+
+// Sets the encoder GOP size (frames between key frames) via ICodecAPI.
+// Only meaningful for the H.264 encoders; a silent no-op otherwise.
+HRESULT MFCodecVideo::SetGOPSize(UINT32 nFramesCount)
+{
+	assert(IsValid());
+
+	HRESULT hr = S_OK;
+
+	if(m_eType == MFCodecType_Encoder && (m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+	{
+		VARIANT var = {0};
+		var.vt = VT_UI4;
+		var.ulVal = nFramesCount; // FIX: VT_UI4 pairs with 'ulVal' (32-bit); 'ullVal' is the VT_UI8 member
+		CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVGOPSize, &var));
+	}
+
+bail:
+	return hr;
+}
+
+// Sets the target encoder bitrate (bits per second) on both the media type
+// (MF_MT_AVG_BITRATE) and, for H.264, the codec API
+// (CODECAPI_AVEncCommonMeanBitRate). No-op for decoders or when
+// nBitRateInBps is 0.
+HRESULT MFCodecVideo::SetBitRate(UINT32 nBitRateInBps)
+{
+	assert(IsValid());
+
+	HRESULT hr = S_OK;
+
+	if(nBitRateInBps > 0 && m_eType == MFCodecType_Encoder)
+	{
+		CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_AVG_BITRATE, nBitRateInBps));
+
+		if((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+		{
+			VARIANT var = {0};
+
+			// Set BitRate
+			var.vt = VT_UI4;
+			var.ulVal = nBitRateInBps; // FIX: VT_UI4 pairs with 'ulVal' (32-bit); 'ullVal' is the VT_UI8 member
+			CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var));
+		}
+	}
+
+bail:
+	return hr;
+}
+
+// Reports (via 'supported') whether the codec API can cap the slice size,
+// i.e. both CODECAPI_AVEncSliceControlMode and CODECAPI_AVEncSliceControlSize
+// are supported. Always returns S_OK.
+HRESULT MFCodecVideo::IsSetSliceMaxSizeInBytesSupported(BOOL &supported)
+{
+	HRESULT hr = S_OK;
+	supported = FALSE;
+
+	if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+	{
+#if defined(CODECAPI_AVEncSliceControlMode) && defined(CODECAPI_AVEncSliceControlSize)
+		if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
+			supported = TRUE;
+		}
+#endif
+	}
+	return hr;
+}
+
+// Caps H.264 slice size: enables bit-based slice control (mode 1) and sets
+// the maximum slice size, converted from bytes to bits. Silently no-ops when
+// the codec API does not support slice control or at build time when the
+// CODECAPI GUIDs are unavailable.
+HRESULT MFCodecVideo::SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes)
+{
+	assert(IsValid() && nSliceMaxSizeInBytes > 0);
+
+	HRESULT hr = S_OK;
+
+	if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+	{
+#if defined(CODECAPI_AVEncSliceControlMode) && defined(CODECAPI_AVEncSliceControlSize)
+		if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
+			VARIANT var = { 0 };
+			var.vt = VT_UI4;
+
+			var.ulVal = 1; // Bits
+			CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlMode, &var));
+
+			var.ulVal = (nSliceMaxSizeInBytes << 3); // From Bytes to Bits
+			CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlSize, &var));
+		}
+#else
+		CHECK_HR(hr = S_OK);
+#endif
+	}
+
+bail:
+	return hr;
+}
+
+// Asks the H.264 encoder to emit a key frame on the next input
+// (CODECAPI_AVEncVideoForceKeyFrame = 1). Silently no-ops when unsupported.
+HRESULT MFCodecVideo::RequestKeyFrame()
+{
+	assert(IsValid());
+
+	HRESULT hr = S_OK;
+
+	if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+	{
+#if defined(CODECAPI_AVEncVideoForceKeyFrame)
+		if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncVideoForceKeyFrame) == S_OK) {
+			VARIANT var = { 0 };
+
+			var.vt = VT_UI4;
+			var.ulVal = 1;
+			CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var));
+		}
+#else
+		CHECK_HR(hr = S_OK);
+#endif
+	}
+
+bail:
+	return hr;
+}
+
+//
+// MFCodecVideoH264
+//
+// H.264 specialization: stamps the MPEG-2/H.264 profile (Baseline or Main)
+// on the output media type. Only asserts on failure — callers are expected
+// to check IsValid() via the factory functions below.
+MFCodecVideoH264::MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+: MFCodecVideo(eId, eType, pMFT)
+{
+	assert(eId == MFCodecId_H264Base || eId == MFCodecId_H264Main);
+
+	HRESULT hr = S_OK;
+
+	if(m_pOutputType)
+	{
+		CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_MPEG2_PROFILE, (m_eId == MFCodecId_H264Base) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
+	}
+
+bail:
+	assert(SUCCEEDED(hr));
+}
+
+// Nothing to release beyond what the base destructors handle.
+MFCodecVideoH264::~MFCodecVideoH264()
+{
+
+}
+
+// Factory: H.264 Baseline profile codec (encoder or decoder).
+// Returns NULL when no suitable MFT could be created; otherwise the caller
+// owns one reference (release with Release()).
+MFCodecVideoH264* MFCodecVideoH264::CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+{
+	MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Base, eType, pMFT);
+	if(pCodec)
+	{
+		if(!pCodec->IsValid())
+		{
+			SafeRelease(&pCodec);
+		}
+	}
+	return pCodec;
+}
+
+// Factory: H.264 Main profile codec (encoder or decoder).
+// Returns NULL when no suitable MFT could be created; otherwise the caller
+// owns one reference (release with Release()).
+MFCodecVideoH264* MFCodecVideoH264::CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+{
+	MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Main, eType, pMFT);
+	if(pCodec)
+	{
+		if(!pCodec->IsValid())
+		{
+			SafeRelease(&pCodec);
+		}
+	}
+	return pCodec;
+}
diff --git a/plugins/pluginWinMF/internals/mf_codec.h b/plugins/pluginWinMF/internals/mf_codec.h
new file mode 100644
index 0000000..51b06dc
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_codec.h
@@ -0,0 +1,158 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_CODEC_H
+#define PLUGIN_WIN_MF_CODEC_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+#include <strmif.h>
+
+class MFSampleQueue;
+
+// Codec identifiers supported by this plugin.
+typedef enum MFCodecId_e
+{
+    MFCodecId_H264Base, // H.264 Baseline profile
+    MFCodecId_H264Main, // H.264 Main profile
+    MFCodecId_AAC       // AAC audio
+}
+MFCodecId_t;
+
+// Direction of the wrapped MFT.
+typedef enum MFCodecType_e
+{
+    MFCodecType_Encoder,
+    MFCodecType_Decoder
+}
+MFCodecType_t;
+
+// Media category handled by the codec.
+typedef enum MFCodecMediaType_e
+{
+    MFCodecMediaType_Audio,
+    MFCodecMediaType_Video
+}
+MFCodecMediaType_t;
+
+// Base wrapper around a Media Foundation Transform (MFT) used as an encoder
+// or decoder. Owns the MFT, its ICodecAPI interface and the negotiated
+// input/output media types, and supports both synchronous and asynchronous
+// (event-driven) MFTs. Reference-counted; concrete codecs are created through
+// the static factories on derived classes.
+class MFCodec : IMFAsyncCallback
+{
+protected:
+    MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+    virtual ~MFCodec();
+    HRESULT ProcessInput(IMFSample* pSample);
+    HRESULT ProcessOutput(IMFSample **ppSample);
+
+public:
+    virtual bool IsValid();
+    virtual bool IsReady();
+    // Pushes one raw input buffer through the MFT and retrieves the next output sample (if any).
+    virtual HRESULT Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
+    static enum tmedia_chroma_e GetUncompressedChroma();
+    inline IMFTransform* GetMFT(){ return m_pMFT; }
+    inline MFCodecId_t GetId() { return m_eId; }
+    inline MFCodecType_t GetType() { return m_eType; }
+    inline void setBundled(BOOL bBundled) { m_bIsBundled = bBundled; }
+
+    // IUnknown
+    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
+    STDMETHODIMP_(ULONG) AddRef();
+    STDMETHODIMP_(ULONG) Release();
+
+    // IMFAsyncCallback
+    STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue);
+    STDMETHODIMP Invoke(IMFAsyncResult *pAsyncResult);
+
+private:
+    long m_nRefCount;
+
+protected:
+    MFCodecId_t m_eId; // Codec Id
+    MFCodecType_t m_eType; // Codec type.
+    MFCodecMediaType_t m_eMediaType; // Codec Media type.
+    DWORD m_dwInputID; // Input stream ID.
+    DWORD m_dwOutputID; // Output stream ID.
+
+    GUID m_guidCompressedFormat; // Compressed Media format (e.g. MFVideoFormat_H264)
+    IMFTransform *m_pMFT; // Pointer to the encoder MFT.
+    ICodecAPI *m_pCodecAPI; // Pointer to CodecAPI.
+    IMFMediaType *m_pOutputType; // Output media type of the codec.
+    IMFMediaType *m_pInputType; // Input media type of the codec.
+
+    LONGLONG m_rtStart; // presumably running sample timestamp in 100-ns units -- confirm in mf_codec.cxx
+    UINT64 m_rtDuration; // presumably per-sample duration in 100-ns units -- confirm in mf_codec.cxx
+
+    IMFSample *m_pSampleIn;
+    IMFSample *m_pSampleOut;
+
+    MFSampleQueue *m_pSampleQueueAsyncInput; // pending inputs while an async MFT has not requested data yet
+    BOOL m_bIsBundled; // Bundled with a producer or consumer -> do not monitor events
+    BOOL m_bIsAsync; // whether the MFT is asynchronous (event-driven)
+    IMFMediaEventGenerator *m_pEventGenerator; // event source of an async MFT
+    BOOL m_bIsFirstFrame;
+    long m_nMETransformNeedInputCount, m_nMETransformHaveOutputCount; // async MFT event counters
+};
+
+
+// Video specialization of MFCodec: adds frame-rate/size initialization and
+// encoder controls (GOP size, bitrate, slice size, key-frame request).
+class MFCodecVideo : public MFCodec
+{
+    friend class MFCodec;
+protected:
+    MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+    virtual ~MFCodecVideo();
+
+public:
+    // Negotiates media types for the given frame rate and dimensions.
+    virtual HRESULT Initialize(
+        UINT32 nFrameRate,
+        UINT32 nWidth,
+        UINT32 nHeight,
+        UINT32 nOutputBitRateInBps = 0 // Only for encoders
+        );
+    virtual HRESULT SetGOPSize(UINT32 nFramesCount);
+    virtual HRESULT SetBitRate(UINT32 nBitRateInBps);
+    virtual HRESULT SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes);
+    virtual HRESULT RequestKeyFrame();
+
+    virtual HRESULT IsSetSliceMaxSizeInBytesSupported(BOOL &supported);
+    virtual inline UINT32 GetFrameRate() { return m_nFrameRate; }
+    virtual inline UINT32 GetWidth() { return m_nWidth; }
+    virtual inline UINT32 GetHeight() { return m_nHeight; }
+
+protected:
+    UINT32 m_nFrameRate; // frames per second
+    UINT32 m_nWidth;     // frame width in pixels
+    UINT32 m_nHeight;    // frame height in pixels
+};
+
+// H.264 codec (Baseline or Main profile). Instances are created only through
+// the static factories, which return NULL when the codec fails to initialize.
+class MFCodecVideoH264 : public MFCodecVideo
+{
+protected:
+    MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+
+public:
+    virtual ~MFCodecVideoH264();
+    static MFCodecVideoH264* CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT = NULL);
+    static MFCodecVideoH264* CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT = NULL);
+
+protected:
+
+};
+
+
+#endif /* PLUGIN_WIN_MF_CODEC_H */
diff --git a/plugins/pluginWinMF/internals/mf_codec_topology.cxx b/plugins/pluginWinMF/internals/mf_codec_topology.cxx
new file mode 100644
index 0000000..1ee2a16
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_codec_topology.cxx
@@ -0,0 +1,473 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_codec_topology.h"
+#include "mf_utils.h"
+
+#include "tsk_debug.h"
+
+//
+// MFCodecTopologySampleGrabberCB
+//
+
+// Sample-grabber sink callback: receives the samples produced at the end of
+// the codec topology and pushes a copy of each one into the owning
+// MFCodecTopology's thread-safe sample queue (consumed by ProcessOutput).
+// Holds a reference on the topology for its whole lifetime.
+class MFCodecTopologySampleGrabberCB : public IMFSampleGrabberSinkCallback
+{
+    long m_cRef;
+    MFCodecTopology *m_pCodecTopology;
+
+    MFCodecTopologySampleGrabberCB(MFCodecTopology *pCodecTopology)
+    : m_cRef(1)
+    {
+        m_pCodecTopology = pCodecTopology;
+        m_pCodecTopology->AddRef();
+    }
+    virtual ~MFCodecTopologySampleGrabberCB()
+    {
+        SafeRelease(&m_pCodecTopology);
+    }
+
+public:
+    // Create a new instance of the object. *ppCB receives a ref-counted pointer.
+    // NOTE: redundant class-name qualification on in-class definitions removed
+    // (MSVC-only extension, ill-formed for other compilers).
+    static HRESULT CreateInstance(MFCodecTopology *pCodecTopology, MFCodecTopologySampleGrabberCB **ppCB)
+    {
+        *ppCB = new (std::nothrow) MFCodecTopologySampleGrabberCB(pCodecTopology);
+
+        if (*ppCB == NULL) // FIX: was "ppCB == NULL", which never detects allocation failure
+        {
+            return E_OUTOFMEMORY;
+        }
+        return S_OK;
+    }
+
+    STDMETHODIMP QueryInterface(REFIID riid, void** ppv)
+    {
+        static const QITAB qit[] =
+        {
+            QITABENT(MFCodecTopologySampleGrabberCB, IMFSampleGrabberSinkCallback),
+            QITABENT(MFCodecTopologySampleGrabberCB, IMFClockStateSink),
+            { 0 }
+        };
+        return QISearch(this, qit, riid, ppv);
+    }
+
+    STDMETHODIMP_(ULONG) AddRef()
+    {
+        return InterlockedIncrement(&m_cRef);
+    }
+
+    STDMETHODIMP_(ULONG) Release()
+    {
+        ULONG cRef = InterlockedDecrement(&m_cRef);
+        if (cRef == 0)
+        {
+            delete this;
+        }
+        return cRef;
+
+    }
+
+    // IMFClockStateSink methods (trace-only: the grabber ignores the clock)
+
+    STDMETHODIMP OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
+    {
+        TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
+        return S_OK;
+    }
+
+    STDMETHODIMP OnClockStop(MFTIME hnsSystemTime)
+    {
+        TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
+        return S_OK;
+    }
+
+    STDMETHODIMP OnClockPause(MFTIME hnsSystemTime)
+    {
+        TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
+        return S_OK;
+    }
+
+    STDMETHODIMP OnClockRestart(MFTIME hnsSystemTime)
+    {
+        TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
+        return S_OK;
+    }
+
+    STDMETHODIMP OnClockSetRate(MFTIME hnsSystemTime, float flRate)
+    {
+        TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
+        return S_OK;
+    }
+
+    // IMFSampleGrabberSink methods.
+
+    STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock)
+    {
+        TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnSetPresentationClock");
+        return S_OK;
+    }
+
+    // Copies the grabbed bytes into a fresh IMFSample and queues it for
+    // MFCodecTopology::ProcessOutput. pSampleBuffer is only valid for the
+    // duration of this call, hence the copy.
+    STDMETHODIMP OnProcessSample(
+        REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+        LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+        DWORD dwSampleSize)
+    {
+        HRESULT hr = S_OK;
+        IMFSample *pSample = NULL;
+        IMFMediaBuffer* pMediaBuffer = NULL;
+        BYTE* _pcBufferPtr = NULL;
+
+        CHECK_HR(hr = MFUtils::CreateMediaSample(dwSampleSize, &pSample));
+        CHECK_HR(hr = pSample->SetSampleTime(llSampleTime));
+        CHECK_HR(hr = pSample->SetSampleDuration(llSampleDuration));
+        CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+        CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
+        memcpy(_pcBufferPtr, pSampleBuffer, dwSampleSize);
+        CHECK_HR(hr = pMediaBuffer->SetCurrentLength(dwSampleSize));
+        CHECK_HR(hr = pMediaBuffer->Unlock());
+
+        m_pCodecTopology->m_SampleQueue.Queue(pSample); // thread-safe; presumably AddRefs the sample -- confirm in mf_sample_queue.cxx
+
+bail:
+        SafeRelease(&pSample);
+        SafeRelease(&pMediaBuffer);
+        return hr;
+    }
+
+    STDMETHODIMP OnShutdown()
+    {
+        TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnShutdown");
+        return S_OK;
+    }
+};
+
+//
+// MFCodecTopology
+//
+
+
+// Constructs the topology wrapper around an existing codec. Because a
+// constructor cannot return an HRESULT, the result is reported through the
+// 'hr' out-parameter; callers must check it before using the object.
+MFCodecTopology::MFCodecTopology(MFCodec* pCodec, HRESULT &hr)
+: m_nRefCount(1)
+, m_bInitialized(FALSE)
+, m_bStarted(FALSE)
+, m_pCodec(NULL)
+, m_pSource(NULL)
+, m_pSession(NULL)
+, m_pTopologyFull(NULL)
+, m_pTopologyPartial(NULL)
+, m_pOutputType(NULL)
+, m_pInputType(NULL)
+, m_pGrabberCallback(NULL)
+, m_pGrabberActivate(NULL)
+, m_pTread(NULL)
+{
+    hr = S_OK;
+
+    if(!pCodec)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    m_pCodec = pCodec;
+    m_pCodec->AddRef(); // keep the codec alive for the topology's lifetime
+
+bail: ;
+}
+
+// Tears everything down (stops the session, joins the thread, releases COM objects).
+MFCodecTopology::~MFCodecTopology()
+{
+    DeInitialize();
+}
+
+// Standard interlocked reference counting.
+ULONG MFCodecTopology::AddRef()
+{
+    return InterlockedIncrement(&m_nRefCount);
+}
+
+ULONG MFCodecTopology::Release()
+{
+    ULONG uCount = InterlockedDecrement(&m_nRefCount);
+    if (uCount == 0)
+    {
+        delete this;
+    }
+    // For thread safety, return a temporary variable.
+    return uCount;
+}
+
+// This object is never handed out through COM interface discovery.
+HRESULT MFCodecTopology::QueryInterface(REFIID iid, void** ppv)
+{
+    return E_NOTIMPL;
+}
+
+// Runs the media session (Initialize() must have succeeded first) and spawns
+// the thread that pumps session events until MESessionEnded or an error.
+// Idempotent: returns S_OK immediately when already started.
+HRESULT MFCodecTopology::Start()
+{
+    HRESULT hr = S_OK;
+
+    if(m_bStarted)
+    {
+        return S_OK;
+    }
+
+    if(!m_bInitialized)
+    {
+        CHECK_HR(hr = E_FAIL);
+    }
+
+    CHECK_HR(hr = MFUtils::RunSession(m_pSession, m_pTopologyFull));
+
+    // Start asynchronous watcher thread
+    m_bStarted = TRUE; // set before thread creation so the thread's isStarted() loop runs
+    int ret = tsk_thread_create(&m_pTread, MFCodecTopology::RunSessionThread, this);
+    if(ret != 0)
+    {
+        TSK_DEBUG_ERROR("Failed to create thread");
+        m_bStarted = FALSE;
+        if(m_pTread)
+        {
+            tsk_thread_join(&m_pTread);
+        }
+        MFUtils::ShutdownSession(m_pSession, m_pSource);
+        CHECK_HR(hr = E_FAIL);
+    }
+
+    // FIXME: arbitrary wait for the session to spin up; should instead wait
+    // for the MESessionStarted event before returning.
+    Sleep(2000);
+
+bail:
+    return hr;
+}
+
+// Stops the session in two phases: first shut the session down (which wakes
+// the blocking GetEvent() call in the watcher thread), join the thread, then
+// shut the source down. Idempotent when not started.
+HRESULT MFCodecTopology::Stop()
+{
+    HRESULT hr = S_OK;
+
+    if(!m_bStarted)
+    {
+        return S_OK;
+    }
+
+    m_bStarted = FALSE;
+    hr = MFUtils::ShutdownSession(m_pSession, NULL); // stop session to wakeup the asynchronous thread
+    if(m_pTread)
+    {
+        tsk_thread_join(&m_pTread);
+    }
+    hr = MFUtils::ShutdownSession(NULL, m_pSource);
+
+    return hr;
+}
+
+// Builds the processing pipeline: custom source (fed by ProcessInput) ->
+// codec MFT -> sample-grabber sink (feeding m_SampleQueue), inside a media
+// session. Fails if already initialized; rolls everything back on error.
+HRESULT MFCodecTopology::Initialize()
+{
+    HRESULT hr = S_OK;
+    IMFAttributes* pSessionAttributes = NULL;
+
+    if(m_bInitialized)
+    {
+        CHECK_HR(hr = E_FAIL);
+    }
+
+    // Set session attributes
+    CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+    CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+    // Get input and output type
+    CHECK_HR(hr = m_pCodec->GetInputType(&m_pInputType));
+    CHECK_HR(hr = m_pCodec->GetOutputType(&m_pOutputType));
+
+    // Create custom source
+    CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&m_pSource, m_pInputType));
+
+    // Create the sample grabber sink.
+    CHECK_HR(hr = MFCodecTopologySampleGrabberCB::CreateInstance(this, &m_pGrabberCallback));
+    CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(m_pOutputType, m_pGrabberCallback, &m_pGrabberActivate));
+
+    // To run as fast as possible, set this attribute (requires Windows 7 or later):
+    CHECK_HR(hr = m_pGrabberActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+    // Create the Media Session.
+    CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &m_pSession));
+
+    // Create the topology.
+    CHECK_HR(hr = MFUtils::CreateTopology(
+        m_pSource,
+        m_pCodec->GetMFT(),
+        m_pGrabberActivate,
+        NULL, // no preview
+        m_pOutputType,
+        &m_pTopologyPartial));
+    // Resolve topology (adds video processors if needed).
+    CHECK_HR(hr = MFUtils::ResolveTopology(m_pTopologyPartial, &m_pTopologyFull));
+
+    m_bInitialized = TRUE;
+
+bail:
+    SafeRelease(&pSessionAttributes);
+
+    if(FAILED(hr))
+    {
+        DeInitialize();
+    }
+
+    return hr;
+}
+
+// Watcher thread body: pumps media-session events until the topology is
+// stopped, the session ends (MESessionEnded) or an event carries a failure
+// status. GetEvent() blocks, so Stop() shuts the session down to wake it.
+void* TSK_STDCALL MFCodecTopology::RunSessionThread(void *pArg)
+{
+    MFCodecTopology *pSelf = (MFCodecTopology *)pArg;
+    HRESULT hrStatus = S_OK;
+    HRESULT hr = S_OK;
+    IMFMediaEvent *pEvent = NULL;
+    MediaEventType met;
+
+    TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - ENTER");
+
+    while(pSelf->isStarted())
+    {
+        CHECK_HR(hr = pSelf->m_pSession->GetEvent(0, &pEvent));
+        CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+        CHECK_HR(hr = pEvent->GetType(&met));
+
+        if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
+        {
+            TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+            hr = hrStatus;
+            goto bail;
+        }
+        if (met == MESessionEnded)
+        {
+            break;
+        }
+        SafeRelease(&pEvent);
+    }
+
+bail:
+    // FIX: release the last event here; it leaked on the MESessionEnded break
+    // and on every error path (GetEvent returns a reference the caller owns).
+    SafeRelease(&pEvent);
+    TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - EXIT");
+
+    return NULL;
+}
+
+// Stops the session and releases every object created by Initialize().
+// Safe to call repeatedly (SafeRelease nulls the pointers it releases).
+HRESULT MFCodecTopology::DeInitialize()
+{
+    Stop(); // also joins the watcher thread when running
+
+    SafeRelease(&m_pCodec); // FIX: m_pCodec was released twice (duplicate line removed)
+    SafeRelease(&m_pSource);
+    SafeRelease(&m_pSession);
+    SafeRelease(&m_pTopologyFull);
+    SafeRelease(&m_pTopologyPartial);
+    SafeRelease(&m_pOutputType);
+    SafeRelease(&m_pInputType);
+    SafeRelease(&m_pGrabberCallback);
+    SafeRelease(&m_pGrabberActivate);
+
+    if(m_pTread)
+    {
+        tsk_thread_join(&m_pTread);
+    }
+
+    m_SampleQueue.Clear();
+
+    m_bInitialized = FALSE;
+
+    return S_OK;
+}
+
+// Feeds one video sample into the topology: the sample's buffer is copied
+// into the custom source (CopyVideoBuffer), so the caller keeps ownership of
+// pSample. Lazily starts the topology on first use. Video only.
+HRESULT MFCodecTopology::ProcessInput(IMFSample* pSample)
+{
+    HRESULT hr = S_OK;
+    IMFMediaBuffer* pMediaBuffer = NULL;
+    BYTE* _pcBufferPtr = NULL;
+    // FIX: declared before the first CHECK_HR so no goto jumps over an
+    // initialization (ill-formed in standard C++).
+    DWORD dwDataLength = 0;
+    BOOL bLocked = FALSE;
+
+    if(!pSample)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    if(m_pCodec->GetMediaType() != MFCodecMediaType_Video)
+    {
+        CHECK_HR(hr = E_NOTIMPL);
+    }
+
+    if(!m_bStarted)
+    {
+        CHECK_HR(hr = Start());
+    }
+
+    CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+    CHECK_HR(hr = pMediaBuffer->GetCurrentLength(&dwDataLength));
+    if(dwDataLength > 0)
+    {
+        CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
+        // FIX: only mark locked after Lock() succeeded. Previously the flag
+        // was set before Lock(), so any failure triggered Unlock() on a
+        // buffer that was never locked.
+        bLocked = TRUE;
+        CHECK_HR(hr = m_pSource->CopyVideoBuffer(
+            dynamic_cast<MFCodecVideo*>(m_pCodec)->GetWidth(),
+            dynamic_cast<MFCodecVideo*>(m_pCodec)->GetHeight(),
+            _pcBufferPtr, dwDataLength));
+    }
+
+bail:
+    if(bLocked)
+    {
+        pMediaBuffer->Unlock();
+    }
+    SafeRelease(&pMediaBuffer);
+    return hr;
+}
+
+// Dequeues the next processed sample, if any. When the queue is empty the
+// call still returns S_OK and *ppSample is left untouched; callers must
+// initialize it to NULL and check it after the call.
+HRESULT MFCodecTopology::ProcessOutput(IMFSample **ppSample)
+{
+    HRESULT hr = S_OK;
+
+    if(!ppSample)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    if(!m_SampleQueue.IsEmpty())
+    {
+        CHECK_HR(hr = m_SampleQueue.Dequeue(ppSample)); // thread-safe
+    }
+
+bail:
+    return hr;
+}
+
+//
+// MFCodecVideoTopology
+//
+
+
+// Video-only topology: asserts the codec is a video codec; construction
+// status is reported through 'hr' (see MFCodecTopology).
+MFCodecVideoTopology::MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr)
+: MFCodecTopology(pCodec, hr)
+, m_nWidth(0)
+, m_nHeight(0)
+{
+    assert(pCodec->GetMediaType() == MFCodecMediaType_Video);
+}
+
+MFCodecVideoTopology::~MFCodecVideoTopology()
+{
+
+}
+
+
diff --git a/plugins/pluginWinMF/internals/mf_codec_topology.h b/plugins/pluginWinMF/internals/mf_codec_topology.h
new file mode 100644
index 0000000..c5d2f34
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_codec_topology.h
@@ -0,0 +1,87 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_CODEC_TOPOLOGY_H
+#define PLUGIN_WIN_MF_CODEC_TOPOLOGY_H
+
+#include "mf_codec.h"
+#include "mf_custom_src.h"
+
+#include "tsk_thread.h"
+
+class MFCodecTopologySampleGrabberCB;
+
+// Wraps a codec MFT in a full media-session pipeline:
+// custom source -> codec -> sample-grabber sink -> thread-safe sample queue.
+// ProcessInput() feeds raw buffers in, ProcessOutput() pops processed samples.
+// Reference-counted; construction status is returned through the 'hr' parameter.
+class MFCodecTopology : IUnknown
+{
+    friend class MFCodecTopologySampleGrabberCB;
+public:
+    MFCodecTopology(MFCodec* pCodec, HRESULT &hr);
+    virtual ~MFCodecTopology();
+
+    virtual HRESULT Initialize();
+    virtual HRESULT DeInitialize();
+
+    virtual HRESULT ProcessInput(IMFSample* pSample);
+    virtual HRESULT ProcessOutput(IMFSample **ppSample);
+
+    // IUnknown
+    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
+    STDMETHODIMP_(ULONG) AddRef();
+    STDMETHODIMP_(ULONG) Release();
+
+    inline BOOL isStarted() { return m_bStarted; }
+    inline BOOL isInitialized() { return m_bInitialized; }
+
+private:
+    // Entry point of the watcher thread pumping session events.
+    static void* TSK_STDCALL RunSessionThread(void *pArg);
+
+protected:
+    HRESULT Start();
+    HRESULT Stop();
+
+private:
+    long m_nRefCount;
+
+protected:
+    BOOL m_bInitialized;
+    BOOL m_bStarted;
+    MFCodec* m_pCodec;                // wrapped codec (AddRef'd)
+    CMFSource *m_pSource;             // custom push source fed by ProcessInput
+    IMFMediaSession *m_pSession;
+    IMFTopology *m_pTopologyFull;     // resolved topology actually run
+    IMFTopology *m_pTopologyPartial;  // topology before ResolveTopology()
+    IMFMediaType *m_pOutputType;
+    IMFMediaType *m_pInputType;
+    MFCodecTopologySampleGrabberCB *m_pGrabberCallback;
+    IMFActivate *m_pGrabberActivate;
+    tsk_thread_handle_t* m_pTread;    // watcher thread (name kept as-is: "Tread" spelling used throughout)
+    SampleQueue m_SampleQueue;        // processed samples awaiting ProcessOutput
+};
+
+// Video specialization of MFCodecTopology (asserts the codec is video).
+class MFCodecVideoTopology : public MFCodecTopology
+{
+public:
+    MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr);
+    virtual ~MFCodecVideoTopology();
+
+private:
+    UINT32 m_nWidth, m_nHeight; // initialized to 0 in the constructor
+};
+
+
+#endif /* PLUGIN_WIN_MF_CODEC_TOPOLOGY_H */
diff --git a/plugins/pluginWinMF/internals/mf_custom_src.cxx b/plugins/pluginWinMF/internals/mf_custom_src.cxx
new file mode 100644
index 0000000..1de9904
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_custom_src.cxx
@@ -0,0 +1,1722 @@
+/*
+* Copyright (C) Microsoft Corporation. All rights reserved.
+* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+
+// Implementing custom source: http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134(v=vs.85).aspx
+
+#include "mf_custom_src.h"
+#include "mf_utils.h"
+
+#include "tsk_debug.h"
+
+#include <assert.h>
+
+//
+// Locking:
+// The source and stream objects both have critical sections. If you
+// hold both locks, the source lock must be held FIRST, to avoid
+// deadlocks.
+//
+// Shutdown:
+// Most methods start by calling CheckShutdown(). This method
+// fails if the source was shut down.
+//
+
+
+
+// Rounds 'num' up to the nearest multiple of 'mult'. Requires num >= 0 and a
+// positive 'mult'; returns 'num' unchanged when it is already a multiple.
+template <class T>
+T AlignUp(T num, T mult)
+{
+    assert(num >= 0);
+    T tmp = num + mult - 1;
+    return tmp - (tmp % mult);
+}
+
+
+
+// Helper Functions
+
+// Forward declarations of helpers defined later in this file.
+HRESULT QueueEventWithIUnknown(
+    IMFMediaEventGenerator *pMEG,
+    MediaEventType meType,
+    HRESULT hrStatus,
+    IUnknown *pUnk);
+
+LONGLONG BufferSizeFromAudioDuration(const WAVEFORMATEX *pWav, LONGLONG duration);
+
+// C-style entry point used by the plugin registration code.
+HRESULT CMFSource_CreateInstance(REFIID iid, void **ppMFT)
+{
+    return CMFSource::CreateInstance(iid, ppMFT);
+}
+
+
+//-------------------------------------------------------------------
+// Name: CreateInstance
+// Description: Static method to create an instance of the source.
+//
+// iid: IID of the requested interface on the source.
+// ppSource: Receives a ref-counted pointer to the source.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::CreateInstance(REFIID iid, void **ppSource) // Called when source used as plugin
+{
+    return CreateInstanceEx(iid, ppSource, NULL);
+}
+
+HRESULT CMFSource::CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType) // Called when source directly called
+{
+    if (ppSource == NULL)
+    {
+        return E_POINTER;
+    }
+
+    HRESULT hr = S_OK;
+    CMFSource *pSource = new (std::nothrow) CMFSource(hr, pMediaType); // Created with ref count = 1.
+    if (pSource == NULL)
+    {
+        return E_OUTOFMEMORY;
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pSource->QueryInterface(iid, ppSource);
+        if(SUCCEEDED(hr))
+        {
+            // NOTE(review): this extra AddRef leaves the returned object with a
+            // refcount of 2 after the SafeRelease below, so a single Release by
+            // the caller will never destroy it. Possibly a deliberate
+            // keep-alive for the session shutdown sequence -- confirm before
+            // removing.
+            ((CMFSource*)(*ppSource))->AddRef();
+        }
+    }
+
+    SafeRelease(&pSource);
+    return hr;
+}
+
+
+//-------------------------------------------------------------------
+// CMFSource constructor.
+//
+// hr: If the constructor fails, this value is set to a failure code.
+//-------------------------------------------------------------------
+
+CMFSource::CMFSource(HRESULT& hr, IMFMediaType *pMediaType)
+    : m_nRefCount(1),
+    m_pEventQueue(NULL),
+    m_pPresentationDescriptor(NULL),
+    m_IsShutdown(FALSE),
+    m_state(STATE_STOPPED),
+    m_pStream(NULL),
+    m_pMediaType(NULL)
+{
+    // Create the media event queue. Failure is reported through 'hr'.
+    hr = MFCreateEventQueue(&m_pEventQueue);
+
+    // Keep a reference on the caller-supplied media type (may be NULL when
+    // used as a registered plugin; see CreateInstance).
+    if(pMediaType)
+    {
+        m_pMediaType = pMediaType;
+        pMediaType->AddRef();
+    }
+
+    InitializeCriticalSection(&m_critSec);
+}
+
+
+//-------------------------------------------------------------------
+// CMFSource destructor.
+//-------------------------------------------------------------------
+
+
+// Shutdown() must have been called before the last Release(), otherwise the
+// asserts fire: the event queue and stream are released there, not here.
+CMFSource::~CMFSource()
+{
+    assert(m_IsShutdown);
+    assert(m_nRefCount == 0);
+    SafeRelease(&m_pMediaType);
+
+    DeleteCriticalSection(&m_critSec);
+}
+
+// IMFCustomSource methods
+
+// Copies one raw video frame into the source's stream, which will deliver it
+// as the next sample. Fails with E_NOT_VALID_STATE before the stream exists
+// (i.e. before Start() has created it).
+// NOTE(review): m_pStream is read without taking m_critSec -- looks racy
+// against Shutdown() releasing the stream; confirm the caller serializes.
+HRESULT CMFSource::CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize)
+{
+    if(!pBufferPtr)
+    {
+        TSK_DEBUG_ERROR("Invalid buffer pointer");
+        return E_POINTER;
+    }
+
+    if(!nWidth || !nHeight || !nBufferSize)
+    {
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return E_INVALIDARG;
+    }
+    if(m_pStream)
+    {
+        return m_pStream->CopyVideoBuffer(nWidth, nHeight, pBufferPtr, nBufferSize);
+    }
+    else
+    {
+        TSK_DEBUG_ERROR("No stream associated to this source");
+        return E_NOT_VALID_STATE;
+    }
+}
+
+// IUnknown methods
+
+// Standard interlocked reference counting.
+ULONG CMFSource::AddRef()
+{
+    return InterlockedIncrement(&m_nRefCount);
+}
+
+ULONG CMFSource::Release()
+{
+    ULONG uCount = InterlockedDecrement(&m_nRefCount);
+    if (uCount == 0)
+    {
+        delete this;
+    }
+    // For thread safety, return a temporary variable.
+    return uCount;
+}
+
+// Exposes IMFMediaEventGenerator and IMFMediaSource (IUnknown via QISearch).
+HRESULT CMFSource::QueryInterface(REFIID iid, void** ppv)
+{
+    static const QITAB qit[] =
+    {
+        QITABENT(CMFSource, IMFMediaEventGenerator),
+        QITABENT(CMFSource, IMFMediaSource),
+        { 0 }
+    };
+    return QISearch(this, qit, iid, ppv);
+}
+
+
+// IMFMediaEventGenerator methods
+//
+// All of the IMFMediaEventGenerator methods do the following:
+// 1. Check for shutdown status.
+// 2. Call the event generator helper object.
+
+// Delegates to the event queue after the shutdown check, under the source lock.
+HRESULT CMFSource::BeginGetEvent(IMFAsyncCallback* pCallback, IUnknown* punkState)
+{
+    HRESULT hr = S_OK;
+
+    EnterCriticalSection(&m_critSec);
+
+    hr = CheckShutdown();
+
+    if (SUCCEEDED(hr))
+    {
+        hr = m_pEventQueue->BeginGetEvent(pCallback, punkState);
+    }
+
+    LeaveCriticalSection(&m_critSec);
+
+    return hr;
+}
+
+// Completes an asynchronous BeginGetEvent call.
+HRESULT CMFSource::EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent)
+{
+    HRESULT hr = S_OK;
+
+    EnterCriticalSection(&m_critSec);
+
+    hr = CheckShutdown();
+
+    if (SUCCEEDED(hr))
+    {
+        hr = m_pEventQueue->EndGetEvent(pResult, ppEvent);
+    }
+
+    LeaveCriticalSection(&m_critSec);
+
+    return hr;
+}
+
+// Synchronous (possibly blocking) event retrieval.
+HRESULT CMFSource::GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent)
+{
+    // NOTE: GetEvent can block indefinitely, so we don't hold the
+    // CMFSource lock. This requires some juggling with the
+    // event queue pointer.
+
+    HRESULT hr = S_OK;
+
+    IMFMediaEventQueue *pQueue = NULL;
+
+    EnterCriticalSection(&m_critSec);
+
+    // Check shutdown
+    hr = CheckShutdown();
+
+    if (SUCCEEDED(hr))
+    {
+        // Take a local reference so the queue survives even if the source is
+        // shut down while we wait outside the lock.
+        pQueue = m_pEventQueue;
+        pQueue->AddRef();
+    }
+
+    LeaveCriticalSection(&m_critSec);
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pQueue->GetEvent(dwFlags, ppEvent);
+    }
+
+    SafeRelease(&pQueue);
+    return hr;
+}
+
+// Queues an event with a PROPVARIANT payload on the source's event queue.
+HRESULT CMFSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT* pvValue)
+{
+    HRESULT hr = S_OK;
+
+    EnterCriticalSection(&m_critSec);
+
+    hr = CheckShutdown();
+
+    if (SUCCEEDED(hr))
+    {
+        hr = m_pEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
+    }
+
+    LeaveCriticalSection(&m_critSec);
+
+    return hr;
+}
+
+
+// IMFMediaSource methods
+
+
+//-------------------------------------------------------------------
+// Name: CreatePresentationDescriptor
+// Description: Returns a copy of the default presentation descriptor.
+//-------------------------------------------------------------------
+
+// Lazily builds the default presentation descriptor (private overload) on the
+// first call, then hands the caller a clone of it.
+HRESULT CMFSource::CreatePresentationDescriptor(IMFPresentationDescriptor** ppPresentationDescriptor)
+{
+    if (ppPresentationDescriptor == NULL)
+    {
+        return E_POINTER;
+    }
+
+    EnterCriticalSection(&m_critSec);
+
+    HRESULT hr = S_OK;
+
+    hr = CheckShutdown();
+
+    if (SUCCEEDED(hr))
+    {
+        if (m_pPresentationDescriptor == NULL)
+        {
+            hr = CreatePresentationDescriptor();
+        }
+    }
+
+    // Clone our default presentation descriptor.
+    if (SUCCEEDED(hr))
+    {
+        hr = m_pPresentationDescriptor->Clone(ppPresentationDescriptor);
+    }
+
+    LeaveCriticalSection(&m_critSec);
+
+    return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: GetCharacteristics
+// Description: Returns flags the describe the source.
+//-------------------------------------------------------------------
+
+// Reports the source as a live, pausable source (no seeking advertised).
+HRESULT CMFSource::GetCharacteristics(DWORD* pdwCharacteristics)
+{
+    if (pdwCharacteristics == NULL)
+    {
+        return E_POINTER;
+    }
+
+    EnterCriticalSection(&m_critSec);
+
+    HRESULT hr = S_OK;
+
+    hr = CheckShutdown();
+
+    if (SUCCEEDED(hr))
+    {
+        *pdwCharacteristics = MFMEDIASOURCE_CAN_PAUSE | MFMEDIASOURCE_IS_LIVE;
+    }
+
+    LeaveCriticalSection(&m_critSec);
+
+    return hr;
+}
+
+//-------------------------------------------------------------------
+// Name: Start
+// Description: Switches to running state.
+//-------------------------------------------------------------------
+
+// Transitions the source to the running state. Handles three cases: a fresh
+// start (VT_EMPTY from stopped), a restart from the current position
+// (VT_EMPTY while running/paused) and a seek (VT_I8 while running/paused).
+// Queues the MENewStream/MEUpdatedStream, MESourceStarted/MESourceSeeked and
+// MEStreamStarted/MEStreamSeeked events in the order the pipeline expects.
+HRESULT CMFSource::Start(
+    IMFPresentationDescriptor* pPresentationDescriptor,
+    const GUID* pguidTimeFormat,
+    const PROPVARIANT* pvarStartPosition
+    )
+{
+    HRESULT hr = S_OK;
+    LONGLONG llStartOffset = 0;
+    BOOL bIsSeek = FALSE;
+    BOOL bIsRestartFromCurrentPosition = FALSE;
+    BOOL bQueuedStartEvent = FALSE;
+
+    IMFMediaEvent *pEvent = NULL;
+
+    PROPVARIANT var;
+    PropVariantInit(&var);
+
+    // Check parameters.
+    // Start position and presentation descriptor cannot be NULL.
+    if (pvarStartPosition == NULL || pPresentationDescriptor == NULL)
+    {
+        return E_INVALIDARG;
+    }
+
+    // Check the time format. Must be "reference time" units.
+    if ((pguidTimeFormat != NULL) && (*pguidTimeFormat != GUID_NULL))
+    {
+        // Unrecognized time format GUID.
+        return MF_E_UNSUPPORTED_TIME_FORMAT;
+    }
+
+    EnterCriticalSection(&m_critSec);
+
+    // Fail if the source is shut down.
+    CHECK_HR(hr = CheckShutdown());
+
+    // Check the start position.
+    if (pvarStartPosition->vt == VT_I8)
+    {
+        // Start position is given in pvarStartPosition in 100-ns units.
+        llStartOffset = pvarStartPosition->hVal.QuadPart;
+
+        if (m_state != STATE_STOPPED)
+        {
+            // Source is running or paused, so this is a seek.
+            bIsSeek = TRUE;
+        }
+    }
+    else if (pvarStartPosition->vt == VT_EMPTY)
+    {
+        // Start position is "current position".
+        // For stopped, that means 0. Otherwise, use the current position.
+        if (m_state == STATE_STOPPED)
+        {
+            llStartOffset = 0;
+        }
+        else
+        {
+            llStartOffset = GetCurrentPosition();
+            bIsRestartFromCurrentPosition = TRUE;
+        }
+    }
+    else
+    {
+        // We don't support this time format.
+        hr = MF_E_UNSUPPORTED_TIME_FORMAT;
+        goto bail;
+    }
+
+    // Validate the caller's presentation descriptor.
+    CHECK_HR(hr = ValidatePresentationDescriptor(pPresentationDescriptor));
+
+    // Sends the MENewStream or MEUpdatedStream event.
+    CHECK_HR(hr = QueueNewStreamEvent(pPresentationDescriptor));
+
+    // Notify the stream of the new start time.
+    // NOTE(review): m_pStream is dereferenced here without a NULL check while
+    // later code guards with "if (m_pStream)" -- presumably
+    // QueueNewStreamEvent() guarantees the stream exists; confirm.
+    CHECK_HR(hr = m_pStream->SetPosition(llStartOffset));
+
+    // Send Started or Seeked events.
+
+    var.vt = VT_I8;
+    var.hVal.QuadPart = llStartOffset;
+
+    // Send the source event.
+    if (bIsSeek)
+    {
+        CHECK_HR(hr = QueueEvent(MESourceSeeked, GUID_NULL, hr, &var));
+    }
+    else
+    {
+        // For starting, if we are RESTARTING from the current position and our
+        // previous state was running/paused, then we need to add the
+        // MF_EVENT_SOURCE_ACTUAL_START attribute to the event. This requires
+        // creating the event object first.
+
+        // Create the event.
+        CHECK_HR(hr = MFCreateMediaEvent(MESourceStarted, GUID_NULL, hr, &var, &pEvent));
+
+        // For restarts, set the actual start time as an attribute.
+        if (bIsRestartFromCurrentPosition)
+        {
+            CHECK_HR(hr = pEvent->SetUINT64(MF_EVENT_SOURCE_ACTUAL_START, llStartOffset));
+        }
+
+        // Now queue the event.
+        CHECK_HR(hr = m_pEventQueue->QueueEvent(pEvent));
+    }
+
+    bQueuedStartEvent = TRUE;
+
+    // Send the stream event.
+    if (m_pStream)
+    {
+        if (bIsSeek)
+        {
+            CHECK_HR(hr = m_pStream->QueueEvent(MEStreamSeeked, GUID_NULL, hr, &var));
+        }
+        else
+        {
+            CHECK_HR(hr = m_pStream->QueueEvent(MEStreamStarted, GUID_NULL, hr, &var));
+        }
+    }
+
+    if (bIsSeek)
+    {
+        // For seek requests, flush any queued samples.
+        CHECK_HR(hr = m_pStream->Flush());
+    }
+    else
+    {
+        // Otherwise, deliver any queued samples.
+        CHECK_HR(hr = m_pStream->DeliverQueuedSamples());
+    }
+
+    // Initialize Stream parameters
+    CHECK_HR(hr = m_pStream->InitializeParams());
+
+    m_state = STATE_STARTED;
+
+bail:
+
+    // If a failure occurred and we have not sent the
+    // MESourceStarted/MESourceSeeked event yet, then it is
+    // OK just to return an error code from Start().
+
+    // If a failure occurred and we have already sent the
+    // event (with a success code), then we need to raise an
+    // MEError event.
+
+    if (FAILED(hr) && bQueuedStartEvent)
+    {
+        hr = QueueEvent(MEError, GUID_NULL, hr, &var);
+    }
+
+    PropVariantClear(&var);
+    SafeRelease(&pEvent);
+
+    LeaveCriticalSection(&m_critSec);
+
+    return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: Pause
+// Description: Switches to paused state.
+//-------------------------------------------------------------------
+
+// Transitions to the paused state; only legal from the started state
+// (MF_E_INVALID_STATE_TRANSITION otherwise). Queues MEStreamPaused then
+// MESourcePaused, stream event first per the MF event ordering convention.
+HRESULT CMFSource::Pause()
+{
+    EnterCriticalSection(&m_critSec);
+
+    HRESULT hr = S_OK;
+
+    hr = CheckShutdown();
+
+    // Pause is only allowed from started state.
+    if (SUCCEEDED(hr))
+    {
+        if (m_state != STATE_STARTED)
+        {
+            hr = MF_E_INVALID_STATE_TRANSITION;
+        }
+    }
+
+    // Send the appropriate events.
+    if (SUCCEEDED(hr))
+    {
+        if (m_pStream)
+        {
+            hr = m_pStream->QueueEvent(MEStreamPaused, GUID_NULL, S_OK, NULL);
+        }
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = QueueEvent(MESourcePaused, GUID_NULL, S_OK, NULL);
+    }
+
+    // Update our state.
+    if (SUCCEEDED(hr))
+    {
+        m_state = STATE_PAUSED;
+    }
+
+    LeaveCriticalSection(&m_critSec);
+
+    return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: Stop
+// Description: Switches to stopped state.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::Stop()
+{
+ EnterCriticalSection(&m_critSec);
+
+ HRESULT hr = S_OK;
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ // Update our state.
+ m_state = STATE_STOPPED;
+
+ // Flush all queued samples.
+ hr = m_pStream->Flush();
+ }
+
+ //
+ // Queue events.
+ //
+
+ if (SUCCEEDED(hr))
+ {
+ if (m_pStream)
+ {
+ hr = m_pStream->QueueEvent(MEStreamStopped, GUID_NULL, S_OK, NULL);
+ }
+ }
+ if (SUCCEEDED(hr))
+ {
+ hr = QueueEvent(MESourceStopped, GUID_NULL, S_OK, NULL);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: Shutdown
+// Description: Releases resources.
+//
+// The source and stream objects hold reference counts on each other.
+// To avoid memory leaks caused by circular ref. counts, the Shutdown
+// method releases the pointer to the stream.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::Shutdown()
+{
+ EnterCriticalSection(&m_critSec);
+
+ HRESULT hr = S_OK;
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ // Shut down the stream object.
+ if (m_pStream)
+ {
+ (void)m_pStream->Shutdown();
+ }
+
+ // Shut down the event queue.
+ if (m_pEventQueue)
+ {
+ (void)m_pEventQueue->Shutdown();
+ }
+
+ // Release objects.
+ SafeRelease(&m_pStream);
+ SafeRelease(&m_pEventQueue);
+ SafeRelease(&m_pPresentationDescriptor);
+
+ // Set our shutdown flag.
+ m_IsShutdown = TRUE;
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+/////////////// Private CMFSource methods
+
+// NOTE: These private methods do not hold the source's critical
+// section. The caller must ensure the critical section is held.
+// Also, these methods do not check for shut-down.
+
+
+//-------------------------------------------------------------------
+// Name: CreatePresentationDescriptor
+// Description: Creates the default presentation descriptor.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::CreatePresentationDescriptor()
+{
+ HRESULT hr = S_OK;
+
+ IMFStreamDescriptor *pStreamDescriptor = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+
+ // Create the stream descriptor.
+ hr = MFCreateStreamDescriptor(
+ 0, // stream identifier
+ 1, // Number of media types.
+ &m_pMediaType, // Array of media types
+ &pStreamDescriptor
+ );
+
+ // Set the default media type on the media type handler.
+ if (SUCCEEDED(hr))
+ {
+ hr = pStreamDescriptor->GetMediaTypeHandler(&pHandler);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pHandler->SetCurrentMediaType(m_pMediaType);
+ }
+
+ // Create the presentation descriptor.
+ if (SUCCEEDED(hr))
+ {
+ hr = MFCreatePresentationDescriptor(
+ 1, // Number of stream descriptors
+ &pStreamDescriptor, // Array of stream descriptors
+ &m_pPresentationDescriptor
+ );
+ }
+ // Select the first stream
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pPresentationDescriptor->SelectStream(0);
+ }
+
+ // Set the file/stream duration as an attribute on the presentation descriptor.
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pPresentationDescriptor->SetUINT64(MF_PD_DURATION, (UINT64)ULLONG_MAX);
+ }
+
+ SafeRelease(&pStreamDescriptor);
+ SafeRelease(&pHandler);
+ return hr;
+}
+
+
+
+//-------------------------------------------------------------------
+// Name: ValidatePresentationDescriptor
+// Description: Validates the caller's presentation descriptor.
+//
+// This method is called when Start() is called with a non-NULL
+// presentation descriptor. The caller is supposed to give us back
+// the same PD that we gave out in CreatePresentationDescriptor().
+// This method performs a sanity check on the caller's PD to make
+// sure it matches ours.
+//
+// Note: Because this media source has one stream with single, fixed
+// media type, there is not much for the caller to decide. In
+// a more complicated source, the caller might select different
+// streams, or select from a list of media types.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD)
+{
+ HRESULT hr;
+
+ assert(pPD != NULL);
+
+ IMFStreamDescriptor *pStreamDescriptor = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFMediaType *pMediaType = NULL;
+ GUID majorType;
+
+ DWORD cStreamDescriptors = 0;
+ BOOL fSelected = FALSE;
+
+ // Make sure there is only one stream.
+ hr = pPD->GetStreamDescriptorCount(&cStreamDescriptors);
+
+ if (SUCCEEDED(hr))
+ {
+ if (cStreamDescriptors != 1)
+ {
+ hr = MF_E_UNSUPPORTED_REPRESENTATION;
+ }
+ }
+
+ // Get the stream descriptor.
+ if (SUCCEEDED(hr))
+ {
+ hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pStreamDescriptor);
+ }
+
+ // Make sure it's selected. (This media source has only one stream, so it
+ // is not useful to deselect the only stream.)
+ if (SUCCEEDED(hr))
+ {
+ if (!fSelected)
+ {
+ hr = MF_E_UNSUPPORTED_REPRESENTATION;
+ }
+ }
+
+ // Get the media type handler, so that we can get the media type.
+ if (SUCCEEDED(hr))
+ {
+ hr = pStreamDescriptor->GetMediaTypeHandler(&pHandler);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pHandler->GetCurrentMediaType(&pMediaType);
+ }
+
+ hr = pMediaType->GetMajorType(&majorType);
+
+ if (SUCCEEDED(hr))
+ {
+ if(majorType == MFMediaType_Video)
+ {
+ if (SUCCEEDED(hr))
+ {
+ hr = MFUtils::ValidateVideoFormat(pMediaType);
+ }
+ }
+ else
+ {
+ WAVEFORMATEX *pFormat = NULL;
+ UINT32 cbWaveFormat = 0;
+
+ if (SUCCEEDED(hr))
+ {
+ hr = MFCreateWaveFormatExFromMFMediaType(
+ pMediaType,
+ &pFormat,
+ &cbWaveFormat);
+ }
+ if (SUCCEEDED(hr))
+ {
+ /*assert(this->WaveFormat() != NULL);
+
+ if (cbWaveFormat < this->WaveFormatSize())
+ {
+ hr = MF_E_INVALIDMEDIATYPE;
+ }*/
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ /*if (memcmp(pFormat, WaveFormat(), WaveFormatSize()) != 0)
+ {
+ hr = MF_E_INVALIDMEDIATYPE;
+ }*/
+ }
+
+ CoTaskMemFree(pFormat);
+ }
+ }
+
+ SafeRelease(&pStreamDescriptor);
+ SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+
+ return hr;
+}
+
+
//-------------------------------------------------------------------
// Name: QueueNewStreamEvent
// Description:
// Queues an MENewStream or MEUpdatedStream event during Start.
//
// pPD: The presentation descriptor.
//
// Precondition: The presentation descriptor is assumed to be valid.
// Call ValidatePresentationDescriptor before calling this method.
//
// MENewStream is sent the first time the stream is selected;
// MEUpdatedStream on every subsequent (re)start. The event carries
// the stream object itself as an IUnknown payload.
//-------------------------------------------------------------------

HRESULT CMFSource::QueueNewStreamEvent(IMFPresentationDescriptor *pPD)
{
    assert(pPD != NULL);

    HRESULT hr = S_OK;
    IMFStreamDescriptor *pSD = NULL;

    BOOL fSelected = FALSE;

    hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);

    if (SUCCEEDED(hr))
    {
        // The stream must be selected, because we don't allow the app
        // to de-select the stream. See ValidatePresentationDescriptor.
        assert(fSelected);

        if (m_pStream)
        {
            // The stream already exists, and is still selected.
            // Send the MEUpdatedStream event.
            hr = QueueEventWithIUnknown(this, MEUpdatedStream, S_OK, m_pStream);
        }
        else
        {
            // The stream does not exist, and is now selected.
            // Create a new stream.

            hr = CreateCMFStreamSource(pSD);

            if (SUCCEEDED(hr))
            {
                // CreateCMFStreamSource creates the stream, so m_pStream is no longer NULL.
                assert(m_pStream != NULL);

                // Send the MENewStream event.
                hr = QueueEventWithIUnknown(this, MENewStream, S_OK, m_pStream);
            }
        }
    }

    SafeRelease(&pSD);
    return hr;
}
+
+//-------------------------------------------------------------------
+// Name: CreateCMFStreamSource
+// Description: Creates the source's media stream object.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::CreateCMFStreamSource(IMFStreamDescriptor *pSD)
+{
+ HRESULT hr = S_OK;
+ m_pStream = new (std::nothrow) CMFStreamSource(this, pSD, hr);
+
+ if (m_pStream == NULL)
+ {
+ hr = E_OUTOFMEMORY;
+ }
+
+ if (FAILED(hr))
+ {
+ SafeRelease(&m_pStream);
+ }
+
+ return hr;
+}
+
+
+
+//-------------------------------------------------------------------
+// Name: GetCurrentPosition
+// Description: Returns the current playback position.
+//-------------------------------------------------------------------
+
+LONGLONG CMFSource::GetCurrentPosition() const
+{
+ if (m_pStream)
+ {
+ return m_pStream->GetCurrentPosition();
+ }
+ else
+ {
+ // If no stream is selected, we are at time 0 by definition.
+ return 0;
+ }
+}
+
+
+
+////////// AUDIO STREAM
+
+//-------------------------------------------------------------------
+// CMFStreamSource constructor.
+//
+// pSource: Parent media source.
+// pSD: Stream descriptor that describes this stream.
+// hr: If the constructor fails, this value is set to a failure code.
+//-------------------------------------------------------------------
+
+
+CMFStreamSource::CMFStreamSource(CMFSource *pSource, IMFStreamDescriptor *pSD, HRESULT& hr) :
+ m_nRefCount(1),
+ m_pEventQueue(NULL),
+ m_IsShutdown(FALSE),
+ m_rtCurrentPosition(0),
+ m_rtDuration(0),
+ m_discontinuity(FALSE),
+ m_EOS(FALSE),
+ m_pMediaBuffer(NULL),
+ m_nBufferSize(0)
+{
+ m_pSource = pSource;
+ m_pSource->AddRef();
+
+ m_pStreamDescriptor = pSD;
+ m_pStreamDescriptor->AddRef();
+
+ // Create the media event queue.
+ CHECK_HR(hr = MFCreateEventQueue(&m_pEventQueue));
+
+ //CHECK_HR(hr = InitializeParams());
+
+ InitializeCriticalSection(&m_critSec);
+
+bail:
+ return;
+}
+
+
+//-------------------------------------------------------------------
+// CMFStreamSource destructor.
+//-------------------------------------------------------------------
+
CMFStreamSource::~CMFStreamSource()
{
    // The source must have called Shutdown() before the last Release();
    // Shutdown() already released the event queue, source and descriptor.
    assert(m_IsShutdown);
    assert(m_nRefCount == 0);

    // Shutdown() does not release the media buffer; do it here.
    SafeRelease(&m_pMediaBuffer);

    DeleteCriticalSection(&m_critSec);
}
+
+
+// IMFCustomSource methods
+
+HRESULT CMFStreamSource::CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize)
+{
+ // Buffer pointer and size validity already checked by source (caller)
+ if(m_guidMajorType != MFMediaType_Video)
+ {
+ TSK_DEBUG_ERROR("Calling CopyVideoBuffer on no-video stream");
+#if defined(E_ILLEGAL_METHOD_CALL)
+ return E_ILLEGAL_METHOD_CALL;
+#else
+ return _HRESULT_TYPEDEF_(0x8000000EL);
+#endif
+ }
+ if(nWidth != m_structVideoParams.nWidth || nHeight != m_structVideoParams.nHeigh || nBufferSize != m_nBufferSize)
+ {
+ TSK_DEBUG_ERROR("Invalid argument %u#%u or %u#%u or %u#%u. If the call is from a video consumer then, you can safely ignore this message.", nWidth, m_structVideoParams.nWidth, nHeight, m_structVideoParams.nHeigh, nBufferSize, m_nBufferSize);
+#if defined(E_BOUNDS)
+ return E_BOUNDS;
+#else
+ return _HRESULT_TYPEDEF_(0x8000000BL);
+#endif
+ }
+
+ HRESULT hr = S_OK;
+
+ BYTE* pMediaBufferPtr = NULL;
+ DWORD cbMaxLength = nBufferSize, cbCurrentLength = nBufferSize;
+ CHECK_HR(hr = m_pMediaBuffer->Lock(&pMediaBufferPtr, &cbMaxLength, &cbCurrentLength));
+
+ memcpy(pMediaBufferPtr, pBufferPtr, nBufferSize);
+ CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
+ CHECK_HR(hr = m_pMediaBuffer->Unlock());
+
+bail:
+ return hr;
+}
+
+// IUnknown methods
+
// IUnknown: thread-safe reference increment.
ULONG CMFStreamSource::AddRef()
{
    return InterlockedIncrement(&m_nRefCount);
}
+
+ULONG CMFStreamSource::Release()
+{
+ ULONG uCount = InterlockedDecrement(&m_nRefCount);
+ if (uCount == 0)
+ {
+ delete this;
+ }
+ // For thread safety, return a temporary variable.
+ return uCount;
+}
+
// IUnknown: exposes IMFMediaEventGenerator and IMFMediaStream (plus
// IUnknown itself) via the shlwapi QISearch helper table.
HRESULT CMFStreamSource::QueryInterface(REFIID iid, void** ppv)
{
    static const QITAB qit[] =
    {
        QITABENT(CMFStreamSource, IMFMediaEventGenerator),
        QITABENT(CMFStreamSource, IMFMediaStream),
        { 0 }
    };
    return QISearch(this, qit, iid, ppv);
}
+
+
+// IMFMediaEventGenerator methods
+// [See note for CMFSource class]
+
+HRESULT CMFStreamSource::BeginGetEvent(IMFAsyncCallback* pCallback, IUnknown* punkState)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->BeginGetEvent(pCallback, punkState);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+ return hr;
+}
+
+HRESULT CMFStreamSource::EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->EndGetEvent(pResult, ppEvent);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+ return hr;
+}
+
// IMFMediaEventGenerator: synchronously retrieve the next event.
// GetEvent() can block, so we must NOT hold m_critSec across the call:
// take an AddRef'd snapshot of the queue under the lock, release the
// lock, then wait on the snapshot.
HRESULT CMFStreamSource::GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent)
{
    HRESULT hr = S_OK;

    IMFMediaEventQueue *pQueue = NULL;

    EnterCriticalSection(&m_critSec);

    hr = CheckShutdown();

    if (SUCCEEDED(hr))
    {
        // Snapshot the queue pointer; the AddRef keeps it valid even if
        // Shutdown() releases m_pEventQueue while we are waiting.
        pQueue = m_pEventQueue;
        pQueue->AddRef();
    }

    LeaveCriticalSection(&m_critSec);

    if (SUCCEEDED(hr))
    {
        // Potentially blocking wait, performed outside the lock.
        hr = pQueue->GetEvent(dwFlags, ppEvent);
    }

    SafeRelease(&pQueue);
    return hr;
}
+
+HRESULT CMFStreamSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT* pvValue)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+ return hr;
+}
+
+
+// IMFMediaStream methods.
+
+
+//-------------------------------------------------------------------
+// Name: GetMediaSource
+// Description: Returns a pointer to the media source.
+//-------------------------------------------------------------------
+
HRESULT CMFStreamSource::GetMediaSource(IMFMediaSource** ppMediaSource)
{
    if (ppMediaSource == NULL)
    {
        return E_POINTER;
    }

    EnterCriticalSection(&m_critSec);

    HRESULT hr = S_OK;

    // If called after shutdown, then m_pSource is NULL (Shutdown()
    // released it). Otherwise, m_pSource should not be NULL.

    hr = CheckShutdown();

    if (SUCCEEDED(hr))
    {
        if (m_pSource == NULL)
        {
            hr = E_UNEXPECTED;
        }
    }

    if (SUCCEEDED(hr))
    {
        // QueryInterface implicitly AddRefs the returned pointer.
        hr = m_pSource->QueryInterface(IID_PPV_ARGS(ppMediaSource));
    }

    LeaveCriticalSection(&m_critSec);
    return hr;
}
+
+
+//-------------------------------------------------------------------
+// Name: GetStreamDescriptor
+// Description: Returns the stream descriptor for this stream.
+//-------------------------------------------------------------------
+
HRESULT CMFStreamSource::GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescriptor)
{
    if (ppStreamDescriptor == NULL)
    {
        return E_POINTER;
    }

    // NOTE(review): this check runs outside the lock; after Shutdown() the
    // member is NULL and we return early. The locked CheckShutdown() below
    // covers the race where Shutdown() happens between here and the lock.
    if (m_pStreamDescriptor == NULL)
    {
        return E_UNEXPECTED;
    }

    EnterCriticalSection(&m_critSec);

    HRESULT hr = S_OK;

    hr = CheckShutdown();

    if (SUCCEEDED(hr))
    {
        // Hand out an AddRef'd copy of our descriptor.
        *ppStreamDescriptor = m_pStreamDescriptor;
        (*ppStreamDescriptor)->AddRef();
    }

    LeaveCriticalSection(&m_critSec);

    return hr;
}
+
+
+
+//-------------------------------------------------------------------
+// Name: RequestSample
+// Description: Requests a new sample.
+//
+// pToken: Token object. Can be NULL.
+//-------------------------------------------------------------------
+
HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
{
    if (m_pSource == NULL)
    {
        return E_UNEXPECTED;
    }

    HRESULT hr = S_OK;

    IMFMediaSource *pSource = NULL;
    IMFSample *pSample = NULL;  // Sample to deliver.

    EnterCriticalSection(&m_critSec);

    // Check if we are shut down.
    hr = CheckShutdown();

    // Check if we already reached the end of the stream.
    if (SUCCEEDED(hr))
    {
        if (m_EOS)
        {
            hr = MF_E_END_OF_STREAM;
        }
    }

    // Check the source is stopped.
    // GetState does not hold the source's critical section. Safe to call.
    if (SUCCEEDED(hr))
    {
        if (m_pSource->GetState() == CMFSource::STATE_STOPPED)
        {
            hr = MF_E_INVALIDREQUEST;
        }
    }

    if (SUCCEEDED(hr))
    {
        // Create a new audio sample.
        // CreateSample() may legitimately leave pSample NULL when the media
        // buffer is empty; the pSample guards below handle that case.
        hr = CreateSample(&pSample);
    }

    if (SUCCEEDED(hr))
    {
        // If the caller provided a token, attach it to the sample as
        // an attribute.

        // NOTE: If we processed sample requests asynchronously, we would
        // need to call AddRef on the token and put the token onto a FIFO
        // queue. See documentation for IMFMediaStream::RequestSample.
        if (pToken && pSample)
        {
            hr = pSample->SetUnknown(MFSampleExtension_Token, pToken);
        }
    }

    // If paused, queue the sample for later delivery. Otherwise, deliver the sample now.
    if (SUCCEEDED(hr) && pSample)
    {
        if (m_pSource->GetState() == CMFSource::STATE_PAUSED)
        {
            hr = m_sampleQueue.Queue(pSample);
        }
        else
        {
            hr = DeliverSample(pSample);
        }
    }

    // Cache a pointer to the source, prior to leaving the critical section.
    if (SUCCEEDED(hr))
    {
        pSource = m_pSource;
        pSource->AddRef();
    }

    LeaveCriticalSection(&m_critSec);


    // We only have one stream, so the end of the stream is also the end of the
    // presentation. Therefore, when we reach the end of the stream, we need to
    // queue the end-of-presentation event from the source. Logically we would do
    // this inside the CheckEndOfStream method. However, we cannot hold the
    // source's critical section while holding the stream's critical section, at
    // risk of deadlock.

    if (SUCCEEDED(hr))
    {
        if (m_EOS)
        {
            hr = pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
        }
    }

    SafeRelease(&pSample);
    SafeRelease(&pSource);
    return hr;
}
+
+
+///// Private CMFStreamSource methods
+
+HRESULT CMFStreamSource::InitializeParams()
+{
+ HRESULT hr = S_OK;
+
+ IMFMediaTypeHandler *pMediaTypeHandler = NULL;
+ IMFMediaType* pMediaType = NULL;
+
+ CHECK_HR(hr = m_pStreamDescriptor->GetMediaTypeHandler(&pMediaTypeHandler));
+ CHECK_HR(hr = pMediaTypeHandler->GetCurrentMediaType(&pMediaType));
+
+ GUID majorType, subType;
+ pMediaType->GetMajorType(&majorType);
+ if(majorType == MFMediaType_Video)
+ {
+ memset(&m_structVideoParams, 0, sizeof(m_structVideoParams));
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &m_structVideoParams.nWidth, &m_structVideoParams.nHeigh));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+
+ m_guidMajorType = MFMediaType_Video;
+ m_guidSubType = subType;
+
+ // Guess video size
+ UINT32 nBufferSize;
+ if(subType == MFVideoFormat_RGB32)
+ {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
+ }
+ else if(subType == MFVideoFormat_RGB24)
+ {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
+ }
+ else if(subType == MFVideoFormat_NV12 || subType == MFVideoFormat_I420)
+ {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh * 3) >> 1;
+ }
+ else
+ {
+ TSK_DEBUG_ERROR("Video subType not supported");
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+
+ // Allocate media buffer
+ SafeRelease(&m_pMediaBuffer);
+ CHECK_HR(hr = MFCreateMemoryBuffer(nBufferSize, &m_pMediaBuffer));
+ m_nBufferSize = nBufferSize;
+ {
+ //FIXME: DeliverSample() stops if no data
+ BYTE* pBuffer = NULL;
+ CHECK_HR(hr = m_pMediaBuffer->Lock(&pBuffer, NULL, NULL));
+ memset(pBuffer, 0, nBufferSize);
+ CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
+ CHECK_HR(hr = m_pMediaBuffer->Unlock());
+ }
+
+ // Retrieve video Frame rate
+ UINT32 unNumerator, unDenominator;
+ CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &unNumerator, &unDenominator));
+ m_structVideoParams.nFps = (unNumerator / unDenominator);
+
+ // Retrieve sample duration based on framerate
+ m_rtCurrentPosition = 0;
+ CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(m_structVideoParams.nFps, 1, &m_rtDuration));
+ }
+ else
+ {
+ TSK_DEBUG_ERROR("Only video media type is supported");
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+
+bail:
+ SafeRelease(&pMediaTypeHandler);
+ SafeRelease(&pMediaType);
+
+ return hr;
+}
+
+// NOTE: Some of these methods hold the stream's critical section
+// because they are called by the media source object.
+
+//-------------------------------------------------------------------
+// Name: CreateSample
+// Description: Creates a new audio/video sample.
+//-------------------------------------------------------------------
+
HRESULT CMFStreamSource::CreateSample(IMFSample **ppSample)
{
    *ppSample = NULL;

    HRESULT hr = S_OK;

    IMFSample *pSample = NULL;
    DWORD nCurrentLength = 0;

    CHECK_HR(hr = m_pMediaBuffer->GetCurrentLength(&nCurrentLength));

    // An empty media buffer means no frame is available yet: succeed with
    // *ppSample left NULL (callers treat a NULL sample as "nothing to do").
    if(nCurrentLength > 0)
    {
        CHECK_HR(hr = MFCreateSample(&pSample));
        // Timestamp the sample and advance the running position by one
        // frame duration (computed in InitializeParams()).
        CHECK_HR(hr = pSample->SetSampleTime(m_rtCurrentPosition));
        CHECK_HR(hr = pSample->SetSampleDuration(m_rtDuration));
        m_rtCurrentPosition += m_rtDuration;
        // The sample references the shared media buffer (no copy).
        CHECK_HR(hr = pSample->AddBuffer(m_pMediaBuffer));

        // Hand out an AddRef'd copy; the SafeRelease below drops the local
        // reference, leaving the caller owning exactly one.
        if((*ppSample = pSample))
        {
            (*ppSample)->AddRef();
        }
    }

bail:
    SafeRelease(&pSample);
    return pSample ? hr : hr; // NOTE: kept as-is; see doc comment above.
}
+
+//-------------------------------------------------------------------
+// Name: DeliverSample
+// Description: Delivers a sample by sending an MEMediaSample event.
+//-------------------------------------------------------------------
+HRESULT CMFStreamSource::DeliverSample(IMFSample *pSample)
+{
+ HRESULT hr = S_OK;
+
+ if(pSample)
+ {
+ // Send the MEMediaSample event with the new sample.
+ hr = QueueEventWithIUnknown(this, MEMediaSample, hr, pSample);
+ }
+
+ // See if we reached the end of the stream.
+ if (SUCCEEDED(hr))
+ {
+ hr = CheckEndOfStream(); // This method sends MEEndOfStream if needed.
+ }
+
+ return hr;
+}
+
+//-------------------------------------------------------------------
+// Name: DeliverQueuedSamples
+// Description: Delivers any samples waiting in the queue.
+//
+// Note: If the client requests a sample while the source is paused,
+// the sample is queued and delivered on the next non-seeking call
+// to Start(). The queue is flushed if the source is seeked or
+// stopped.
+//-------------------------------------------------------------------
+
HRESULT CMFStreamSource::DeliverQueuedSamples()
{
    HRESULT hr = S_OK;
    IMFSample *pSample = NULL;

    EnterCriticalSection(&m_critSec);

    // If we already reached the end of the stream, send the MEEndStream
    // event again.
    if (m_EOS)
    {
        hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
    }

    if (SUCCEEDED(hr))
    {
        // Deliver any queued samples.
        // Dequeue() transfers one reference to us; SafeRelease drops it
        // after delivery each iteration.
        while (!m_sampleQueue.IsEmpty())
        {
            hr = m_sampleQueue.Dequeue(&pSample);
            if (FAILED(hr))
            {
                break;
            }

            hr = DeliverSample(pSample);
            if (FAILED(hr))
            {
                break;
            }

            SafeRelease(&pSample);
        }
    }

    LeaveCriticalSection(&m_critSec);

    // If we reached the end of the stream, send the end-of-presentation event from
    // the media source. This is done outside our lock to avoid lock-order
    // inversion with the source's critical section (see RequestSample).
    if (SUCCEEDED(hr))
    {
        if (m_EOS)
        {
            hr = m_pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
        }
    }

    // Release the sample left over if the loop broke on a failure.
    SafeRelease(&pSample);
    return hr;
}
+
+
+//-------------------------------------------------------------------
+// Name: Flush
+// Description: Flushes the sample queue.
+//-------------------------------------------------------------------
+
+HRESULT CMFStreamSource::Flush()
+{
+ EnterCriticalSection(&m_critSec);
+
+ m_sampleQueue.Clear();
+
+ LeaveCriticalSection(&m_critSec);
+ return S_OK;
+}
+
+
+//-------------------------------------------------------------------
+// Name: Shutdown
+// Description: Notifies the stream that the source was shut down.
+//-------------------------------------------------------------------
+
+HRESULT CMFStreamSource::Shutdown()
+{
+ EnterCriticalSection(&m_critSec);
+
+ // Flush queued samples.
+ Flush();
+
+ // Shut down the event queue.
+ if (m_pEventQueue)
+ {
+ m_pEventQueue->Shutdown();
+ }
+
+ SafeRelease(&m_pEventQueue);
+ SafeRelease(&m_pSource);
+ SafeRelease(&m_pStreamDescriptor);
+
+ m_IsShutdown = TRUE;
+
+ LeaveCriticalSection(&m_critSec);
+ return S_OK;
+}
+
+//-------------------------------------------------------------------
+// Name: SetPosition
+// Description: Updates the new stream position.
+//-------------------------------------------------------------------
+
// Currently a no-op that always returns S_OK: this is a live source, so
// seeking has no meaning. The commented-out body below is kept from the
// original file-backed (RIFF/WAV) sample for reference.
HRESULT CMFStreamSource::SetPosition(LONGLONG rtNewPosition)
{
    EnterCriticalSection(&m_critSec);

    HRESULT hr = S_OK;

/*
    // Check if the requested position is beyond the end of the stream.
    LONGLONG duration = AudioDurationFromBufferSize(m_pRiff->Format(), m_pRiff->Chunk().DataSize());

    if (rtNewPosition > duration)
    {
        LeaveCriticalSection(&m_critSec);

        return MF_E_INVALIDREQUEST; // Start position is past the end of the presentation.
    }

    if (m_rtCurrentPosition != rtNewPosition)
    {
        LONGLONG offset = BufferSizeFromAudioDuration(m_pRiff->Format(), rtNewPosition);

        // The chunk size is a DWORD. So if our calculations are correct, there is no
        // way that the maximum valid seek position can be larger than a DWORD.
        assert(offset <= MAXDWORD);

        hr = m_pRiff->MoveToChunkOffset((DWORD)offset);

        if (SUCCEEDED(hr))
        {
            m_rtCurrentPosition = rtNewPosition;
            m_discontinuity = TRUE;
            m_EOS = FALSE;
        }
    }
*/
    LeaveCriticalSection(&m_critSec);
    return hr;
}
+
// Currently a no-op that always returns S_OK: this live source never ends,
// so m_EOS is never set here. The commented-out body below is kept from the
// original file-backed (RIFF/WAV) sample for reference.
HRESULT CMFStreamSource::CheckEndOfStream()
{
    HRESULT hr = S_OK;
/*
    if (m_pRiff->BytesRemainingInChunk() < m_pRiff->Format()->nBlockAlign)
    {
        // The remaining data is smaller than the audio block size. (In theory there shouldn't be
        // partial bits of data at the end, so we should reach an even zero bytes, but the file
        // might not be authored correctly.)
        m_EOS = TRUE;

        // Send the end-of-stream event,
        hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
    }
    */
    return hr;
}
+
+
+
+
+//-------------------------------------------------------------------
+// Name: QueueEventWithIUnknown
+// Description: Helper function to queue an event with an IUnknown
+// pointer value.
+//
+// pMEG: Media event generator that will queue the event.
+// meType: Media event type.
+// hrStatus: Status code for the event.
+// pUnk: IUnknown pointer value.
+//
+//-------------------------------------------------------------------
+
+
+HRESULT QueueEventWithIUnknown(
+ IMFMediaEventGenerator *pMEG,
+ MediaEventType meType,
+ HRESULT hrStatus,
+ IUnknown *pUnk)
+{
+
+ // Create the PROPVARIANT to hold the IUnknown value.
+ PROPVARIANT var;
+ var.vt = VT_UNKNOWN;
+ var.punkVal = pUnk;
+ pUnk->AddRef();
+
+ // Queue the event.
+ HRESULT hr = pMEG->QueueEvent(meType, GUID_NULL, hrStatus, &var);
+
+ // Clear the PROPVARIANT.
+ PropVariantClear(&var);
+
+ return hr;
+}
+
+LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioDataSize)
+{
+ assert(pWav != NULL);
+
+ if (pWav->nAvgBytesPerSec == 0)
+ {
+ return 0;
+ }
+ return (LONGLONG)cbAudioDataSize * 10000000 / pWav->nAvgBytesPerSec;
+}
+
+LONGLONG BufferSizeFromAudioDuration(const WAVEFORMATEX *pWav, LONGLONG duration)
+{
+ LONGLONG cbSize = duration * pWav->nAvgBytesPerSec / 10000000;
+
+ ULONG ulRemainder = (ULONG)(cbSize % pWav->nBlockAlign);
+
+ // Round up to the next block.
+ if(ulRemainder)
+ {
+ cbSize += pWav->nBlockAlign - ulRemainder;
+ }
+
+ return cbSize;
+}
+
+
diff --git a/plugins/pluginWinMF/internals/mf_custom_src.h b/plugins/pluginWinMF/internals/mf_custom_src.h
new file mode 100644
index 0000000..f9194c9
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_custom_src.h
@@ -0,0 +1,340 @@
+/*
+* Copyright (C) Microsoft Corporation. All rights reserved.
+* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_CUSTOM_SOURCE_H
+#define PLUGIN_WIN_MF_CUSTOM_SOURCE_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <windows.h>
+#include <assert.h>
+
+#include <mfapi.h>
+#include <mfobjects.h>
+#include <mfidl.h>
+#include <mferror.h>
+#include <shlwapi.h>
+
+class CMFStreamSource;
+class CMFSource;
+
+LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioDataSize);
+
+
+//////////////////////////////////////////////////////////////////////////
+// CMFSource
+// Description: Media source object.
+//////////////////////////////////////////////////////////////////////////
+
// Custom push-mode media source exposing exactly one stream with one fixed
// media type (set at creation via CreateInstanceEx). Frames are pushed in by
// the producer through CopyVideoBuffer() and pulled by the pipeline through
// the stream's RequestSample(). Reference-counted COM object; thread-safe
// via m_critSec.
class CMFSource : public IMFMediaSource
{
    friend class CMFStreamSource;

public:
    static HRESULT CreateInstance(REFIID iid, void **ppSource);
    static HRESULT CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType);

    // IMFCustomSource
    // Forwards one video frame to the stream's reusable media buffer.
    HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);

    // IUnknown
    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
    STDMETHODIMP_(ULONG) AddRef();
    STDMETHODIMP_(ULONG) Release();

    // IMFMediaEventGenerator
    STDMETHODIMP BeginGetEvent(IMFAsyncCallback* pCallback,IUnknown* punkState);
    STDMETHODIMP EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent);
    STDMETHODIMP GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent);
    STDMETHODIMP QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT* pvValue);

    // IMFMediaSource
    STDMETHODIMP CreatePresentationDescriptor(IMFPresentationDescriptor** ppPresentationDescriptor);
    STDMETHODIMP GetCharacteristics(DWORD* pdwCharacteristics);
    STDMETHODIMP Pause();
    STDMETHODIMP Shutdown();
    STDMETHODIMP Start(
        IMFPresentationDescriptor* pPresentationDescriptor,
        const GUID* pguidTimeFormat,
        const PROPVARIANT* pvarStartPosition
    );
    STDMETHODIMP Stop();

private:

    // Source state machine: Stop()/Start()/Pause() transition between these.
    enum State
    {
        STATE_STOPPED,
        STATE_PAUSED,
        STATE_STARTED
    };


    // Constructor is private - client should use static CreateInstance method.
    CMFSource(HRESULT &hr, IMFMediaType *pMediaType);
    virtual ~CMFSource();

    // Returns MF_E_SHUTDOWN once Shutdown() has been called, S_OK before.
    HRESULT CheckShutdown() const
    {
        if (m_IsShutdown)
        {
            return MF_E_SHUTDOWN;
        }
        else
        {
            return S_OK;
        }
    }

    HRESULT CreatePresentationDescriptor();
    HRESULT QueueNewStreamEvent(IMFPresentationDescriptor *pPD);
    HRESULT CreateCMFStreamSource(IMFStreamDescriptor *pSD);
    HRESULT ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD);

    LONGLONG GetCurrentPosition() const;
    // Does not take m_critSec; safe for the stream to call (see RequestSample).
    State GetState() const { return m_state; }

    IMFMediaEventQueue *m_pEventQueue;                  // Event generator helper
    IMFPresentationDescriptor *m_pPresentationDescriptor; // Default presentation

    CMFStreamSource *m_pStream;                 // Media stream. Can be NULL if no stream is selected.

    long m_nRefCount;                       // reference count
    CRITICAL_SECTION m_critSec;             // protects state and members above
    BOOL m_IsShutdown;                      // Flag to indicate if Shutdown() method was called.
    State m_state;                          // Current state (running, stopped, paused)

    IMFMediaType *m_pMediaType;             // The single supported media type
};
+
+
// FIFO of IMFSample pointers implemented as a circular doubly-linked list
// with a sentinel ("anchor") node. Queue() AddRefs the sample; Dequeue()
// transfers that reference to the caller (caller must Release); Clear()
// releases everything still queued. Not thread-safe: callers synchronize
// (CMFStreamSource uses it only under m_critSec).
class SampleQueue
{
protected:

    // Nodes in the linked list
    struct Node
    {
        Node *prev;
        Node *next;
        IMFSample* item;

        Node() : prev(NULL), next(NULL)
        {
        }

        Node(IMFSample* item) : prev(NULL), next(NULL)
        {
            this->item = item;
        }

        IMFSample* Item() const { return item; }
    };


protected:
    Node m_anchor;  // Anchor (sentinel) node for the linked list; holds no item.

public:

    SampleQueue()
    {
        // Empty list: the anchor points at itself in both directions.
        m_anchor.next = &m_anchor;
        m_anchor.prev = &m_anchor;
    }

    virtual ~SampleQueue()
    {
        Clear();
    }

    // Appends 'item' at the tail; takes its own reference on the sample.
    HRESULT Queue(IMFSample* item)
    {
        if (item == NULL)
        {
            return E_POINTER;
        }

        Node *pNode = new (std::nothrow) Node(item);
        if (pNode == NULL)
        {
            return E_OUTOFMEMORY;
        }

        item->AddRef();

        // Insert just before the anchor (i.e. at the tail of the FIFO).
        Node *pBefore = m_anchor.prev;

        Node *pAfter = pBefore->next;   // == &m_anchor

        pBefore->next = pNode;
        pAfter->prev = pNode;

        pNode->prev = pBefore;
        pNode->next = pAfter;

        return S_OK;

    }

    // Removes the head sample and hands its reference to the caller
    // (caller must Release). Fails with E_FAIL when empty.
    HRESULT Dequeue(IMFSample* *ppItem)
    {
        if (IsEmpty())
        {
            return E_FAIL;
        }
        if (ppItem == NULL)
        {
            return E_POINTER;
        }

        // Head of the FIFO (oldest node).
        Node *pNode = m_anchor.next;

        // The next node's previous is this node's previous.
        // (m_anchor.next->prev == pNode->prev == &m_anchor here.)
        pNode->next->prev = m_anchor.next->prev;

        // The previous node's next is this node's next.
        pNode->prev->next = pNode->next;

        // Ownership of the node's reference moves to *ppItem (no Release).
        *ppItem = pNode->item;
        delete pNode;

        return S_OK;
    }

    BOOL IsEmpty() const { return m_anchor.next == &m_anchor; }

    // Releases all queued samples and resets to the empty state.
    void Clear()
    {
        Node *n = m_anchor.next;

        // Delete the nodes
        while (n != &m_anchor)
        {
            if (n->item)
            {
                n->item->Release();
            }

            Node *tmp = n->next;
            delete n;
            n = tmp;
        }

        // Reset the anchor to point at itself
        m_anchor.next = &m_anchor;
        m_anchor.prev = &m_anchor;
    }

};
+
+
+
+//////////////////////////////////////////////////////////////////////////
+// CMFStreamSource
+// Description: Media stream object.
+//////////////////////////////////////////////////////////////////////////
+
+
// The single media stream exposed by CMFSource. Holds a reusable media
// buffer that the producer overwrites via CopyVideoBuffer(); every
// RequestSample() wraps that buffer in a new IMFSample. Created and shut
// down exclusively by the parent CMFSource (private ctor, friend access).
class CMFStreamSource : public IMFMediaStream
{
    friend class CMFSource;

public:

    // IMFCustomSource
    // Overwrites the shared media buffer with one video frame.
    HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);

    // IUnknown
    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
    STDMETHODIMP_(ULONG) AddRef();
    STDMETHODIMP_(ULONG) Release();

    // IMFMediaEventGenerator
    STDMETHODIMP BeginGetEvent(IMFAsyncCallback* pCallback,IUnknown* punkState);
    STDMETHODIMP EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent);
    STDMETHODIMP GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent);
    STDMETHODIMP QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT* pvValue);

    // IMFMediaStream
    STDMETHODIMP GetMediaSource(IMFMediaSource** ppMediaSource);
    STDMETHODIMP GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescriptor);
    STDMETHODIMP RequestSample(IUnknown* pToken);

private:

    // 'hr' receives the construction result (e.g. MFCreateEventQueue failure).
    CMFStreamSource(CMFSource *pSource, IMFStreamDescriptor *pSD, HRESULT& hr);
    ~CMFStreamSource();


    // Returns MF_E_SHUTDOWN once the source has shut this stream down.
    HRESULT CheckShutdown() const
    {
        if (m_IsShutdown)
        {
            return MF_E_SHUTDOWN;
        }
        else
        {
            return S_OK;
        }
    }

    HRESULT InitializeParams();
    HRESULT Shutdown();
    HRESULT CreateSample(IMFSample **pSample);
    HRESULT DeliverSample(IMFSample *pSample);
    HRESULT DeliverQueuedSamples();
    HRESULT Flush();

    LONGLONG GetCurrentPosition() const { return m_rtCurrentPosition; }
    HRESULT SetPosition(LONGLONG rtNewPosition);
    HRESULT CheckEndOfStream();


    long m_nRefCount;                       // reference count
    CRITICAL_SECTION m_critSec;             // protects the members below
    BOOL m_IsShutdown;                      // Flag to indicate if source's Shutdown() method was called.
    LONGLONG m_rtCurrentPosition;           // Current position in the stream, in 100-ns units
    UINT64 m_rtDuration;                    // Sample duration, in 100-ns units
    BOOL m_discontinuity;                   // Is the next sample a discontinuity?
    BOOL m_EOS;                             // Did we reach the end of the stream?

    IMFMediaEventQueue *m_pEventQueue;      // Event generator helper.
    CMFSource *m_pSource;                   // Parent media source
    IMFStreamDescriptor *m_pStreamDescriptor; // Stream descriptor for this stream.

    SampleQueue m_sampleQueue;              // Queue for samples while paused.
    GUID m_guidMajorType;                   // major media type (e.g. MFMediaType_Video or MFMediaType_Audio)
    GUID m_guidSubType;                     // Media subtype (e.g. MFVideoFormat_RGB32 or MFVideoFormat_H264)
    IMFMediaBuffer *m_pMediaBuffer;         // Pointer to the data to deliver
    UINT32 m_nBufferSize;                   // Size of the data to deliver

    // Video parameters derived from the media type in InitializeParams().
    // NOTE: 'nHeigh' (sic) is referenced by the .cxx; kept for compatibility.
    struct
    {
        UINT32 nWidth;
        UINT32 nHeigh;
        UINT32 nFps;
    }
    m_structVideoParams;
};
+
+
+#endif /* PLUGIN_WIN_MF_CUSTOM_SOURCE_H */
diff --git a/plugins/pluginWinMF/internals/mf_devices.cxx b/plugins/pluginWinMF/internals/mf_devices.cxx
new file mode 100644
index 0000000..22b862e
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_devices.cxx
@@ -0,0 +1,151 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_devices.h"
+#include "mf_utils.h"
+
+DeviceList::DeviceList()
+: m_ppDevices(NULL)
+, m_cDevices(0)
+{
+
+}
+
+DeviceList::~DeviceList()
+{
+ Clear();
+}
+
+UINT32 DeviceList::Count()const
+{
+ return m_cDevices;
+}
+
+void DeviceList::Clear()
+{
+ for (UINT32 i = 0; i < m_cDevices; i++) {
+ SafeRelease(&m_ppDevices[i]);
+ }
+ CoTaskMemFree(m_ppDevices);
+ m_ppDevices = NULL;
+
+ m_cDevices = 0;
+}
+
+HRESULT DeviceList::EnumerateDevices(const GUID& sourceType)
+{
+ HRESULT hr = S_OK;
+ IMFAttributes *pAttributes = NULL;
+
+ Clear();
+
+ // Initialize an attribute store. We will use this to
+ // specify the enumeration parameters.
+
+ hr = MFCreateAttributes(&pAttributes, 1);
+
+ // Ask for source type = video capture devices
+ if (SUCCEEDED(hr))
+ {
+ hr = pAttributes->SetGUID(
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ sourceType
+ );
+ }
+
+ // Enumerate devices.
+ if (SUCCEEDED(hr))
+ {
+ hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
+ }
+
+ SafeRelease(&pAttributes);
+
+ return hr;
+}
+
+HRESULT DeviceList::GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate)
+{
+ if (index >= Count())
+ {
+ return E_INVALIDARG;
+ }
+
+ *ppActivate = m_ppDevices[index];
+ (*ppActivate)->AddRef();
+
+ return S_OK;
+}
+
+HRESULT DeviceList::GetDeviceBest(IMFActivate **ppActivate, WCHAR *pszName /*= NULL*/)
+{
+ UINT32 index = 0;
+ if(pszName)
+ {
+ WCHAR *_pszName = NULL;
+ BOOL bFound = FALSE;
+ for(UINT32 i = 0; i < Count() && !bFound; ++i)
+ {
+ if((SUCCEEDED(GetDeviceName(i, &_pszName))))
+ {
+ if(wcscmp(_pszName, pszName) == 0)
+ {
+ index = i;
+ bFound = TRUE;
+ // do not break the loop because we need to free(_pszName)
+ }
+ }
+ if(_pszName)
+ {
+ CoTaskMemFree(_pszName), _pszName = NULL;
+ }
+ }
+ }
+ return GetDeviceAtIndex(index, ppActivate);
+}
+
+// The caller must free the memory for the string by calling CoTaskMemFree
+HRESULT DeviceList::GetDeviceName(UINT32 index, WCHAR **ppszName)
+{
+ if (index >= Count())
+ {
+ return E_INVALIDARG;
+ }
+
+ HRESULT hr = S_OK;
+
+ hr = m_ppDevices[index]->GetAllocatedString(
+ MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
+ ppszName,
+ NULL
+ );
+
+ return hr;
+}
+
+HRESULT DeviceListAudio::EnumerateDevices()
+{
+ // call base class function
+ return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
+}
+
+HRESULT DeviceListVideo::EnumerateDevices()
+{
+ // call base class function
+ return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+}
diff --git a/plugins/pluginWinMF/internals/mf_devices.h b/plugins/pluginWinMF/internals/mf_devices.h
new file mode 100644
index 0000000..03d010f
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_devices.h
@@ -0,0 +1,64 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_DEVICES_H
+#define PLUGIN_WIN_MF_DEVICES_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+
+//
+// DeviceList [Declaration]
+//
// Owns the IMFActivate array returned by MFEnumDeviceSources() and provides
// lookup of capture devices by index or by friendly name.
class DeviceList
{
    UINT32 m_cDevices;         // number of entries in m_ppDevices
    IMFActivate **m_ppDevices; // CoTaskMem-allocated array of device activators (owned)

public:
    DeviceList();
    virtual ~DeviceList();

    UINT32 Count()const;
    // Releases all devices and frees the array; list becomes empty.
    void Clear();
    // AddRef()s the device at 'index' into *ppActivate (E_INVALIDARG if out of range).
    HRESULT GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate);
    // Device whose friendly name equals pszName, else the first device.
    HRESULT GetDeviceBest(IMFActivate **ppActivate, WCHAR *pszName = NULL);
    // Caller must CoTaskMemFree(*ppszName).
    HRESULT GetDeviceName(UINT32 index, WCHAR **ppszName);

protected:
    // Enumerates capture devices matching the given MF_DEVSOURCE type GUID.
    HRESULT EnumerateDevices(const GUID& sourceType);
};
+
// DeviceList specialized for audio-capture devices.
class DeviceListAudio : public DeviceList
{
public:
    HRESULT EnumerateDevices();
};
+
// DeviceList specialized for video-capture devices.
class DeviceListVideo : public DeviceList
{
public:
    HRESULT EnumerateDevices();
};
+
+#endif /* PLUGIN_WIN_MF_DEVICES_H */
diff --git a/plugins/pluginWinMF/internals/mf_display_watcher.cxx b/plugins/pluginWinMF/internals/mf_display_watcher.cxx
new file mode 100644
index 0000000..62dbc5f
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_display_watcher.cxx
@@ -0,0 +1,160 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_display_watcher.h"
+#include "mf_utils.h"
+
+#include "tsk_debug.h"
+
+#include <assert.h>
+
// Binds the watcher to the renderer's IMFVideoDisplayControl (obtained via
// the sink's MR_VIDEO_RENDER_SERVICE) and enables picture aspect-ratio
// preservation. 'hr' receives the outcome, since a constructor cannot
// return an HRESULT.
DisplayWatcher::DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr)
: m_pDisplayControl(NULL)
, m_hWnd(hWnd)
, m_pWndProc(NULL)
, m_bStarted(FALSE)
, m_bFullScreen(FALSE)
{
    IMFGetService *pService = NULL;

    CHECK_HR(hr = pMediaSink->QueryInterface(__uuidof(IMFGetService), (void**)&pService));
    CHECK_HR(hr = pService->GetService(MR_VIDEO_RENDER_SERVICE, __uuidof(IMFVideoDisplayControl), (void**)&m_pDisplayControl));
    CHECK_HR(hr = m_pDisplayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture));
bail:
    SafeRelease(&pService);
}
+
+DisplayWatcher::~DisplayWatcher()
+{
+ Stop();
+
+ SafeRelease(&m_pDisplayControl);
+}
+
+HRESULT DisplayWatcher::Start()
+{
+ HRESULT hr = S_OK;
+ HWND hWnd = m_hWnd; // save()
+ CHECK_HR(hr = Stop());
+
+ if((m_hWnd = hWnd) && m_pDisplayControl)
+ {
+ CHECK_HR(hr = m_pDisplayControl->SetVideoWindow(hWnd));
+
+ BOOL ret = SetPropA(m_hWnd, "This", this);
+ assert(ret);
+
+#if _M_X64
+ m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)DisplayWatcher::WndProc);
+#else
+ m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)DisplayWatcher::WndProc);
+#endif
+
+ UpdatePosition(); // black screen if attached later
+ }
+ m_bStarted = TRUE;
+bail:
+ return hr;
+}
+
+HRESULT DisplayWatcher::SetFullscreen(BOOL bEnabled)
+{
+ if(m_pDisplayControl)
+ {
+ HRESULT hr = m_pDisplayControl->SetFullscreen(bEnabled);
+ m_bFullScreen = SUCCEEDED(hr);
+ return hr;
+ }
+
+ return E_FAIL;
+}
+
+HRESULT DisplayWatcher::SetHwnd(HWND hWnd)
+{
+ BOOL bWasStarted = m_bStarted;
+ Stop();
+ m_hWnd = hWnd;
+ if(bWasStarted)
+ {
+ return Start();
+ }
+ return S_OK;
+}
+
+HRESULT DisplayWatcher::Stop()
+{
+ if(m_hWnd && m_pWndProc)
+ {
+ // Restore
+
+#if _M_X64
+ SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)m_pWndProc);
+#else
+ SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)m_pWndProc);
+#endif
+ }
+ m_hWnd = NULL;
+ m_pWndProc = NULL;
+ m_bStarted = FALSE;
+ return S_OK;
+}
+
+void DisplayWatcher::UpdatePosition()
+{
+ if(m_pDisplayControl && m_hWnd)
+ {
+ RECT rcDst = { 0, 0, 0, 0 };
+ GetClientRect(m_hWnd, &rcDst);
+ m_pDisplayControl->SetVideoPosition(NULL, &rcDst);
+ }
+}
+
+LRESULT CALLBACK DisplayWatcher::WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
+{
+ switch(uMsg)
+ {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE:
+ {
+ DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
+ if(This)
+ {
+ This->UpdatePosition();
+ }
+ break;
+ }
+
+ case WM_CHAR:
+ case WM_KEYUP:
+ {
+ DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
+ if(This)
+ {
+ if(This->m_bFullScreen && (wParam == 0x1B || wParam == VK_ESCAPE))
+ {
+ This->SetFullscreen(FALSE);
+ }
+ }
+
+ break;
+ }
+ }
+
+ return DefWindowProc(hWnd, uMsg, wParam, lParam);
+} \ No newline at end of file
diff --git a/plugins/pluginWinMF/internals/mf_display_watcher.h b/plugins/pluginWinMF/internals/mf_display_watcher.h
new file mode 100644
index 0000000..d41d6a6
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_display_watcher.h
@@ -0,0 +1,55 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_DISPLAY_WATCHER_H
+#define PLUGIN_WIN_MF_DISPLAY_WATCHER_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+#include <Evr.h>
+
// Keeps the EVR video rectangle in sync with a host window.
// Start() subclasses the window so size/move messages update the video
// position and ESC leaves fullscreen; Stop() restores the original proc.
class DisplayWatcher
{
public:
    // 'hr' receives the construction result (a ctor cannot return HRESULT).
    DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr);
    virtual ~DisplayWatcher();

public:
    HRESULT Start();
    HRESULT SetFullscreen(BOOL bEnabled);
    // Re-targets a new window; restarts watching if already started.
    HRESULT SetHwnd(HWND hWnd);
    HRESULT Stop();

private:
    void UpdatePosition();
    static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);

private:
    IMFVideoDisplayControl *m_pDisplayControl; // EVR display control (owned COM ref)
    HWND m_hWnd;                               // target window (not owned)
    WNDPROC m_pWndProc;                        // original window proc saved by Start()
    BOOL m_bStarted;                           // Start() called and not yet Stop()'d
    BOOL m_bFullScreen;                        // last SetFullscreen(TRUE) succeeded
};
+
+#endif /* PLUGIN_WIN_MF_DISPLAY_WATCHER_H */
diff --git a/plugins/pluginWinMF/internals/mf_sample_grabber.cxx b/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
new file mode 100644
index 0000000..87aa6af
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
@@ -0,0 +1,135 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_sample_grabber.h"
+
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+#include <assert.h>
+
+// Create a new instance of the object.
+HRESULT SampleGrabberCB::CreateInstance(const struct tmedia_producer_s* pcWrappedProducer, SampleGrabberCB **ppCB)
+{
+ assert(pcWrappedProducer);
+
+ *ppCB = new (std::nothrow) SampleGrabberCB(pcWrappedProducer);
+
+ if (ppCB == NULL)
+ {
+ return E_OUTOFMEMORY;
+ }
+ return S_OK;
+}
+
// Table-driven QueryInterface via shlwapi's QISearch. Exposes
// IMFSampleGrabberSinkCallback and IMFClockStateSink; QISearch also
// satisfies IUnknown through the first table entry.
STDMETHODIMP SampleGrabberCB::QueryInterface(REFIID riid, void** ppv)
{
    static const QITAB qit[] =
    {
        QITABENT(SampleGrabberCB, IMFSampleGrabberSinkCallback),
        QITABENT(SampleGrabberCB, IMFClockStateSink),
        { 0 } // table terminator required by QISearch
    };
    return QISearch(this, qit, riid, ppv);
}
+
+STDMETHODIMP_(ULONG) SampleGrabberCB::AddRef()
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) SampleGrabberCB::Release()
+{
+ ULONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0)
+ {
+ delete this;
+ }
+ return cRef;
+
+}
+
+// IMFClockStateSink methods.
+
+// In these example, the IMFClockStateSink methods do not perform any actions.
+// You can use these methods to track the state of the sample grabber sink.
+
// Presentation clock started. Intentionally log-only; no state is tracked.
STDMETHODIMP SampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
{
    TSK_DEBUG_INFO("SampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
    return S_OK;
}
+
// Presentation clock stopped. Intentionally log-only; no state is tracked.
STDMETHODIMP SampleGrabberCB::OnClockStop(MFTIME hnsSystemTime)
{
    TSK_DEBUG_INFO("SampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
    return S_OK;
}
+
// Presentation clock paused. Intentionally log-only; no state is tracked.
STDMETHODIMP SampleGrabberCB::OnClockPause(MFTIME hnsSystemTime)
{
    TSK_DEBUG_INFO("SampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
    return S_OK;
}
+
// Presentation clock restarted after a pause. Log-only.
STDMETHODIMP SampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime)
{
    TSK_DEBUG_INFO("SampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
    return S_OK;
}
+
// Presentation clock rate changed. Log-only.
STDMETHODIMP SampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
{
    TSK_DEBUG_INFO("SampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
    return S_OK;
}
+
+// IMFSampleGrabberSink methods.
+
// Sink received its presentation clock. The clock is not retained; log-only.
STDMETHODIMP SampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock)
{
    TSK_DEBUG_INFO("SampleGrabberCB::OnSetPresentationClock");
    return S_OK;
}
+
// Called by the sample-grabber sink for every sample. Forwards the raw
// buffer to the wrapped producer's encode callback; when muted, the buffer
// is zeroed first (but still sent) to keep the RTP flow alive.
STDMETHODIMP SampleGrabberCB::OnProcessSample(
    REFGUID guidMajorMediaType, DWORD dwSampleFlags,
    LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
    DWORD dwSampleSize)
{
    if (m_pWrappedProducer && TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback) {
#if 1
        if (m_bMuted) {
            // Send zeros. Do not skip sending data to avoid NAT issues and session deconnection.
            // Some TelePresence systems disconnect the session when the remote peer stops sending video data.
            // NOTE(review): casts away const and overwrites the sink's own
            // buffer in place - assumes MF tolerates this; confirm.
            memset((void*)pSampleBuffer, 0, dwSampleSize);
        }
#endif
        TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback_data, pSampleBuffer, dwSampleSize);
    }

    return S_OK;
}
+
// Sink is shutting down. Nothing to release here; log-only.
STDMETHODIMP SampleGrabberCB::OnShutdown()
{
    TSK_DEBUG_INFO("SampleGrabberCB::OnShutdown");
    return S_OK;
}
diff --git a/plugins/pluginWinMF/internals/mf_sample_grabber.h b/plugins/pluginWinMF/internals/mf_sample_grabber.h
new file mode 100644
index 0000000..858f3c1
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_sample_grabber.h
@@ -0,0 +1,68 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_SAMPLE_GRABBER_H
+#define PLUGIN_WIN_MF_SAMPLE_GRABBER_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+
+//
+// Sample Grabber callback [Declaration]
+// http://msdn.microsoft.com/en-us/library/windows/desktop/hh184779(v=vs.85).aspx
+//
//
// Sample Grabber callback [Declaration]
// http://msdn.microsoft.com/en-us/library/windows/desktop/hh184779(v=vs.85).aspx
//
// Bridges a Media Foundation sample-grabber sink to a Doubango producer:
// each sample delivered by the sink is forwarded to the wrapped producer's
// encode callback (see OnProcessSample). Instances are created via
// CreateInstance() with refcount 1 and destroyed by Release().
class SampleGrabberCB : public IMFSampleGrabberSinkCallback
{
    bool m_bMuted;  // when true, buffers are zeroed before being forwarded
    long m_cRef;    // COM reference count
    const struct tmedia_producer_s* m_pWrappedProducer; // not owned

    // private: use CreateInstance()
    SampleGrabberCB(const struct tmedia_producer_s* pcWrappedProducer) : m_cRef(1), m_bMuted(false), m_pWrappedProducer(pcWrappedProducer) {}

public:
    static HRESULT CreateInstance(const struct tmedia_producer_s* pcWrappedProducer, SampleGrabberCB **ppCB);

    void SetMute(bool bMuted) { m_bMuted = bMuted; }

    // IUnknown methods
    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
    STDMETHODIMP_(ULONG) AddRef();
    STDMETHODIMP_(ULONG) Release();

    // IMFClockStateSink methods (log-only stubs)
    STDMETHODIMP OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset);
    STDMETHODIMP OnClockStop(MFTIME hnsSystemTime);
    STDMETHODIMP OnClockPause(MFTIME hnsSystemTime);
    STDMETHODIMP OnClockRestart(MFTIME hnsSystemTime);
    STDMETHODIMP OnClockSetRate(MFTIME hnsSystemTime, float flRate);

    // IMFSampleGrabberSinkCallback methods
    STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock);
    STDMETHODIMP OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
        LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
        DWORD dwSampleSize);
    STDMETHODIMP OnShutdown();
};
+
+
+#endif /* PLUGIN_WIN_MF_SAMPLE_GRABBER_H */
diff --git a/plugins/pluginWinMF/internals/mf_sample_queue.cxx b/plugins/pluginWinMF/internals/mf_sample_queue.cxx
new file mode 100644
index 0000000..05c2bc6
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_sample_queue.cxx
@@ -0,0 +1,158 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_sample_queue.h"
+
+#include <assert.h>
+
+MFSampleQueue::MFSampleQueue()
+: m_nRefCount(1)
+, m_nCount(0)
+{
+ InitializeCriticalSection(&m_critSec);
+
+ m_anchor.next = &m_anchor;
+ m_anchor.prev = &m_anchor;
+}
+
+MFSampleQueue::~MFSampleQueue()
+{
+ assert(m_nRefCount == 0);
+
+ Clear();
+
+ DeleteCriticalSection(&m_critSec);
+}
+
+STDMETHODIMP MFSampleQueue::QueryInterface(REFIID iid, void** ppv)
+{
+ return E_NOTIMPL;
+}
+
+STDMETHODIMP_(ULONG) MFSampleQueue::AddRef()
+{
+ return InterlockedIncrement(&m_nRefCount);
+}
+
+STDMETHODIMP_(ULONG) MFSampleQueue::Release()
+{
+ ULONG uCount = InterlockedDecrement(&m_nRefCount);
+ if (uCount == 0)
+ {
+ delete this;
+ }
+ // For thread safety, return a temporary variable.
+ return uCount;
+}
+
+HRESULT MFSampleQueue::Queue(IMFSample* item)
+{
+ if (item == NULL)
+ {
+ return E_POINTER;
+ }
+
+ Node *pNode = new (std::nothrow) Node(item);
+ if (pNode == NULL)
+ {
+ return E_OUTOFMEMORY;
+ }
+
+ item->AddRef();
+
+ EnterCriticalSection(&m_critSec);
+
+ Node *pBefore = m_anchor.prev;
+
+ Node *pAfter = pBefore->next;
+
+ pBefore->next = pNode;
+ pAfter->prev = pNode;
+
+ pNode->prev = pBefore;
+ pNode->next = pAfter;
+
+ m_nCount++;
+
+ LeaveCriticalSection(&m_critSec);
+
+ return S_OK;
+}
+
+HRESULT MFSampleQueue::Dequeue(IMFSample* *ppItem)
+{
+ if (ppItem == NULL)
+ {
+ return E_POINTER;
+ }
+
+ EnterCriticalSection(&m_critSec);
+
+ if (IsEmpty())
+ {
+ LeaveCriticalSection(&m_critSec);
+ return E_FAIL;
+ }
+
+ Node *pNode = m_anchor.next;
+
+ // The next node's previous is this node's previous.
+ pNode->next->prev = m_anchor.next->prev;
+
+ // The previous node's next is this node's next.
+ pNode->prev->next = pNode->next;
+
+ *ppItem = pNode->item;
+ delete pNode;
+
+ m_nCount--;
+
+ LeaveCriticalSection(&m_critSec);
+
+ return S_OK;
+}
+
+HRESULT MFSampleQueue::Clear()
+{
+ EnterCriticalSection(&m_critSec);
+
+ Node *n = m_anchor.next;
+
+ // Delete the nodes
+ while (n != &m_anchor)
+ {
+ if (n->item)
+ {
+ n->item->Release();
+ }
+
+ Node *tmp = n->next;
+ delete n;
+ n = tmp;
+ }
+
+ // Reset the anchor to point at itself
+ m_anchor.next = &m_anchor;
+ m_anchor.prev = &m_anchor;
+
+ m_nCount = 0;
+
+ LeaveCriticalSection(&m_critSec);
+
+ return S_OK;
+}
diff --git a/plugins/pluginWinMF/internals/mf_sample_queue.h b/plugins/pluginWinMF/internals/mf_sample_queue.h
new file mode 100644
index 0000000..b42ecde
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_sample_queue.h
@@ -0,0 +1,81 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_SAMPLE_QUEUE_H
+#define PLUGIN_WIN_MF_SAMPLE_QUEUE_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+
// Thread-safe FIFO of IMFSample pointers, implemented as a doubly-linked
// list with a self-referencing sentinel node ("anchor"). The queue holds a
// reference on every queued sample (taken in Queue, released in Clear or
// transferred to the caller in Dequeue). Reference-counted like a COM
// object; created with refcount 1 and destroyed via Release().
class MFSampleQueue : public IUnknown
{
protected:

    // Nodes in the linked list
    struct Node
    {
        Node *prev;
        Node *next;
        IMFSample* item; // not AddRef'd by the node itself; owned by the queue

        Node() : prev(NULL), next(NULL)
        {
        }

        Node(IMFSample* item) : prev(NULL), next(NULL)
        {
            this->item = item;
        }

        IMFSample* Item() const { return item; }
    };


protected:
    Node m_anchor;            // sentinel: next == head, prev == tail; points at itself when empty
    long m_nCount;            // number of queued samples
    CRITICAL_SECTION m_critSec; // guards the list and m_nCount

private:
    long m_nRefCount;         // COM-style reference count

public:

    MFSampleQueue();
    virtual ~MFSampleQueue();

    // IUnknown
    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
    STDMETHODIMP_(ULONG) AddRef();
    STDMETHODIMP_(ULONG) Release();

    HRESULT Queue(IMFSample* item);
    HRESULT Dequeue(IMFSample* *ppItem);
    HRESULT Clear();

    // NOTE(review): these read shared state without taking m_critSec -
    // presumably callers are expected to tolerate stale values; confirm.
    inline BOOL IsEmpty() const { return m_anchor.next == &m_anchor; }
    inline long Count() { return m_nCount; }
};


#endif /* PLUGIN_WIN_MF_SAMPLE_QUEUE_H */
+
+
+#endif /* PLUGIN_WIN_MF_SAMPLE_QUEUE_H */ \ No newline at end of file
diff --git a/plugins/pluginWinMF/internals/mf_utils.cxx b/plugins/pluginWinMF/internals/mf_utils.cxx
new file mode 100644
index 0000000..d1f326c
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_utils.cxx
@@ -0,0 +1,2104 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_utils.h"
+#include "mf_codec.h"
+
+#include "tinymedia/tmedia_common.h"
+
+#include "tsk_debug.h"
+
+#include <KS.h>/* KS.H must be included before codecapi.H */
+#include <Codecapi.h>
+#include <initguid.h>
+#include <wmcodecdsp.h>
+#include <d3d9.h>
+#include <assert.h>
+
+
+#ifdef _MSC_VER
+#pragma comment(lib, "strmiids.lib")
+#pragma comment(lib, "wmcodecdspuuid.lib")
+#pragma comment(lib, "d3d9")
+#endif
+
+#if !defined(PLUGIN_MF_DISABLE_CODECS)
+// Must be "0" to use "Microsoft"/"Intel Quick Sync" MFT codecs. Testing: When set to "1", libx264 and FFmpeg will be used.
+// Metropolis code (G2J.COM TelePresence client) has "PLUGIN_MF_DISABLE_CODECS=1" because of interop issues against Tandberg and Intel QuickSync H.264 implementations.
+# define PLUGIN_MF_DISABLE_CODECS 1
+#endif
+#if !defined(PLUGIN_MF_DISABLE_MS_H264_ENCODER)
+// MS H.264 encoder produces artifacts when bundled with the producer. Disable until we found why this happens.
+// What is strange is that NVIDIA CUDA H.264 decoder doesn't produce artifacts when decoding MS frames while FFmpeg and MS decoder do.
+// To encode with MS and decode with CUDA:
+// - Force "bMFEncoderIsRegistered" value to "FALSE" in plugin_win_mf_producer_video.cxx
+// Metropolis code (G2J.COM TelePresence) has "PLUGIN_MF_DISABLE_MS_H264_ENCODER=1" beacause Microsoft H.264 not fully tested against Tandberg, Polycom, Hartallo...
+# define PLUGIN_MF_DISABLE_MS_H264_ENCODER 1
+#endif
+
+#if !defined(PLUGIN_MF_DISABLE_ASYNC_DECODERS)
+// Not fully tested
+# define PLUGIN_MF_DISABLE_ASYNC_DECODERS 1
+#endif
+
// ---- MFUtils process-wide state (no locking: assumed init-once usage) ----

// Whether Startup() completed successfully.
BOOL MFUtils::g_bStarted = FALSE;

// Host OS version captured by Startup() via GetVersionEx().
DWORD MFUtils::g_dwMajorVersion = -1;
DWORD MFUtils::g_dwMinorVersion = -1;

// Cached results of the lazy low-latency H.264 MFT probe
// (see IsLowLatencyH264Supported()).
BOOL MFUtils::g_bLowLatencyH264Checked = FALSE;
BOOL MFUtils::g_bLowLatencyH264Supported = FALSE;
BOOL MFUtils::g_bLowLatencyH264SupportsMaxSliceSize = FALSE;

// Cached results of the lazy Direct3D 9 probe (see IsD3D9Supported()).
BOOL MFUtils::g_bD3D9Checked = FALSE;
BOOL MFUtils::g_bD3D9Supported = FALSE;

// Fixed node identifiers used when building MF topologies.
const TOPOID MFUtils::g_ullTopoIdSinkMain = 111;
const TOPOID MFUtils::g_ullTopoIdSinkPreview = 222;
const TOPOID MFUtils::g_ullTopoIdSource = 333;
const TOPOID MFUtils::g_ullTopoIdVideoProcessor = 444;

// Preferred VideoSubTypes
// Maps Doubango chroma formats to MF video subtypes, listed in order of
// preference (planar YUV first, RGB as fallback).
static const VideoSubTypeGuidPair PreferredVideoSubTypeGuidPairs[] =
{
    { tmedia_chroma_yuv420p, MFVideoFormat_I420 },
    { tmedia_chroma_nv12, MFVideoFormat_NV12 },
    { tmedia_chroma_uyvy422, MFVideoFormat_UYVY },
    { tmedia_chroma_yuyv422, MFVideoFormat_YUY2 },
    /* TODO: Add more YUV formats */
    { tmedia_chroma_rgb565le, MFVideoFormat_RGB565 },
    { tmedia_chroma_bgr24, MFVideoFormat_RGB24 },
    { tmedia_chroma_rgb32, MFVideoFormat_RGB32 },
};
static const tsk_size_t PreferredVideoSubTypeGuidPairsCount = sizeof(PreferredVideoSubTypeGuidPairs)/sizeof(PreferredVideoSubTypeGuidPairs[0]);

// Video Processor
DEFINE_GUID(CLSID_VideoProcessorMFT,
    0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, 0xc9, 0x82);

// {4BE8D3C0-0515-4A37-AD55-E4BAE19AF471}
DEFINE_GUID(CLSID_MF_INTEL_H264EncFilter, // Intel Quick Sync Encoder
0x4be8d3c0, 0x0515, 0x4a37, 0xad, 0x55, 0xe4, 0xba, 0xe1, 0x9a, 0xf4, 0x71);

// {0855C9AC-BC6F-4371-8954-671CCD4EC16F}
DEFINE_GUID(CLSID_MF_INTEL_H264DecFilter, // Intel Quick Sync Decoder
0x0855c9ac, 0xbc6f, 0x4371, 0x89, 0x54, 0x67, 0x1c, 0xcd, 0x4e, 0xc1, 0x6f);

#if WINVER < 0x0602/* From "sdkddkver.h" and defines the SDK version not the host */
// 6ca50344-051a-4ded-9779-a43305165e35
DEFINE_GUID(CLSID_CMSH264EncoderMFT, // MS H.264 encoder
0x6ca50344, 0x051a, 0x4ded, 0x97, 0x79, 0xa4, 0x33, 0x05, 0x16, 0x5e, 0x35);
#endif /* WINVER */

// Host OS checks based on the version captured in Startup().
#define IsWin7_OrLater(dwMajorVersion, dwMinorVersion) ( (dwMajorVersion > 6) || ( (dwMajorVersion == 6) && (dwMinorVersion >= 1) ) )
#define IsWin8_OrLater(dwMajorVersion, dwMinorVersion) ( (dwMajorVersion > 6) || ( (dwMajorVersion == 6) && (dwMinorVersion >= 2) ) )
+
+
// One-time process initialization: COM (MTA) + Media Foundation, plus a
// snapshot of the host OS version used by the IsWin7/8_OrLater macros.
// Not thread-safe (plain flag, no lock) - assumed called from init code.
HRESULT MFUtils::Startup()
{
    if(!g_bStarted)
    {
        HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
        // 0x80010106 is RPC_E_CHANGED_MODE: COM already initialized with a
        // different threading model, which happens when called from managed
        // code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
        if(SUCCEEDED(hr) || hr == 0x80010106)
        {
            hr = MFStartup(MF_VERSION);
        }
        g_bStarted = SUCCEEDED(hr);

        // Capture the OS version for the IsWin7/8_OrLater checks.
        OSVERSIONINFO osvi;
        ZeroMemory(&osvi, sizeof(OSVERSIONINFO));
        osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
        GetVersionEx(&osvi);
        g_dwMajorVersion = osvi.dwMajorVersion;
        g_dwMinorVersion = osvi.dwMinorVersion;

        return hr;
    }
    return S_OK;
}
+
+HRESULT MFUtils::Shutdown()
+{
+ if(g_bStarted)
+ {
+ g_bStarted = false;
+ return S_OK;
+ }
+ return S_OK;
+}
+
// Lazily probes whether hardware Direct3D 9 rendering is available by
// creating a windowed HAL device against the desktop window. The result is
// cached in g_bD3D9Supported/g_bD3D9Checked so the probe runs only once.
BOOL MFUtils::IsD3D9Supported()
{
    if (MFUtils::g_bD3D9Checked)
    {
        return MFUtils::g_bD3D9Supported; // cached result
    }
    MFUtils::g_bD3D9Checked = TRUE;
    HRESULT hr = S_OK;
    IDirect3D9* pD3D = NULL;
    D3DDISPLAYMODE mode = { 0 };
    D3DPRESENT_PARAMETERS pp = {0};
    IDirect3DDevice9* pDevice = NULL;

    CHECK_HR(hr = MFUtils::Startup());

    if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
    {
        CHECK_HR(hr = E_OUTOFMEMORY);
    }

    // Current display mode of the default adapter.
    hr = pD3D->GetAdapterDisplayMode(
        D3DADAPTER_DEFAULT,
        &mode
        );
    if (FAILED(hr))
    {
        goto bail;
    }

    // Can the HAL render X8R8G8B8 windowed on this adapter?
    hr = pD3D->CheckDeviceType(
        D3DADAPTER_DEFAULT,
        D3DDEVTYPE_HAL,
        mode.Format,
        D3DFMT_X8R8G8B8,
        TRUE    // windowed
        );
    if (FAILED(hr))
    {
        goto bail;
    }
    // Minimal windowed present parameters; the desktop window is only a
    // placeholder device window for the probe.
    pp.BackBufferFormat = D3DFMT_X8R8G8B8;
    pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
    pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
    pp.Windowed = TRUE;
    pp.hDeviceWindow = GetDesktopWindow();
    hr = pD3D->CreateDevice(
        D3DADAPTER_DEFAULT,
        D3DDEVTYPE_HAL,
        pp.hDeviceWindow,
        D3DCREATE_HARDWARE_VERTEXPROCESSING,
        &pp,
        &pDevice
        );
    if (FAILED(hr))
    {
        goto bail;
    }

    // Everythings is OK
    MFUtils::g_bD3D9Supported = TRUE;
    TSK_DEBUG_INFO("D3D9 supported");

bail:
    if (!MFUtils::g_bD3D9Supported) {
        TSK_DEBUG_WARN("D3D9 not supported");
    }
    SafeRelease(&pDevice);
    SafeRelease(&pD3D);
    return MFUtils::g_bD3D9Supported;
}
+
+// Probes (once) whether both a low-latency H.264 MFT encoder and decoder are
+// usable on this machine, by creating them and initializing for 1080p@30.
+// The result is cached in g_bLowLatencyH264Checked/g_bLowLatencyH264Supported,
+// and the encoder's max-slice-size capability is recorded as a side effect.
+BOOL MFUtils::IsLowLatencyH264Supported()
+{
+    if (MFUtils::g_bLowLatencyH264Checked) {
+        return MFUtils::g_bLowLatencyH264Supported; // cached result
+    }
+
+#if PLUGIN_MF_DISABLE_CODECS
+    MFUtils::g_bLowLatencyH264Checked = TRUE;
+    MFUtils::g_bLowLatencyH264Supported = FALSE;
+#else
+    Startup();
+
+    HRESULT hr = S_OK;
+    IMFTransform *pEncoderMFT = NULL;
+    IMFTransform *pDecoderMFT = NULL;
+    MFCodecVideoH264* pEncoderCodec = NULL;
+    MFCodecVideoH264* pDecoderCodec = NULL;
+
+    static const BOOL IsEncoderYes = TRUE;
+
+    // Encoder
+    hr = MFUtils::GetBestCodec(IsEncoderYes, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pEncoderMFT);
+    if (FAILED(hr)) {
+        TSK_DEBUG_INFO("No low latency H.264 encoder");
+        goto bail;
+    }
+
+    // Decoder
+    hr = MFUtils::GetBestCodec(!IsEncoderYes, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pDecoderMFT);
+    if (FAILED(hr)) {
+        TSK_DEBUG_INFO("No low latency H.264 decoder");
+        goto bail;
+    }
+
+    // Make sure both encoder and decoder are working well. Check encoding/decoding 1080p@30 would work.
+
+    TSK_DEBUG_INFO("Probing H.264 MFT encoder...");
+    pEncoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder, pEncoderMFT);
+    if (!pEncoderCodec) {
+        CHECK_HR(hr = E_FAIL);
+    }
+    CHECK_HR(hr = pEncoderCodec->Initialize(
+        30, // FPS
+        1920, // WIDTH
+        1080, // HEIGHT
+        tmedia_get_video_bandwidth_kbps_2(1920, 1080, 30) * 1024) // BITRATE
+        );
+    // FIX: assign the result to 'hr' so a failure here is actually reported.
+    // Without the assignment, CHECK_HR() jumped to 'bail' while 'hr' still held
+    // the previous S_OK, flagging low-latency H.264 as supported by mistake.
+    CHECK_HR(hr = pEncoderCodec->IsSetSliceMaxSizeInBytesSupported(MFUtils::g_bLowLatencyH264SupportsMaxSliceSize));
+
+    TSK_DEBUG_INFO("Probing H.264 MFT decoder...");
+    pDecoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder, pDecoderMFT);
+    if (!pDecoderCodec) {
+        CHECK_HR(hr = E_FAIL);
+    }
+    CHECK_HR(hr = pDecoderCodec->Initialize(
+        30, // FPS
+        1920, // WIDTH
+        1080 // HEIGHT
+        ));
+
+bail:
+    MFUtils::g_bLowLatencyH264Checked = TRUE;
+    MFUtils::g_bLowLatencyH264Supported = SUCCEEDED(hr) ? TRUE : FALSE;
+    SafeRelease(&pEncoderMFT);
+    SafeRelease(&pEncoderCodec);
+    SafeRelease(&pDecoderMFT);
+    SafeRelease(&pDecoderCodec);
+#endif /* PLUGIN_MF_DISABLE_CODECS */
+
+    return MFUtils::g_bLowLatencyH264Supported;
+}
+
+// Whether the probed low-latency H.264 encoder accepts a maximum slice size
+// (set by IsLowLatencyH264Supported() during probing). Only meaningful when
+// low-latency H.264 is supported at all.
+BOOL MFUtils::IsLowLatencyH264SupportsMaxSliceSize()
+{
+    if (!MFUtils::IsLowLatencyH264Supported()) {
+        return FALSE;
+    }
+    return MFUtils::g_bLowLatencyH264SupportsMaxSliceSize ? TRUE : FALSE;
+}
+
+// Reports whether the given MFT is asynchronous (MF_TRANSFORM_ASYNC attribute).
+// This query is best-effort: any failure to read the attribute is treated as
+// "not async" and the function still returns S_OK.
+HRESULT MFUtils::IsAsyncMFT(
+    IMFTransform *pMFT, // The MFT to check
+    BOOL* pbIsAsync // Whether the MFT is Async
+    )
+{
+    if (!pbIsAsync || !pMFT) {
+        return E_POINTER;
+    }
+
+    IMFAttributes *pAttributes = NULL;
+    UINT32 nIsAsync = 0;
+    HRESULT hr = S_OK;
+
+    hr = pMFT->GetAttributes(&pAttributes);
+    if (SUCCEEDED(hr)) {
+        hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nIsAsync);
+    }
+
+    // Never fails: just say not Async
+    hr = S_OK;
+
+    *pbIsAsync = !!nIsAsync;
+
+    // FIX: GetAttributes() returns an AddRef'ed interface; it used to be leaked
+    // because this function had no release before returning.
+    SafeRelease(&pAttributes);
+    return hr;
+}
+
+// Unlocks an asynchronous MFT so it can be used outside a topology.
+// Both "no attribute store" and "MF_TRANSFORM_ASYNC not set/readable" simply
+// mean there is nothing to unlock; those cases report S_OK.
+HRESULT MFUtils::UnlockAsyncMFT(
+    IMFTransform *pMFT // The MFT to unlock
+    )
+{
+    IMFAttributes *pAttributes = NULL;
+    UINT32 nValue = 0;
+    HRESULT hr = S_OK;
+
+    if (SUCCEEDED(pMFT->GetAttributes(&pAttributes))
+        && SUCCEEDED(pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nValue))
+        && nValue == TRUE) {
+        // Asynchronous MFTs must be explicitly unlocked before use.
+        CHECK_HR(hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE));
+    }
+
+bail:
+    SafeRelease(&pAttributes);
+    return hr;
+}
+//-------------------------------------------------------------------
+// CreatePCMAudioType
+//
+// Builds an IMFMediaType describing an uncompressed PCM audio format.
+// On success *ppType receives an AddRef'ed media type the caller must
+// Release(); on failure the partially built type is released here.
+//-------------------------------------------------------------------
+
+HRESULT MFUtils::CreatePCMAudioType(
+    UINT32 sampleRate, // Samples per second
+    UINT32 bitsPerSample, // Bits per sample
+    UINT32 cChannels, // Number of channels
+    IMFMediaType **ppType // Receives a pointer to the media type.
+    )
+{
+    IMFMediaType *pType = NULL;
+
+    // Derived PCM packing values.
+    const UINT32 blockAlign = cChannels * (bitsPerSample / 8);
+    const UINT32 bytesPerSecond = blockAlign * sampleRate;
+
+    // Create the empty media type, then set each attribute; any failure
+    // short-circuits the remaining steps.
+    HRESULT hr = MFCreateMediaType(&pType);
+
+    if (SUCCEEDED(hr)) {
+        hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+    }
+    if (SUCCEEDED(hr)) {
+        hr = pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
+    }
+    if (SUCCEEDED(hr)) {
+        hr = pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, cChannels);
+    }
+    if (SUCCEEDED(hr)) {
+        hr = pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sampleRate);
+    }
+    if (SUCCEEDED(hr)) {
+        hr = pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, blockAlign);
+    }
+    if (SUCCEEDED(hr)) {
+        hr = pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerSecond);
+    }
+    if (SUCCEEDED(hr)) {
+        hr = pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample);
+    }
+    if (SUCCEEDED(hr)) {
+        // Uncompressed audio: every sample stands on its own.
+        hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+    }
+
+    if (SUCCEEDED(hr)) {
+        // Hand the caller its own reference; the local one is dropped below.
+        *ppType = pType;
+        (*ppType)->AddRef();
+    }
+
+    SafeRelease(&pType);
+    return hr;
+}
+
+
+//-------------------------------------------------------------------
+// CreateVideoType
+//
+// Creates a media type that describes a video subtype
+// format.
+// The type is always marked uncompressed/progressive; the caller owns
+// the reference returned in ppType and must Release() it.
+//-------------------------------------------------------------------
+HRESULT MFUtils::CreateVideoType(
+ const GUID* subType, // video subType
+ IMFMediaType **ppType, // Receives a pointer to the media type.
+ UINT32 unWidth, // Video width (0 to ignore)
+ UINT32 unHeight // Video height (0 to ignore)
+ )
+{
+ HRESULT hr = S_OK;
+
+ IMFMediaType *pType = NULL;
+
+ CHECK_HR(hr = MFCreateMediaType(&pType));
+
+ CHECK_HR(hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+
+ CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, *subType));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // UnCompressed
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE)); // UnCompressed
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+ // Frame size is optional: both dimensions must be non-zero to be applied.
+ if(unWidth > 0 && unHeight > 0)
+ {
+ CHECK_HR(hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, unWidth, unHeight));
+ }
+
+ // Give the caller its own reference; the local one is dropped below.
+ *ppType = pType;
+ (*ppType)->AddRef();
+
+bail:
+ SafeRelease(&pType);
+ return hr;
+}
+
+//-------------------------------------------------------------------
+// Name: ValidateVideoFormat
+// Description: Validates a media type for this sink.
+// Accepts any video subtype but requires major type == video and
+// progressive interlace mode. Returns MF_E_INVALIDMEDIATYPE otherwise.
+//-------------------------------------------------------------------
+HRESULT MFUtils::ValidateVideoFormat(IMFMediaType *pmt)
+{
+ GUID major_type = GUID_NULL;
+ GUID subtype = GUID_NULL;
+ MFVideoInterlaceMode interlace = MFVideoInterlace_Unknown;
+ UINT32 val = 0; // currently unused
+ BOOL bFoundMatchingSubtype = FALSE; // only used by the disabled check below
+
+ HRESULT hr = S_OK;
+
+ // Major type must be video.
+ CHECK_HR(hr = pmt->GetGUID(MF_MT_MAJOR_TYPE, &major_type));
+
+ if (major_type != MFMediaType_Video)
+ {
+ CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
+ }
+
+ // Subtype must be one of the subtypes in our global list.
+
+ // Get the subtype GUID.
+ CHECK_HR(hr = pmt->GetGUID(MF_MT_SUBTYPE, &subtype));
+
+#if 0
+ // Look for the subtype in our list of accepted types.
+ // NOTE: this whitelist is disabled, so any video subtype is accepted and
+ // 'subtype'/'bFoundMatchingSubtype' are effectively unused.
+ for (DWORD i = 0; i < g_NumVideoSubtypes; i++)
+ {
+ if (subtype == *g_VideoSubtypes[i])
+ {
+ bFoundMatchingSubtype = TRUE;
+ break;
+ }
+ }
+ if (!bFoundMatchingSubtype)
+ {
+ CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
+ }
+#endif
+
+ // Video must be progressive frames.
+ CHECK_HR(hr = pmt->GetUINT32(MF_MT_INTERLACE_MODE, (UINT32*)&interlace));
+ if (interlace != MFVideoInterlace_Progressive)
+ {
+ CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
+ }
+
+bail:
+ return hr;
+}
+
+// Derives an uncompressed video type from an encoded one: clones every
+// attribute of pType (so extended color info is kept), swaps the subtype for
+// the requested uncompressed one, marks samples independent and defaults the
+// pixel aspect ratio to 1:1 when the source did not specify one.
+// On success *ppType receives an AddRef'ed type the caller must Release().
+HRESULT MFUtils::ConvertVideoTypeToUncompressedType(
+    IMFMediaType *pType, // Pointer to an encoded video type.
+    const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
+    IMFMediaType **ppType // Receives a matching uncompressed video type.
+    )
+{
+    IMFMediaType *pUncompressed = NULL;
+    GUID majortype = { 0 };
+    MFRatio par = { 0 };
+
+    HRESULT hr = pType->GetMajorType(&majortype);
+    if (majortype != MFMediaType_Video) {
+        // Also taken when GetMajorType() failed (majortype left as GUID_NULL).
+        return MF_E_INVALIDMEDIATYPE;
+    }
+
+    // Clone the source type, then patch the fields that differ.
+    if (SUCCEEDED(hr)) {
+        hr = MFCreateMediaType(&pUncompressed);
+    }
+    if (SUCCEEDED(hr)) {
+        hr = pType->CopyAllItems(pUncompressed);
+    }
+    if (SUCCEEDED(hr)) {
+        hr = pUncompressed->SetGUID(MF_MT_SUBTYPE, subtype);
+    }
+    if (SUCCEEDED(hr)) {
+        // Uncompressed means all samples are independent.
+        hr = pUncompressed->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+    }
+
+    if (SUCCEEDED(hr)) {
+        hr = MFGetAttributeRatio(
+            pUncompressed,
+            MF_MT_PIXEL_ASPECT_RATIO,
+            (UINT32*)&par.Numerator,
+            (UINT32*)&par.Denominator
+            );
+        if (FAILED(hr)) {
+            // No PAR on the original type: default to square pixels.
+            hr = MFSetAttributeRatio(
+                pUncompressed,
+                MF_MT_PIXEL_ASPECT_RATIO,
+                1, 1
+                );
+        }
+    }
+
+    if (SUCCEEDED(hr)) {
+        *ppType = pUncompressed;
+        (*ppType)->AddRef();
+    }
+
+    SafeRelease(&pUncompressed);
+    return hr;
+}
+
+// Creates an empty IMFSample backed by a single in-memory buffer of up to
+// cbData bytes. On success *ppSample receives an AddRef'ed sample the caller
+// must Release(); on failure both partial objects are released here.
+HRESULT MFUtils::CreateMediaSample(
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
+)
+{
+ assert(ppSample);
+
+ HRESULT hr = S_OK;
+
+ IMFSample *pSample = NULL;
+ IMFMediaBuffer *pBuffer = NULL;
+
+ CHECK_HR(hr = MFCreateSample(&pSample));
+ CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
+ // The sample keeps its own reference to the buffer.
+ CHECK_HR(hr = pSample->AddBuffer(pBuffer));
+
+ // Give the caller its own reference; the local ones are dropped below.
+ *ppSample = pSample;
+ (*ppSample)->AddRef();
+
+bail:
+ SafeRelease(&pSample);
+ SafeRelease(&pBuffer);
+ return hr;
+}
+
+// Gets the best encoder and decoder. Up to the caller to release the returned pointer
+// Selection order for H.264: the Intel Quick Sync filter is tried first; then
+// the MFTEnumEx() candidates (already sorted/filtered by Media Foundation) are
+// walked and the first acceptable one is activated.
+HRESULT MFUtils::GetBestCodec(
+    BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
+    const GUID& mediaType, // The MediaType
+    const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
+    const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
+    IMFTransform **ppMFT // Receives the decoder/encoder transform
+    )
+{
+    assert(ppMFT);
+    assert(mediaType == MFMediaType_Video || mediaType == MFMediaType_Audio); // only audio and video codecs are support for now
+
+    *ppMFT = NULL;
+
+    HRESULT hr = S_OK;
+
+    if (outputFormat == MFVideoFormat_H264 || inputFormat == MFVideoFormat_H264) {
+        if (bEncoder) {
+            // Force using Intel Quick Sync Encoder
+            hr = CoCreateInstance(CLSID_MF_INTEL_H264EncFilter, NULL,
+                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
+            if (SUCCEEDED(hr) && *ppMFT) {
+                TSK_DEBUG_INFO("Using Intel Quick Sync encoder :)");
+                return hr;
+            }
+            TSK_DEBUG_INFO("Not using Intel Quick Sync encoder :(");
+        }
+        else {
+#if !PLUGIN_MF_DISABLE_ASYNC_DECODERS // Intel Quick Sync decoder is asynchronous
+            // Force using Intel Quick Sync Decoder
+            hr = CoCreateInstance(CLSID_MF_INTEL_H264DecFilter, NULL,
+                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
+#endif
+            if (SUCCEEDED(hr) && *ppMFT) {
+                TSK_DEBUG_INFO("Using Intel Quick Sync decoder :)");
+                return hr;
+            }
+            TSK_DEBUG_INFO("Not using Intel Quick Sync decoder :(");
+        }
+    }
+
+    UINT32 count = 0;
+    BOOL bAsync = FALSE;
+    GUID guidActivateCLSID = GUID_NULL;
+
+    IMFActivate **ppActivate = NULL;
+
+    MFT_REGISTER_TYPE_INFO infoInput = { mediaType, inputFormat };
+    MFT_REGISTER_TYPE_INFO infoOutput = { mediaType, outputFormat };
+
+    UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
+        MFT_ENUM_FLAG_SYNCMFT |
+        MFT_ENUM_FLAG_ASYNCMFT |
+        MFT_ENUM_FLAG_LOCALMFT |
+        MFT_ENUM_FLAG_TRANSCODE_ONLY | // Otherwise Intel Quick Sync will not be listed
+        MFT_ENUM_FLAG_SORTANDFILTER;
+
+    hr = MFTEnumEx(
+        (mediaType == MFMediaType_Video) ? (bEncoder ? MFT_CATEGORY_VIDEO_ENCODER : MFT_CATEGORY_VIDEO_DECODER) : (bEncoder ? MFT_CATEGORY_AUDIO_ENCODER : MFT_CATEGORY_AUDIO_DECODER),
+        unFlags,
+        (inputFormat == GUID_NULL) ? NULL : &infoInput, // Input type
+        (outputFormat == GUID_NULL) ? NULL : &infoOutput, // Output type
+        &ppActivate,
+        &count
+        );
+
+    for (UINT32 i = 0; i < count; ++i) {
+        SafeRelease(ppMFT); // drop the candidate rejected by the previous iteration
+        hr = ppActivate[i]->GetGUID(MFT_TRANSFORM_CLSID_Attribute, &guidActivateCLSID);
+        if (FAILED(hr)) {
+            continue;
+        }
+
+        if (bEncoder) {
+            // Encoder
+            if (guidActivateCLSID == CLSID_CMSH264EncoderMFT) { // MS H.264 encoder ?
+                if (PLUGIN_MF_DISABLE_MS_H264_ENCODER) {
+                    // Microsoft H.264 encoder is disabled
+                    TSK_DEBUG_INFO("MS H.264 encoder is disabled...skipping");
+                    continue;
+                }
+                if (!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion)) {
+                    // Microsoft H.264 encoder doesn't support low latency on Win7.
+                    TSK_DEBUG_INFO("MS H.264 encoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
+                    continue;
+                }
+            }
+        }
+        else {
+            // Decoder
+            if (guidActivateCLSID == CLSID_CMSH264DecoderMFT) { // MS H.264 decoder ?
+                if (!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion)) {
+                    // Microsoft H.264 decoder doesn't support low latency on Win7.
+                    TSK_DEBUG_INFO("MS H.264 decoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
+                    continue;
+                }
+            }
+        }
+
+        hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppMFT));
+        if (SUCCEEDED(hr) && *ppMFT) { // For now we just get the first one. FIXME: Give HW encoders/decoders higher priority.
+            if (bEncoder) {
+                // Encoder
+            }
+            else {
+                // Decoder
+#if PLUGIN_MF_DISABLE_ASYNC_DECODERS
+                hr = IsAsyncMFT(*ppMFT, &bAsync);
+                if (bAsync) {
+                    TSK_DEBUG_INFO("Skipping async decoder because not supported yet");
+                    // FIX: release the rejected decoder right away. Relying on
+                    // the SafeRelease() at the top of the loop meant that when
+                    // the *last* candidate was async it stayed in *ppMFT and
+                    // was returned (S_OK) despite having been skipped.
+                    SafeRelease(ppMFT);
+                    continue; // Async decoders not supported yet
+                }
+#endif
+            }
+            break;
+        }
+    }
+
+    for (UINT32 i = 0; i < count; i++) {
+        ppActivate[i]->Release();
+    }
+    CoTaskMemFree(ppActivate);
+
+    return *ppMFT ? S_OK : MF_E_NOT_FOUND;
+}
+
+// Probes for the Video Processor MFT by trying to CoCreate it. The result is
+// reported through *pbSupported; a failed creation is NOT treated as an error
+// (the function still returns S_OK), only a NULL out-pointer is.
+HRESULT MFUtils::IsVideoProcessorSupported(BOOL *pbSupported)
+{
+ HRESULT hr = S_OK;
+ IMFTransform *pTransform = NULL;
+
+ if(!pbSupported)
+ {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pTransform));
+ *pbSupported = SUCCEEDED(hr);
+ if(FAILED(hr))
+ {
+ hr = S_OK; // not an error
+ }
+
+bail:
+ // The probe instance is only needed to test availability.
+ SafeRelease(&pTransform);
+ return hr;
+}
+
+// Enumerates the registered video-processor MFTs matching the requested
+// input/output formats and activates the first one that comes up (candidates
+// are already sorted by Media Foundation). The caller must Release()
+// *ppProcessor; MF_E_NOT_FOUND is returned when nothing could be activated.
+HRESULT MFUtils::GetBestVideoProcessor(
+    const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
+    const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
+    IMFTransform **ppProcessor // Receives the video processor
+    )
+{
+    assert(ppProcessor);
+
+    *ppProcessor = NULL;
+
+    IMFActivate **ppActivate = NULL;
+    UINT32 nCount = 0;
+
+    MFT_REGISTER_TYPE_INFO infoInput = { MFMediaType_Video, inputFormat };
+    MFT_REGISTER_TYPE_INFO infoOutput = { MFMediaType_Video, outputFormat };
+
+    // Prefer hardware MFTs; let MF sort and filter the candidates.
+    const UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
+        MFT_ENUM_FLAG_SYNCMFT |
+        MFT_ENUM_FLAG_LOCALMFT |
+        MFT_ENUM_FLAG_SORTANDFILTER;
+
+    HRESULT hr = MFTEnumEx(
+        MFT_CATEGORY_VIDEO_PROCESSOR,
+        unFlags,
+        &infoInput, // Input type
+        &infoOutput, // Output type
+        &ppActivate,
+        &nCount
+        );
+
+    for (UINT32 i = 0; i < nCount; ++i) {
+        hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppProcessor));
+        if (SUCCEEDED(hr) && *ppProcessor) {
+            break;
+        }
+        SafeRelease(ppProcessor); // discard a half-activated candidate
+    }
+
+    // The activation array must always be released, found or not.
+    for (UINT32 i = 0; i < nCount; ++i) {
+        ppActivate[i]->Release();
+    }
+    CoTaskMemFree(ppActivate);
+
+    return *ppProcessor ? S_OK : MF_E_NOT_FOUND;
+}
+
+// Add an transform node to a topology.
+// On success *ppNode receives an AddRef'ed pointer to the new node that the
+// caller must Release(); on failure *ppNode is left NULL.
+HRESULT MFUtils::AddTransformNode(
+ IMFTopology *pTopology, // Topology.
+ IMFTransform *pMFT, // MFT.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+ )
+{
+ *ppNode = NULL;
+
+ IMFTopologyNode *pNode = NULL;
+ HRESULT hr = S_OK;
+
+ // Create the node.
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &pNode));
+ // Set the object pointer.
+ CHECK_HR(hr = pNode->SetObject(pMFT));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+ // FALSE: the object may be shut down when the node is removed.
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+ // Add the node to the topology.
+ CHECK_HR(hr = pTopology->AddNode(pNode));
+
+ // Return the pointer to the caller.
+ *ppNode = pNode;
+ (*ppNode)->AddRef();
+
+bail:
+ SafeRelease(&pNode);
+ return hr;
+}
+
+// Sets the IMFStreamSink pointer on an output node.
+// The node's object may be either an IMFActivate for the media sink or the
+// stream sink itself; in the activate case the sink is created here and the
+// node's object pointer is replaced by the resolved stream sink.
+HRESULT MFUtils::BindOutputNode(
+    IMFTopologyNode *pNode // The Node
+    )
+{
+    assert(pNode);
+
+    HRESULT hr = S_OK;
+    IUnknown *pNodeObject = NULL;
+    IMFActivate *pActivate = NULL;
+    IMFStreamSink *pStream = NULL;
+    IMFMediaSink *pSink = NULL;
+
+    // Get the node's object pointer.
+    CHECK_HR(hr = pNode->GetObject(&pNodeObject));
+
+    // The object pointer should be one of the following:
+    // 1. An activation object for the media sink.
+    // 2. The stream sink.
+
+    // If it's #2, then we're already done.
+
+    // First, check if it's an activation object.
+    // FIX: must NOT wrap this QueryInterface() in CHECK_HR(): a failed QI is
+    // the normal way to detect case #2, but CHECK_HR() jumped to 'bail' on
+    // failure, making the stream-sink fallback below unreachable.
+    hr = pNodeObject->QueryInterface(IID_PPV_ARGS(&pActivate));
+
+    if (SUCCEEDED(hr))
+    {
+        DWORD dwStreamID = 0;
+
+        // The object pointer is an activation object.
+
+        // Try to create the media sink.
+        hr = pActivate->ActivateObject(IID_PPV_ARGS(&pSink));
+
+        // Look up the stream ID. (Default to zero.)
+
+        if (SUCCEEDED(hr))
+        {
+            dwStreamID = MFGetAttributeUINT32(pNode, MF_TOPONODE_STREAMID, 0);
+        }
+
+        // Now try to get or create the stream sink.
+
+        // Check if the media sink already has a stream sink with the requested ID.
+
+        if (SUCCEEDED(hr))
+        {
+            hr = pSink->GetStreamSinkById(dwStreamID, &pStream);
+            if (FAILED(hr))
+            {
+                // Try to add a new stream sink.
+                hr = pSink->AddStreamSink(dwStreamID, NULL, &pStream);
+            }
+        }
+
+        // Replace the node's object pointer with the stream sink.
+        if (SUCCEEDED(hr))
+        {
+            hr = pNode->SetObject(pStream);
+        }
+    }
+    else
+    {
+        // Not an activation object. Is it a stream sink?
+        hr = pNodeObject->QueryInterface(IID_PPV_ARGS(&pStream));
+    }
+
+bail:
+    SafeRelease(&pNodeObject);
+    SafeRelease(&pActivate);
+    SafeRelease(&pStream);
+    SafeRelease(&pSink);
+    return hr;
+}
+
+// Add an output node to a topology.
+// On success *ppNode receives an AddRef'ed pointer to the new node that the
+// caller must Release().
+HRESULT MFUtils::AddOutputNode(
+ IMFTopology *pTopology, // Topology.
+ IMFActivate *pActivate, // Media sink activation object.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode) // Receives the node pointer
+{
+ IMFTopologyNode *pNode = NULL;
+
+ HRESULT hr = S_OK;
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode));
+ CHECK_HR(hr = pNode->SetObject(pActivate));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+ // FALSE: the sink may be shut down when the node is removed.
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+ CHECK_HR(hr = pTopology->AddNode(pNode));
+
+ // Return the pointer to the caller.
+ *ppNode = pNode;
+ (*ppNode)->AddRef();
+
+bail:
+ SafeRelease(&pNode);
+ return hr;
+}
+
+// Add a source node to a topology
+// The node is described by the source, its presentation descriptor and the
+// stream descriptor of the stream to expose. On success *ppNode receives an
+// AddRef'ed pointer the caller must Release().
+HRESULT MFUtils::AddSourceNode(
+ IMFTopology *pTopology, // Topology.
+ IMFMediaSource *pSource, // Media source.
+ IMFPresentationDescriptor *pPD, // Presentation descriptor.
+ IMFStreamDescriptor *pSD, // Stream descriptor.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+ )
+{
+ IMFTopologyNode *pNode = NULL;
+
+ HRESULT hr = S_OK;
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &pNode));
+ // A source node is defined by these three attributes, not by SetObject().
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource));
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD));
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD));
+ CHECK_HR(hr = pTopology->AddNode(pNode));
+
+ // Return the pointer to the caller.
+ *ppNode = pNode;
+ (*ppNode)->AddRef();
+
+bail:
+ SafeRelease(&pNode);
+ return hr;
+}
+
+// Create the topology
+//
+// [source] -> (Transform) -> [SinkMain]
+// \-> (SinkPreview)
+//
+// Builds the partial topology for the first selected stream whose major type
+// matches pIputTypeMain; all other streams are deselected. Optional video
+// converters (video processor MFT, or resizer/color-converter DSPs) are
+// inserted when the source cannot natively deliver the requested type.
+// On success *ppTopo receives an AddRef'ed topology the caller must Release().
+HRESULT MFUtils::CreateTopology(
+    IMFMediaSource *pSource, // Media source
+    IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
+    IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
+    IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
+    IMFMediaType *pIputTypeMain, // Main sink input MediaType
+    IMFTopology **ppTopo // Receives the newly created topology
+    )
+{
+    IMFTopology *pTopology = NULL;
+    IMFPresentationDescriptor *pPD = NULL;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFMediaTypeHandler *pHandler = NULL;
+    IMFTopologyNode *pNodeSource = NULL;
+    IMFTopologyNode *pNodeSinkMain = NULL;
+    IMFTopologyNode *pNodeSinkPreview = NULL;
+    IMFTopologyNode *pNodeTransform = NULL;
+    IMFTopologyNode *pNodeTee = NULL;
+    IMFMediaType *pMediaType = NULL;
+    IMFTransform *pVideoProcessor = NULL;
+    IMFTopologyNode *pNodeVideoProcessor = NULL;
+    IMFTransform *pConvFrameRate = NULL;
+    IMFTransform *pConvSize = NULL;
+    IMFTransform *pConvColor = NULL;
+    IMFTopologyNode *pNodeConvFrameRate = NULL;
+    IMFTopologyNode *pNodeConvSize = NULL;
+    IMFTopologyNode *pNodeConvColor = NULL;
+    IMFMediaType *pTransformInputType = NULL;
+    IMFMediaType *pSinkMainInputType = NULL;
+    const IMFTopologyNode *pcNodeBeforeSinkMain = NULL;
+
+    HRESULT hr = S_OK;
+    DWORD cStreams = 0;
+    BOOL bSourceFound = FALSE;
+    BOOL bSupportedSize = FALSE;
+    BOOL bSupportedFps = FALSE;
+    BOOL bSupportedFormat = FALSE;
+    BOOL bVideoProcessorSupported = FALSE;
+    GUID inputMajorType, inputSubType;
+
+    CHECK_HR(hr = IsVideoProcessorSupported(&bVideoProcessorSupported));
+    CHECK_HR(hr = pIputTypeMain->GetMajorType(&inputMajorType));
+
+    CHECK_HR(hr = MFCreateTopology(&pTopology));
+    CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+    CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+    for (DWORD i = 0; i < cStreams; i++)
+    {
+        BOOL fSelected = FALSE;
+        GUID majorType;
+
+        CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+        CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+        CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+        if (majorType == inputMajorType && fSelected)
+        {
+            CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pNodeSource));
+            CHECK_HR(hr = pNodeSource->SetTopoNodeID(MFUtils::g_ullTopoIdSource));
+            CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivateMain, 0, &pNodeSinkMain));
+            CHECK_HR(hr = pNodeSinkMain->SetTopoNodeID(MFUtils::g_ullTopoIdSinkMain));
+            CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkMain)); // To avoid MF_E_TOPO_SINK_ACTIVATES_UNSUPPORTED
+
+            //
+            // Create preview
+            //
+
+            if (pSinkActivatePreview)
+            {
+                CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivatePreview, 0, &pNodeSinkPreview));
+                CHECK_HR(hr = pNodeSinkPreview->SetTopoNodeID(MFUtils::g_ullTopoIdSinkPreview));
+                CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkPreview));
+
+                // A tee node splits the stream between the main and preview branches.
+                CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TEE_NODE, &pNodeTee));
+                CHECK_HR(hr = pTopology->AddNode(pNodeTee));
+            }
+
+            //
+            // Create converters
+            //
+
+            if (majorType == MFMediaType_Video)
+            {
+                // Even when size matches the topology could add a resizer which doesn't keep ratio when resizing while video processor does.
+                if (!bVideoProcessorSupported)
+                {
+                    hr = IsSupported(
+                        pPD,
+                        i,
+                        pIputTypeMain,
+                        &bSupportedSize,
+                        &bSupportedFps,
+                        &bSupportedFormat);
+                }
+
+                CHECK_HR(hr = pIputTypeMain->GetGUID(MF_MT_SUBTYPE, &inputSubType));
+
+                if (!bSupportedSize || !bSupportedFps || !bSupportedFormat)
+                {
+                    // Use video processor single MFT or 3 different MFTs
+                    if (!pVideoProcessor)
+                    {
+                        hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+                            CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pVideoProcessor));
+                    }
+                    if (!pVideoProcessor)
+                    {
+                        // Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) supports I420 only
+                        if (!bSupportedSize && !pConvSize && inputSubType == MFVideoFormat_I420)
+                        {
+                            hr = CoCreateInstance(CLSID_CResizerDMO, NULL,
+                                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvSize));
+                        }
+                        // Frame Rate Converter DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) supports neither NV12 nor I420
+                        /*if(!bSupportedFps && !pConvFrameRate)
+                        {
+                            hr = CoCreateInstance(CLSID_CFrameRateConvertDmo, NULL,
+                                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvFrameRate));
+                        }*/
+                        // Color Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819079(v=vs.85).aspx) supports both NV12 and I420
+                        if (!bSupportedFormat && !pConvColor)
+                        {
+                            hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
+                                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvColor));
+                        }
+                    }
+                }
+                else
+                {
+                    // MediaType supported
+                    CHECK_HR(hr = pHandler->SetCurrentMediaType(pIputTypeMain));
+                }
+
+                if (pVideoProcessor && !pNodeVideoProcessor)
+                {
+                    CHECK_HR(hr = AddTransformNode(pTopology, pVideoProcessor, 0, &pNodeVideoProcessor));
+                    CHECK_HR(hr = pNodeVideoProcessor->SetTopoNodeID(MFUtils::g_ullTopoIdVideoProcessor));
+                }
+                if (pConvColor && !pNodeConvColor)
+                {
+                    CHECK_HR(hr = AddTransformNode(pTopology, pConvColor, 0, &pNodeConvColor));
+                }
+                if (pConvFrameRate && !pNodeConvFrameRate)
+                {
+                    CHECK_HR(hr = AddTransformNode(pTopology, pConvFrameRate, 0, &pNodeConvFrameRate));
+                }
+                if (pConvSize && !pNodeConvSize)
+                {
+                    CHECK_HR(hr = AddTransformNode(pTopology, pConvSize, 0, &pNodeConvSize));
+                }
+            } // if(majorType == MFMediaType_Video)
+
+
+            //
+            // Set media type
+            //
+
+            if (pTransform)
+            {
+                CHECK_HR(hr = AddTransformNode(pTopology, pTransform, 0, &pNodeTransform));
+                hr = pTransform->GetInputCurrentType(0, &pTransformInputType);
+                if (FAILED(hr))
+                {
+                    // No negotiated input type yet: fall back to the requested one.
+                    pTransformInputType = pIputTypeMain;
+                    pTransformInputType->AddRef();
+                    hr = S_OK;
+                }
+                if (pVideoProcessor)
+                {
+                    CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pTransformInputType, 0));
+                }
+                else
+                {
+                    if (pConvColor)
+                    {
+                        /*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pTransformInputType, 0));
+                    }
+                    if (pConvFrameRate)
+                    {
+                        /*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pTransformInputType, 0));
+                    }
+                    if (pConvSize)
+                    {
+                        // Transform requires NV12
+                        //Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) doesn't support NV12
+                        //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pTransformInputType, 0));
+                    }
+                }
+            }
+            else
+            {
+                hr = pNodeSinkMain->GetInputPrefType(0, &pSinkMainInputType);
+                if (FAILED(hr))
+                {
+                    // The sink expressed no preference: fall back to the requested type.
+                    pSinkMainInputType = pIputTypeMain;
+                    pSinkMainInputType->AddRef();
+                    hr = S_OK;
+                }
+                if (SUCCEEDED(hr))
+                {
+                    if (pVideoProcessor)
+                    {
+                        CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pSinkMainInputType, 0));
+                    }
+                    else
+                    {
+                        //!\ MUST NOT SET OUTPUT TYPE
+                        if (pConvColor)
+                        {
+                            //*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pSinkMainInputType, 0));
+                        }
+                        if (pConvFrameRate)
+                        {
+                            //*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pSinkMainInputType, 0));
+                        }
+                        if (pConvSize)
+                        {
+                            //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pSinkMainInputType, 0));
+                        }
+                    }
+                }
+            }
+
+            //
+            // Connect
+            //
+
+            if (pNodeTee)
+            {
+                // Connect(Source -> Tee)
+                CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeTee, 0));
+
+                // Connect(Tee -> SinkPreview)
+                CHECK_HR(hr = pNodeTee->ConnectOutput(1, pNodeSinkPreview, 0));
+
+                // Connect(Tee ->(Processors)
+                if (pVideoProcessor)
+                {
+                    CHECK_HR(hr = pNodeTee->ConnectOutput(0, pNodeVideoProcessor, 0));
+                    pcNodeBeforeSinkMain = pNodeVideoProcessor;
+                }
+                else if (pNodeConvFrameRate || pNodeConvSize || pNodeConvColor)
+                {
+                    // NOTE(review): the converter nodes are passed here as
+                    // (FrameRate, Color, Size) but as (FrameRate, Size, Color)
+                    // in the no-tee branch below - one of the two orders is
+                    // probably wrong; verify against ConnectConverters().
+                    CHECK_HR(hr = ConnectConverters(
+                        pNodeTee,
+                        0,
+                        pNodeConvFrameRate,
+                        pNodeConvColor,
+                        pNodeConvSize
+                        ));
+                    pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
+                }
+                else
+                {
+                    pcNodeBeforeSinkMain = pNodeTee;
+                }
+            }
+            else
+            {
+                // Connect(Source -> (Processors))
+                if (pVideoProcessor)
+                {
+                    CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeVideoProcessor, 0));
+                    pcNodeBeforeSinkMain = pNodeVideoProcessor;
+                }
+                // FIX: the condition used to test 'pNodeConvFrameRate' twice and
+                // never 'pNodeConvSize', so a topology needing only a resizer
+                // (and no tee) skipped the converter chain entirely.
+                else if (pNodeConvFrameRate || pNodeConvSize || pNodeConvColor)
+                {
+                    CHECK_HR(hr = ConnectConverters(
+                        pNodeSource,
+                        0,
+                        pNodeConvFrameRate,
+                        pNodeConvSize,
+                        pNodeConvColor
+                        ));
+                    pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
+                }
+                else
+                {
+                    pcNodeBeforeSinkMain = pNodeSource;
+                }
+            }
+
+
+            if (pNodeTransform)
+            {
+                // Connect(X->Transform)
+                CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeTransform, 0));
+                pcNodeBeforeSinkMain = pNodeTransform;
+            }
+
+            // Connect(X -> SinkMain)
+            CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeSinkMain, 0));
+
+            bSourceFound = TRUE;
+            break;
+        }
+        else
+        {
+            CHECK_HR(hr = pPD->DeselectStream(i));
+        }
+        SafeRelease(&pSD);
+        SafeRelease(&pHandler);
+    }
+
+    *ppTopo = pTopology;
+    (*ppTopo)->AddRef();
+
+bail:
+    SafeRelease(&pTopology);
+    SafeRelease(&pNodeSource);
+    SafeRelease(&pNodeSinkMain);
+    SafeRelease(&pNodeSinkPreview);
+    SafeRelease(&pNodeTransform);
+    SafeRelease(&pNodeTee);
+    SafeRelease(&pPD);
+    SafeRelease(&pSD);
+    SafeRelease(&pHandler);
+    SafeRelease(&pMediaType);
+    SafeRelease(&pTransformInputType);
+    SafeRelease(&pSinkMainInputType);
+
+    SafeRelease(&pVideoProcessor);
+    SafeRelease(&pNodeVideoProcessor);
+    SafeRelease(&pConvFrameRate);
+    SafeRelease(&pConvSize);
+    SafeRelease(&pConvColor);
+    SafeRelease(&pNodeConvFrameRate);
+    SafeRelease(&pNodeConvSize);
+    SafeRelease(&pNodeConvColor);
+
+    if (!bSourceFound)
+    {
+        TSK_DEBUG_ERROR("No source node found");
+        return E_NOT_SET;
+    }
+
+    return hr;
+}
+
+// Creates a fully loaded topology from the input partial topology.
+HRESULT MFUtils::ResolveTopology(
+ IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
+ IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
+ IMFTopology *pCurrentTopo /*= NULL*/ // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
+ )
+{
+ assert(ppOutputTopo && pInputTopo);
+
+ HRESULT hr = S_OK;
+ IMFTopoLoader* pTopoLoader = NULL;
+
+ // Ensure the out-parameter is well defined on every failure path.
+ *ppOutputTopo = NULL;
+
+ CHECK_HR(hr = MFCreateTopoLoader(&pTopoLoader));
+ // Load() resolves the partial topology, re-using nodes from pCurrentTopo when given.
+ CHECK_HR(hr = pTopoLoader->Load(pInputTopo, ppOutputTopo, pCurrentTopo));
+
+bail:
+ SafeRelease(&pTopoLoader);
+ return hr;
+}
+
+// Retrieves the object attached to the topology node identified by
+// qwTopoNodeID. GetObject() returns an AddRef'ed IUnknown, so on success the
+// caller owns the reference stored in *ppObject and must release it.
+// NOTE(review): ppObject is cast straight to IUnknown** - callers are assumed
+// to pass a COM interface pointer address; verify at each call site.
+HRESULT MFUtils::FindNodeObject(
+ IMFTopology *pInputTopo, // The Topology containing the node to find
+ TOPOID qwTopoNodeID, //The identifier for the node
+ void** ppObject // Receives the Object
+ )
+{
+ assert(pInputTopo && ppObject);
+
+ *ppObject = NULL;
+
+ IMFTopologyNode *pNode = NULL;
+ HRESULT hr = S_OK;
+
+ CHECK_HR(hr = pInputTopo->GetNodeByID(qwTopoNodeID, &pNode));
+ CHECK_HR(hr = pNode->GetObject((IUnknown**)ppObject));
+
+bail:
+ SafeRelease(&pNode);
+ return hr;
+}
+
+// Create an activation object for a renderer, based on the stream media type.
+// Audio streams get the audio renderer (SAR), video streams get the video
+// renderer (EVR) clipped to hVideoWindow; any other major type fails with
+// E_FAIL. The caller must Release() *ppActivate.
+HRESULT MFUtils::CreateMediaSinkActivate(
+ IMFStreamDescriptor *pSourceSD, // Pointer to the stream descriptor.
+ HWND hVideoWindow, // Handle to the video clipping window.
+ IMFActivate **ppActivate
+)
+{
+ HRESULT hr = S_OK;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFActivate *pActivate = NULL;
+
+ // Get the media type handler for the stream.
+ CHECK_HR(hr = pSourceSD->GetMediaTypeHandler(&pHandler));
+ // Get the major media type.
+ GUID guidMajorType;
+ CHECK_HR(hr = pHandler->GetMajorType(&guidMajorType));
+
+ // Create an IMFActivate object for the renderer, based on the media type.
+ if (MFMediaType_Audio == guidMajorType)
+ {
+ // Create the audio renderer.
+ CHECK_HR(hr = MFCreateAudioRendererActivate(&pActivate));
+ }
+ else if (MFMediaType_Video == guidMajorType)
+ {
+ // Create the video renderer.
+ CHECK_HR(hr = MFCreateVideoRendererActivate(hVideoWindow, &pActivate));
+ }
+ else
+ {
+ // Unknown stream type.
+ hr = E_FAIL;
+ // Optionally, you could deselect this stream instead of failing.
+ }
+ if (FAILED(hr))
+ {
+ goto bail;
+ }
+
+ // Return IMFActivate pointer to caller.
+ *ppActivate = pActivate;
+ (*ppActivate)->AddRef();
+
+bail:
+ SafeRelease(&pHandler);
+ SafeRelease(&pActivate);
+ return hr;
+}
+
+// Set source output media type
+// Applies pMediaType to every selected stream whose major type matches the
+// given type, and deselects all other streams of the source's presentation.
+HRESULT MFUtils::SetMediaType(
+ IMFMediaSource *pSource, // Media source.
+ IMFMediaType* pMediaType // Media Type.
+ )
+{
+ assert(pSource && pMediaType);
+
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+
+ HRESULT hr = S_OK;
+ DWORD cStreams = 0;
+ GUID inputMajorType;
+
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+ CHECK_HR(hr = pMediaType->GetMajorType(&inputMajorType));
+
+ for (DWORD i = 0; i < cStreams; i++)
+ {
+ BOOL fSelected = FALSE;
+ GUID majorType;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+ if (majorType == inputMajorType && fSelected)
+ {
+ CHECK_HR(hr = pHandler->SetCurrentMediaType(pMediaType));
+ }
+ else
+ {
+ CHECK_HR(hr = pPD->DeselectStream(i));
+ }
+ // Per-iteration release: the next loop turn re-acquires both interfaces.
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ }
+
+
+bail:
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+
+ return hr;
+}
+
+// Builds one renderer branch (source node -> sink activation -> output node)
+// in pTopology for every selected stream of pSource; video branches render
+// into hVideoWnd. Streams that are not selected are simply skipped.
+HRESULT MFUtils::SetVideoWindow(
+    IMFTopology *pTopology, // Topology.
+    IMFMediaSource *pSource, // Media source.
+    HWND hVideoWnd // Window for video playback.
+    )
+{
+    HRESULT hr = S_OK;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFPresentationDescriptor *pPD = NULL;
+    IMFActivate *pSinkActivate = NULL;
+    IMFTopologyNode *pSourceNode = NULL;
+    IMFTopologyNode *pOutputNode = NULL;
+    DWORD cStreams = 0, iStream;
+
+    CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+    CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+    for (iStream = 0; iStream < cStreams; ++iStream)
+    {
+        BOOL fSelected = FALSE;
+
+        CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(iStream, &fSelected, &pSD));
+
+        if (fSelected)
+        {
+            // Create the media sink activation object.
+            CHECK_HR(hr = CreateMediaSinkActivate(pSD, hVideoWnd, &pSinkActivate));
+            // Add a source node for this stream.
+            CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pSourceNode));
+            // Create the output node for the renderer.
+            CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, &pOutputNode));
+            // Connect the source node to the output node.
+            CHECK_HR(hr = pSourceNode->ConnectOutput(0, pOutputNode, 0));
+        }
+        // else: If not selected, don't add the branch.
+
+        // FIX: release the per-stream interfaces before the next iteration.
+        // They used to be released only once at 'bail', which leaked one
+        // reference per additional selected stream.
+        SafeRelease(&pSD);
+        SafeRelease(&pSinkActivate);
+        SafeRelease(&pSourceNode);
+        SafeRelease(&pOutputNode);
+    }
+
+bail:
+    SafeRelease(&pPD);
+    SafeRelease(&pSD);
+    SafeRelease(&pSinkActivate);
+    SafeRelease(&pSourceNode);
+    SafeRelease(&pOutputNode);
+    return hr;
+}
+
+// Run the session
+// Sets (or reloads) the topology on the session, starts playback and peeks
+// at the first pending session event (non-blocking) to surface immediate
+// startup failures.
+HRESULT MFUtils::RunSession(
+	IMFMediaSession *pSession, // Session to run
+	IMFTopology *pTopology // The topology
+	)
+{
+	assert(pSession && pTopology);
+
+	IMFMediaEvent *pEvent = NULL;
+
+	PROPVARIANT var;
+	PropVariantInit(&var);
+
+	MediaEventType met;
+	HRESULT hrStatus = S_OK;
+	HRESULT hr = S_OK;
+	CHECK_HR(hr = pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pTopology)); // MFSESSION_SETTOPOLOGY_IMMEDIATE required to update (reload) topology when media type change
+	CHECK_HR(hr = pSession->Start(&GUID_NULL, &var));
+
+	// Check first event
+	hr = pSession->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pEvent);
+	if(hr == MF_E_NO_EVENTS_AVAILABLE || hr == MF_E_MULTIPLE_SUBSCRIBERS){ // MF_E_MULTIPLE_SUBSCRIBERS means already listening
+		hr = S_OK;
+		goto bail;
+	}
+	if(pEvent) {
+		CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+	}
+	else {
+		hrStatus = hr;
+	}
+	if (FAILED(hrStatus))
+	{
+		// FIX: pEvent can be NULL here (GetEvent failed with a code other
+		// than the two handled above); the old code dereferenced it
+		// unconditionally, crashing instead of reporting the error.
+		if (pEvent) {
+			CHECK_HR(hr = pEvent->GetType(&met));
+			TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+		}
+		else {
+			TSK_DEBUG_ERROR("Session error: 0x%x\n", hrStatus);
+		}
+		hr = hrStatus;
+		goto bail;
+	}
+
+bail:
+	SafeRelease(&pEvent);
+	return hr;
+}
+
+// Stop session
+// Tears down the source first (Stop, then Shutdown) and only then shuts
+// the session down; the ordering is mandatory. Always reports S_OK since
+// shutdown is best-effort.
+HRESULT MFUtils::ShutdownSession(
+	IMFMediaSession *pSession, // The Session
+	IMFMediaSource *pSource // Source to shutdown (optional)
+	)
+{
+	// The source MUST be torn down before the session.
+	if (pSource) {
+		pSource->Stop(); // best-effort: result intentionally ignored
+		pSource->Shutdown();
+	}
+	if (pSession) {
+		pSession->Shutdown(); // best-effort: result intentionally ignored
+	}
+	return S_OK;
+}
+
+// Pause session
+// Pauses the (optional) source first, then the session itself; only the
+// session's Pause() result is reported to the caller.
+HRESULT MFUtils::PauseSession(
+	IMFMediaSession *pSession, // The session
+	IMFMediaSource *pSource // Source to pause (optional)
+	)
+{
+	if (!pSession) {
+		return E_INVALIDARG; // a session is mandatory, the source is not
+	}
+	if (pSource) {
+		pSource->Pause(); // best-effort: result intentionally ignored
+	}
+	return pSession->Pause();
+}
+
+// Returns -1 if none is supported
+// Walks "subTypes" in preference order (index 0 = most preferred). For each
+// candidate it scans every *selected* stream whose major type matches
+// "mediaType" and checks the stream's native media types for that subtype.
+// Returns the index (into "subTypes") of the first subtype the source can
+// deliver natively, or -1 when nothing matches.
+INT MFUtils::GetSupportedSubTypeIndex(
+	IMFMediaSource *pSource, // The source
+	const GUID& mediaType, // The MediaType
+	const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
+	)
+{
+	assert(pSource);
+
+	IMFPresentationDescriptor *pPD = NULL;
+	IMFStreamDescriptor *pSD = NULL;
+	IMFMediaTypeHandler *pHandler = NULL;
+	IMFMediaType *pMediaType = NULL;
+
+	INT nIndex = -1;
+	HRESULT hr = S_OK;
+	DWORD cStreams = 0, cMediaTypesCount;
+	GUID majorType, subType;
+	BOOL fSelected;
+
+	CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+	CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+	// All three loops stop as soon as a match is found (the "nIndex == -1"
+	// guards in the loop conditions).
+	for (UINT subTypesIndex = 0; subTypesIndex < subTypesCount && nIndex == -1; ++subTypesIndex)
+	{
+		for (DWORD cStreamIndex = 0; cStreamIndex < cStreams && nIndex == -1; ++cStreamIndex)
+		{
+			fSelected = FALSE;
+			CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
+			if(fSelected)
+			{
+				CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+				CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+				if(majorType == mediaType)
+				{
+					CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+					for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount && nIndex == -1; ++cMediaTypesIndex)
+					{
+						CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+						CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+						if (subTypes[subTypesIndex].fourcc == subType)
+						{
+							nIndex = subTypesIndex;
+							// pMediaType is still held here; it is released
+							// at "bail" below.
+							break;
+						}
+						SafeRelease(&pMediaType);
+					}
+				}
+			}
+
+			// Per-stream release; SafeRelease NULLs the pointers so the
+			// releases at "bail" stay harmless.
+			SafeRelease(&pSD);
+			SafeRelease(&pHandler);
+		}
+	}
+
+bail:
+	SafeRelease(&pMediaType);
+	SafeRelease(&pPD);
+	SafeRelease(&pSD);
+	SafeRelease(&pHandler);
+
+	return nIndex;
+}
+
+// Checks whether stream "cStreamIndex" of the presentation natively offers
+// a media type matching the requested size, frame rate AND subtype. All
+// three output flags are set together (TRUE only when a single native type
+// matches every criterion).
+HRESULT MFUtils::IsSupported(
+	IMFPresentationDescriptor *pPD,
+	DWORD cStreamIndex,
+	UINT32 nWidth,
+	UINT32 nHeight,
+	UINT32 nFps,
+	const GUID& guidFormat,
+	BOOL* pbSupportedSize,
+	BOOL* pbSupportedFps,
+	BOOL* pbSupportedFormat
+	)
+{
+	HRESULT hr = S_OK;
+
+	BOOL fSelected = FALSE;
+	IMFStreamDescriptor *pSD = NULL;
+	IMFMediaTypeHandler *pHandler = NULL;
+	IMFMediaType *pMediaType = NULL;
+	UINT32 _nWidth = 0, _nHeight = 0, numeratorFps = 0, denominatorFps = 0;
+	GUID subType;
+	DWORD cMediaTypesCount;
+
+	if(!pPD || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	*pbSupportedSize = FALSE;
+	*pbSupportedFps = FALSE;
+	*pbSupportedFormat = FALSE;
+
+	CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
+	if(fSelected)
+	{
+		CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+		CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+		for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex)
+		{
+			CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+			CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
+			CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+			if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps)))
+			{
+				// MF_MT_FRAME_RATE is optional: assume 30/1.
+				// FIX: reset hr, otherwise a missing frame-rate attribute on
+				// the last inspected type made the function return failure
+				// even though the check itself completed.
+				numeratorFps = 30;
+				denominatorFps = 1;
+				hr = S_OK;
+			}
+
+			// all must match for the same stream
+			if(_nWidth == nWidth && _nHeight == nHeight && subType == guidFormat && (numeratorFps/denominatorFps) == nFps)
+			{
+				*pbSupportedSize = TRUE;
+				*pbSupportedFormat = TRUE;
+				*pbSupportedFps = TRUE;
+				break; // pMediaType released at "bail"
+			}
+
+			SafeRelease(&pMediaType);
+		}
+		SafeRelease(&pHandler);
+	}
+
+bail:
+	SafeRelease(&pSD);
+	SafeRelease(&pHandler);
+	SafeRelease(&pMediaType);
+
+	return hr;
+}
+
+// Convenience overload: extracts size, frame rate and subtype from
+// pMediaType and forwards to the explicit-arguments IsSupported() overload.
+HRESULT MFUtils::IsSupported(
+	IMFPresentationDescriptor *pPD,
+	DWORD cStreamIndex,
+	IMFMediaType* pMediaType,
+	BOOL* pbSupportedSize,
+	BOOL* pbSupportedFps,
+	BOOL* pbSupportedFormat
+	)
+{
+	HRESULT hr = S_OK;
+
+	// FIX: dropped the unused local "nFps" from the original.
+	UINT32 nWidth = 0, nHeight = 0, numeratorFps = 30, denominatorFps = 1;
+	GUID subType;
+
+	if(!pPD || !pMediaType || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &nWidth, &nHeight));
+	CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+	if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps)))
+	{
+		// MF_MT_FRAME_RATE is optional: assume 30/1 and clear the failure.
+		numeratorFps = 30;
+		denominatorFps = 1;
+		hr = S_OK;
+	}
+
+	CHECK_HR(hr = IsSupported(
+		pPD,
+		cStreamIndex,
+		nWidth,
+		nHeight,
+		(numeratorFps / denominatorFps),
+		subType,
+		pbSupportedSize,
+		pbSupportedFps,
+		pbSupportedFormat
+		));
+bail:
+	return hr;
+}
+
+// Checks whether the media type currently accepted by the input of "pNode"
+// (resolved through an activation object, a media sink, an MFT, or the
+// node's preferred input type, in that order) is natively offered by
+// stream "cStreamIndex" of the presentation.
+HRESULT MFUtils::IsSupportedByInput(
+	IMFPresentationDescriptor *pPD,
+	DWORD cStreamIndex,
+	IMFTopologyNode *pNode,
+	BOOL* pbSupportedSize,
+	BOOL* pbSupportedFps,
+	BOOL* pbSupportedFormat
+	)
+{
+	HRESULT hr = S_OK;
+
+	IMFMediaType *pMediaType = NULL;
+	IUnknown* pObject = NULL;
+	IMFActivate *pActivate = NULL;
+	IMFMediaSink *pMediaSink = NULL;
+	IMFTransform *pTransform = NULL;
+	IMFStreamSink *pStreamSink = NULL;
+	IMFMediaTypeHandler *pHandler = NULL;
+
+	if(!pPD || !pNode || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	CHECK_HR(hr = pNode->GetObject(&pObject));
+	// If the node holds an activation object, activate the real sink/MFT.
+	hr = pObject->QueryInterface(IID_PPV_ARGS(&pActivate));
+	if(SUCCEEDED(hr))
+	{
+		SafeRelease(&pObject);
+		hr = pActivate->ActivateObject(IID_IMFMediaSink, (void**)&pObject);
+		if(FAILED(hr))
+		{
+			hr = pActivate->ActivateObject(IID_IMFTransform, (void**)&pObject);
+		}
+	}
+
+	if(!pObject)
+	{
+		CHECK_HR(hr = E_NOINTERFACE);
+	}
+
+	hr = pObject->QueryInterface(IID_PPV_ARGS(&pMediaSink));
+	if(FAILED(hr))
+	{
+		hr = pObject->QueryInterface(IID_PPV_ARGS(&pTransform));
+	}
+
+	// Pick the input media type from the most specific interface available.
+	if(pMediaSink)
+	{
+		CHECK_HR(hr = pMediaSink->GetStreamSinkByIndex(0, &pStreamSink));
+		CHECK_HR(hr = pStreamSink->GetMediaTypeHandler(&pHandler));
+		CHECK_HR(hr = pHandler->GetCurrentMediaType(&pMediaType));
+	}
+	else if(pTransform)
+	{
+		CHECK_HR(hr = pTransform->GetInputCurrentType(0, &pMediaType));
+	}
+	else
+	{
+		CHECK_HR(hr = pNode->GetInputPrefType(0, &pMediaType));
+	}
+
+	CHECK_HR(hr = IsSupported(
+		pPD,
+		cStreamIndex,
+		pMediaType,
+		pbSupportedSize,
+		pbSupportedFps,
+		pbSupportedFormat
+		));
+
+bail:
+	SafeRelease(&pObject);
+	SafeRelease(&pActivate);
+	// FIX: pMediaSink and pTransform are AddRef'ed by the QueryInterface
+	// calls above but were never released, leaking one reference per call.
+	SafeRelease(&pMediaSink);
+	SafeRelease(&pTransform);
+	SafeRelease(&pMediaType);
+	SafeRelease(&pStreamSink);
+	SafeRelease(&pHandler);
+	return hr;
+}
+
+// Connects output "dwOutputIndex" of "pNode" through the optional converter
+// nodes in the fixed order frame-rate -> size -> color, skipping any NULL
+// converter while preserving that order.
+HRESULT MFUtils::ConnectConverters(
+	IMFTopologyNode *pNode,
+	DWORD dwOutputIndex,
+	IMFTopologyNode *pNodeConvFrameRate,
+	IMFTopologyNode *pNodeConvColor,
+	IMFTopologyNode *pNodeConvSize
+	)
+{
+	HRESULT hr = S_OK;
+
+	if(!pNode)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	{
+		// Converter chain order (NULL entries are simply skipped).
+		IMFTopologyNode* pChain[3] = { pNodeConvFrameRate, pNodeConvSize, pNodeConvColor };
+		IMFTopologyNode* pUpstream = pNode;
+		DWORD dwUpstreamOutput = dwOutputIndex;
+		for (int i = 0; i < 3; ++i)
+		{
+			if (pChain[i])
+			{
+				CHECK_HR(hr = pUpstream->ConnectOutput(dwUpstreamOutput, pChain[i], 0));
+				pUpstream = pChain[i];
+				dwUpstreamOutput = 0; // converters chain through their first output
+			}
+		}
+	}
+
+bail:
+	return hr;
+}
+
+// This function should be called only if VideoProcessor is not supported
+// Scores every native media type exposed by the source (subtype preference,
+// size distance, fps mismatch) and returns the closest match; lowest score
+// wins. Falls back to VGA@30fps when no usable subtype is found.
+HRESULT MFUtils::GetBestFormat(
+	IMFMediaSource *pSource,
+	const GUID *pSubType,
+	UINT32 nWidth,
+	UINT32 nHeight,
+	UINT32 nFps,
+	UINT32 *pnWidth,
+	UINT32 *pnHeight,
+	UINT32 *pnFps,
+	const VideoSubTypeGuidPair **ppSubTypeGuidPair
+	)
+{
+
+// Linear scan of the preferred-subtype table; sets _index to the position
+// of _guid, or -1 when absent.
+#define _FindPairByGuid(_guid, _index) { \
+	int _i; _index = -1; \
+	for (_i = 0; _i < PreferredVideoSubTypeGuidPairsCount; ++_i) { \
+		if (PreferredVideoSubTypeGuidPairs[_i].fourcc == _guid) { \
+			_index = _i; break; \
+		} \
+	} \
+}
+#if 0
+	*pnWidth = 640;
+	*pnHeight = 480;
+	*pnFps = 30;
+	return S_OK;
+#else
+	HRESULT hr = S_OK;
+	IMFPresentationDescriptor *pPD = NULL;
+	IMFStreamDescriptor *pSD = NULL;
+	IMFMediaTypeHandler *pHandler = NULL;
+	IMFMediaType *pMediaType = NULL;
+	DWORD cStreams = 0, cMediaTypesCount;
+	// FIX: _BestSubType is read at "bail" even when no media type was ever
+	// inspected; initialize it so that path deterministically takes the
+	// VGA@30fps fallback instead of reading an uninitialized GUID.
+	GUID majorType, subType, _BestSubType = GUID_NULL;
+	BOOL bFound = FALSE, fSelected;
+	UINT32 _nWidth, _nHeight, numeratorFps, denominatorFps, _nFps, _nScore, _nBestScore;
+	int PreferredVideoSubTypeGuidPairIndex;
+	static const UINT32 kSubTypeMismatchPad = _UI32_MAX >> 4;
+	static const UINT32 kFpsMismatchPad = _UI32_MAX >> 2;
+
+	if (!ppSubTypeGuidPair || !pSubType) {
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+	// The requested subtype must be one we know how to negotiate.
+	_FindPairByGuid(*pSubType, PreferredVideoSubTypeGuidPairIndex);
+	if (PreferredVideoSubTypeGuidPairIndex == -1) {
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+	*ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
+
+	_nBestScore = _UI32_MAX;
+	CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+	CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+
+	for (DWORD i = 0; i < cStreams; i++)
+	{
+		fSelected = FALSE;
+
+		CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+
+		if (fSelected)
+		{
+			CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+
+			CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+			if(majorType == MFMediaType_Video)
+			{
+				CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+
+				for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex)
+				{
+					CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+
+					CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+					// if(subType == *pSubType)
+					{
+						CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
+						CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps));
+						_nFps = (numeratorFps / denominatorFps);
+
+						// Score the subtype: exact match = 0, known-but-different
+						// pays a pad scaled by preference rank, unknown pays full pad.
+						if (subType == *pSubType) {
+							_nScore = 0;
+						}
+						else {
+							_FindPairByGuid(subType, PreferredVideoSubTypeGuidPairIndex);
+							if (PreferredVideoSubTypeGuidPairIndex == -1) {
+								_nScore = kSubTypeMismatchPad; // Not a must but important: If(!VideoProcess) then CLSID_CColorConvertDMO
+							}
+							else {
+								_nScore = kSubTypeMismatchPad >> (PreferredVideoSubTypeGuidPairsCount - PreferredVideoSubTypeGuidPairIndex);
+							}
+						}
+						_nScore += abs((int)(_nWidth - nWidth)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
+						_nScore += abs((int)(_nHeight - nHeight)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
+						_nScore += (_nFps == nFps) ? 0 : kFpsMismatchPad; // Fps is a must because without video processor no alternative exist (CLSID_CFrameRateConvertDmo doesn't support I420)
+
+						if (_nScore <= _nBestScore || !bFound)
+						{
+							*pnWidth = _nWidth;
+							*pnHeight = _nHeight;
+							*pnFps = _nFps;
+							bFound = TRUE;
+							_BestSubType = subType;
+							_nBestScore = _nScore;
+						}
+					}
+
+					SafeRelease(&pMediaType);
+				}
+			}
+		}
+
+		SafeRelease(&pHandler);
+		SafeRelease(&pSD);
+	}
+
+bail:
+	SafeRelease(&pPD);
+	SafeRelease(&pSD);
+	SafeRelease(&pHandler);
+	SafeRelease(&pMediaType);
+
+	_FindPairByGuid(_BestSubType, PreferredVideoSubTypeGuidPairIndex);
+	if (PreferredVideoSubTypeGuidPairIndex != -1) {
+		*ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
+	}
+	else /*if (_nBestScore > kSubTypeMismatchPad)*/ {
+		*pnWidth = 640;
+		*pnHeight = 480;
+		*pnFps = 30;
+		TSK_DEBUG_WARN("Failed to match subtype...using VGA@30fps"); // FIX: typo "math" -> "match"
+	}
+
+	return SUCCEEDED(hr) ? (bFound ? S_OK : E_NOT_SET): hr;
+#endif
+
+}
+
+// Returns the HWND of the current console window by temporarily giving the
+// console a unique title and locating the window that carries it.
+HWND MFUtils::GetConsoleHwnd(void)
+{
+	static const int kTitleBufSize = 1024; // console title buffer size
+	HWND hWnd; // handle returned to the caller
+	TCHAR szTempTitle[kTitleBufSize]; // fabricated "unique" title
+	TCHAR szSavedTitle[kTitleBufSize]; // original title, restored on exit
+
+	// Save the current console title.
+	GetConsoleTitle(szSavedTitle, kTitleBufSize);
+
+	// Build a title that is very unlikely to collide with another window.
+	wsprintf(szTempTitle, TEXT("%d/%d"),
+			GetTickCount(),
+			GetCurrentProcessId());
+
+	// Apply the fabricated title.
+	SetConsoleTitle(szTempTitle);
+
+	// Give the window manager time to propagate the title change.
+	Sleep(40);
+
+	// Locate the console window by its unique title.
+	hWnd = FindWindow(NULL, szTempTitle);
+
+	// Restore the original title.
+	SetConsoleTitle(szSavedTitle);
+
+	return(hWnd);
+}
diff --git a/plugins/pluginWinMF/internals/mf_utils.h b/plugins/pluginWinMF/internals/mf_utils.h
new file mode 100644
index 0000000..0819597
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_utils.h
@@ -0,0 +1,260 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_UTILS_H
+#define PLUGIN_WIN_MF_UTILS_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+
+#undef SafeRelease
+// Releases a COM interface through a pointer-to-pointer and resets it to
+// NULL, so calling SafeRelease() twice on the same pointer is a no-op.
+#define SafeRelease(ppT) \
+{ \
+	if (*ppT) \
+	{ \
+		(*ppT)->Release(); \
+		*ppT = NULL; \
+	} \
+} 
+
+#undef CHECK_HR
+// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
+// Evaluates (x) exactly once into a local HRESULT; on failure logs the code
+// and jumps to a "bail" label that MUST exist in the enclosing function.
+#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
+
+// Pairs a tmedia chroma value with the corresponding Media Foundation video
+// subtype GUID (FOURCC).
+typedef struct VideoSubTypeGuidPair
+{
+	enum tmedia_chroma_e chroma;
+	const GUID& fourcc;
+}
+VideoSubTypeGuidPair;
+
+// Static helper collection wrapping Media Foundation setup, media-type
+// negotiation, topology construction and session control for the plugin.
+class MFUtils
+{
+public:
+
+static HRESULT Startup();
+static HRESULT Shutdown();
+
+static BOOL IsD3D9Supported();
+static BOOL IsLowLatencyH264Supported();
+static BOOL IsLowLatencyH264SupportsMaxSliceSize();
+
+static HRESULT IsAsyncMFT(
+	IMFTransform *pMFT, // The MFT to check
+	BOOL* pbIsAsync // Whether the MFT is Async
+	);
+static HRESULT UnlockAsyncMFT(
+	IMFTransform *pMFT // The MFT to unlock
+	);
+
+static HRESULT CreatePCMAudioType(
+	UINT32 sampleRate, // Samples per second
+	UINT32 bitsPerSample, // Bits per sample
+	UINT32 cChannels, // Number of channels
+	IMFMediaType **ppType // Receives a pointer to the media type.
+	);
+static HRESULT CreateVideoType(
+	const GUID* subType, // video subType
+	IMFMediaType **ppType, // Receives a pointer to the media type.
+	UINT32 unWidth = 0, // Video width (0 to ignore)
+	UINT32 unHeight = 0 // Video height (0 to ignore)
+	);
+static HRESULT ConvertVideoTypeToUncompressedType(
+	IMFMediaType *pType, // Pointer to an encoded video type.
+	const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
+	IMFMediaType **ppType // Receives a matching uncompressed video type.
+	);
+static HRESULT CreateMediaSample(
+	DWORD cbData, // Maximum buffer size
+	IMFSample **ppSample // Receives the sample
+	);
+static HRESULT ValidateVideoFormat(
+	IMFMediaType *pmt
+	);
+static HRESULT IsVideoProcessorSupported(BOOL *pbSupported);
+static HRESULT GetBestVideoProcessor(
+	const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
+	const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
+	IMFTransform **ppProcessor // Receives the video processor
+	);
+static HRESULT GetBestCodec(
+	BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
+	const GUID& mediaType, // The MediaType
+	const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
+	const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
+	IMFTransform **ppMFT // Receives the decoder/encoder transform
+	);
+static HRESULT BindOutputNode(
+	IMFTopologyNode *pNode // The Node
+	);
+static HRESULT AddOutputNode(
+	IMFTopology *pTopology, // Topology.
+	IMFActivate *pActivate, // Media sink activation object.
+	DWORD dwId, // Identifier of the stream sink.
+	IMFTopologyNode **ppNode // Receives the node pointer.
+	);
+static HRESULT AddTransformNode(
+	IMFTopology *pTopology, // Topology.
+	IMFTransform *pMFT, // MFT.
+	DWORD dwId, // Identifier of the stream sink.
+	IMFTopologyNode **ppNode // Receives the node pointer.
+	);
+static HRESULT AddSourceNode(
+	IMFTopology *pTopology, // Topology.
+	IMFMediaSource *pSource, // Media source.
+	IMFPresentationDescriptor *pPD, // Presentation descriptor.
+	IMFStreamDescriptor *pSD, // Stream descriptor.
+	IMFTopologyNode **ppNode // Receives the node pointer.
+	);
+static HRESULT CreateTopology(
+	IMFMediaSource *pSource, // Media source
+	IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
+	IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
+	IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
+	IMFMediaType *pIputTypeMain, // Main sink input MediaType
+	IMFTopology **ppTopo // Receives the newly created topology
+	);
+static HRESULT ResolveTopology(
+	IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
+	IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
+	IMFTopology *pCurrentTopo = NULL // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
+	);
+static HRESULT FindNodeObject(
+	IMFTopology *pInputTopo, // The Topology containing the node to find
+	TOPOID qwTopoNodeID, //The identifier for the node
+	void** ppObject // Receives the Object
+	);
+static HRESULT CreateMediaSinkActivate(
+	IMFStreamDescriptor *pSourceSD, // Pointer to the stream descriptor.
+	HWND hVideoWindow, // Handle to the video clipping window.
+	IMFActivate **ppActivate
+);
+static HRESULT SetMediaType(
+	IMFMediaSource *pSource, // Media source.
+	IMFMediaType* pMediaType // Media Type.
+	);
+static HRESULT SetVideoWindow(
+	IMFTopology *pTopology, // Topology.
+	IMFMediaSource *pSource, // Media source.
+	HWND hVideoWnd // Window for video playback.
+	);
+static HRESULT RunSession(
+	IMFMediaSession *pSession, // Session to run
+	IMFTopology *pTopology // The toppology
+	);
+static HRESULT ShutdownSession(
+	IMFMediaSession *pSession, // The Session
+	IMFMediaSource *pSource = NULL // Source to shutdown (optional)
+	);
+static HRESULT PauseSession(
+	IMFMediaSession *pSession, // The session
+	IMFMediaSource *pSource = NULL// Source to pause (optional)
+	);
+static INT GetSupportedSubTypeIndex(
+	IMFMediaSource *pSource, // The source
+	const GUID& mediaType, // The MediaType
+	const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
+	);
+static HRESULT IsSupported(
+	IMFPresentationDescriptor *pPD,
+	DWORD cStreamIndex,
+	UINT32 nWidth,
+	UINT32 nHeight,
+	UINT32 nFps,
+	const GUID& guidFormat,
+	BOOL* pbSupportedSize,
+	BOOL* pbSupportedFps,
+	BOOL* pbSupportedFormat
+	);
+static HRESULT IsSupported(
+	IMFPresentationDescriptor *pPD,
+	DWORD cStreamIndex,
+	IMFMediaType* pMediaType,
+	BOOL* pbSupportedSize,
+	BOOL* pbSupportedFps,
+	BOOL* pbSupportedFormat
+	);
+static HRESULT IsSupportedByInput(
+	IMFPresentationDescriptor *pPD,
+	DWORD cStreamIndex,
+	IMFTopologyNode *pNode,
+	BOOL* pbSupportedSize,
+	BOOL* pbSupportedFps,
+	BOOL* pbSupportedFormat
+	);
+static HRESULT ConnectConverters(
+	IMFTopologyNode *pNode,
+	DWORD dwOutputIndex,
+	IMFTopologyNode *pNodeConvFrameRate,
+	IMFTopologyNode *pNodeConvColor,
+	IMFTopologyNode *pNodeConvSize
+	);
+static HRESULT GetBestFormat(
+	IMFMediaSource *pSource,
+	const GUID *pSubType,
+	UINT32 nWidth,
+	UINT32 nHeight,
+	UINT32 nFps,
+	UINT32 *pnWidth,
+	UINT32 *pnHeight,
+	UINT32 *pnFps,
+	const VideoSubTypeGuidPair **pSubTypeGuidPair
+	);
+
+static HWND GetConsoleHwnd(void);
+
+// Retrieves the node's underlying object and queries it for interface Q.
+template <class Q>
+static HRESULT GetTopoNodeObject(IMFTopologyNode *pNode, Q **ppObject)
+{
+	IUnknown *pUnk = NULL; // zero output
+
+	HRESULT hr = pNode->GetObject(&pUnk);
+	if (SUCCEEDED(hr))
+	{
+		// FIX: propagate the QueryInterface result. Previously a failed QI
+		// still returned the (successful) GetObject HRESULT, handing the
+		// caller S_OK together with a NULL *ppObject.
+		hr = pUnk->QueryInterface(IID_PPV_ARGS(ppObject));
+		pUnk->Release();
+	}
+	return hr;
+}
+
+private:
+	static BOOL g_bStarted; // MFUtils::Startup() already done?
+
+	static DWORD g_dwMajorVersion;
+	static DWORD g_dwMinorVersion;
+
+	static BOOL g_bLowLatencyH264Checked;
+	static BOOL g_bLowLatencyH264Supported;
+	static BOOL g_bLowLatencyH264SupportsMaxSliceSize;
+
+	static BOOL g_bD3D9Checked;
+	static BOOL g_bD3D9Supported;
+
+public:
+	// Well-known topology node identifiers used across the plugin.
+	static const TOPOID g_ullTopoIdSinkMain;
+	static const TOPOID g_ullTopoIdSinkPreview;
+	static const TOPOID g_ullTopoIdSource;
+	static const TOPOID g_ullTopoIdVideoProcessor;
+};
+
+#endif /* PLUGIN_WIN_MF_UTILS_H */
diff --git a/plugins/pluginWinMF/pluginWinMF.vcproj b/plugins/pluginWinMF/pluginWinMF.vcproj
new file mode 100644
index 0000000..c9f620b
--- /dev/null
+++ b/plugins/pluginWinMF/pluginWinMF.vcproj
@@ -0,0 +1,319 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9.00"
+ Name="pluginWinMF"
+ ProjectGUID="{E8596446-CB3A-4AD5-83C3-6562EE426494}"
+ RootNamespace="pluginWinMF"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="196613"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories=".;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyDAV\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;PLUGIN_WIN_MFP_EXPORTS;DEBUG_LEVEL=DEBUG_LEVEL_INFO;TINYDAV_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="3"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories=".;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyDAV\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;PLUGIN_WIN_MFP_EXPORTS;DEBUG_LEVEL=DEBUG_LEVEL_ERROR;TINYDAV_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="0"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="1"
+ GenerateDebugInformation="false"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\dllmain_mf.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_codec_h264.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_consumer_audio.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_consumer_video.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_converter_video.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_producer_audio.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_producer_video.cxx"
+ >
+ </File>
+ <Filter
+ Name="internals"
+ >
+ <File
+ RelativePath=".\internals\mf_codec.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_custom_src.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_devices.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_display_watcher.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_sample_grabber.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_sample_queue.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_utils.cxx"
+ >
+ </File>
+ </Filter>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\plugin_win_mf_config.h"
+ >
+ </File>
+ <Filter
+ Name="internals"
+ >
+ <File
+ RelativePath=".\internals\mf_codec.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_custom_src.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_devices.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_display_watcher.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_sample_grabber.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_sample_queue.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_utils.h"
+ >
+ </File>
+ </Filter>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ <File
+ RelativePath=".\version.rc"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="tdav"
+ >
+ <File
+ RelativePath="..\..\tinyDAV\include\tinydav\codecs\h264\tdav_codec_h264_common.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\codecs\h264\tdav_codec_h264_rtp.c"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\include\tinydav\codecs\h264\tdav_codec_h264_rtp.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\audio\tdav_consumer_audio.c"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\audio\tdav_producer_audio.c"
+ >
+ </File>
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx b/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
new file mode 100644
index 0000000..bee00f0
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
@@ -0,0 +1,750 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/mf_codec.h"
+#include "internals/mf_utils.h"
+
+#include "tinydav/codecs/h264/tdav_codec_h264_common.h"
+
+#include "tinyrtp/rtp/trtp_rtp_packet.h"
+
+#include "tinymedia/tmedia_codec.h"
+#include "tinymedia/tmedia_params.h"
+#include "tinymedia/tmedia_defaults.h"
+
+#include "tsk_params.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+// Per-session H.264 codec context built on Windows Media Foundation (MF).
+// Extends the common Doubango H.264 codec state with one MF transform per direction.
+typedef struct mf_codec_h264_s
+{
+ TDAV_DECLARE_CODEC_H264_COMMON;
+
+ // Encoder
+ struct{
+ MFCodecVideoH264* pInst; // MF encoder transform wrapper
+ void* buffer; // scratch buffer (freed on close)
+ int64_t frame_count; // frames pushed since open; drives IDR/header cadence
+ tsk_bool_t force_idr; // set by "action" param; cleared after next encode
+ int32_t quality; // [1-31]
+ int rotation;
+ int neg_width; // negotiated output size (swapped when rotation is 90/270)
+ int neg_height;
+ int neg_fps;
+ int max_bitrate_bps;
+ int32_t max_bw_kpbs;
+ tsk_bool_t passthrough; // whether to bypass encoding
+ } encoder;
+
+ // decoder
+ struct{
+ MFCodecVideoH264* pInst; // MF decoder transform wrapper
+ void* accumulator; // reassembly buffer for fragmented NAL units
+ tsk_size_t accumulator_pos; // write offset into the accumulator
+ tsk_size_t accumulator_size; // current accumulator capacity
+ uint16_t last_seq; // last RTP sequence number (packet-loss detection)
+ tsk_bool_t passthrough; // whether to bypass decoding
+ } decoder;
+}
+mf_codec_h264_t;
+
+#if !defined(PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS)
+# define PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS 25
+#endif
+
+static int mf_codec_h264_init(mf_codec_h264_t* self, profile_idc_t profile);
+static int mf_codec_h264_deinit(mf_codec_h264_t* self);
+static int mf_codec_h264_open_encoder(mf_codec_h264_t* self);
+static int mf_codec_h264_close_encoder(mf_codec_h264_t* self);
+static int mf_codec_h264_open_decoder(mf_codec_h264_t* self);
+static int mf_codec_h264_close_decoder(mf_codec_h264_t* self);
+
+/* ============ H.264 Base/Main Profile X.X Plugin interface functions ================= */
+
+// Runtime parameter setter for the H.264 MF codec.
+// Handles int32 params: "action" (IDR request / bandwidth up-down), "bypass-encoding",
+// "bypass-decoding" and "rotation" (reopens the encoder with swapped dimensions).
+// Returns 0 on success, -1 when the codec is closed or the param is unknown.
+static int mf_codec_h264_set(tmedia_codec_t* self, const tmedia_param_t* param)
+{
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ if(!self->opened){
+ TSK_DEBUG_ERROR("Codec not opened");
+ return -1;
+ }
+ if(param->value_type == tmedia_pvt_int32){
+ if(tsk_striequals(param->key, "action")){
+ tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+ switch(action){
+ case tmedia_codec_action_encode_idr:
+ {
+ h264->encoder.force_idr = tsk_true;
+ break;
+ }
+ case tmedia_codec_action_bw_down:
+ {
+ // higher quality index == lower quality/bitrate
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
+ break;
+ }
+ case tmedia_codec_action_bw_up:
+ {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
+ break;
+ }
+ }
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-encoding")){
+ h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ h264->encoder.pInst->setBundled(h264->encoder.passthrough);
+ TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-decoding")){
+ h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ h264->decoder.pInst->setBundled(h264->decoder.passthrough);
+ TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "rotation")){
+ int rotation = *((int32_t*)param->value);
+ if(h264->encoder.rotation != rotation){
+ // rotation swaps width/height: the encoder must be torn down and reopened
+ // NOTE(review): 'self->opened' is always true here (checked above)
+ if(self->opened){
+ int ret;
+ h264->encoder.rotation = rotation;
+ if((ret = mf_codec_h264_close_encoder(h264))){
+ return ret;
+ }
+ if((ret = mf_codec_h264_open_encoder(h264))){
+ return ret;
+ }
+ }
+ }
+ return 0;
+ }
+ }
+ return -1;
+}
+
+
+// Open both directions of the codec: the MF encoder first, then the MF decoder.
+// Returns 0 on success, otherwise the first non-zero helper result.
+static int mf_codec_h264_open(tmedia_codec_t* self)
+{
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ int ret;
+
+ if(!h264){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ /* the caller (base class) already checked that the codec is not opened */
+
+ // fail fast: only open the decoder once the encoder succeeded
+ if((ret = mf_codec_h264_open_encoder(h264)) == 0){
+ ret = mf_codec_h264_open_decoder(h264);
+ }
+ return ret;
+}
+
+// Close both directions of the codec. The per-direction helpers are tolerant of
+// partially-opened state, so both are always invoked. Always returns 0.
+static int mf_codec_h264_close(tmedia_codec_t* self)
+{
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+
+ if(!h264){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ /* the caller (base class) already checked that the codec is opened */
+
+ mf_codec_h264_close_encoder(h264);
+ mf_codec_h264_close_decoder(h264);
+
+ return 0;
+}
+
+// Encode one raw video frame through the MF encoder and hand the resulting
+// Annex-B bitstream to the RTP packetizer (tdav_codec_h264_rtp_encap).
+// Output is delivered through the RTP callback, so this function always
+// returns 0 and never writes to out_data/out_max_size.
+static tsk_size_t mf_codec_h264_encode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size)
+{
+ int ret = 0;
+ tsk_bool_t send_idr, send_hdr;
+
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self || !in_data || !in_size){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst || !h264->encoder.pInst->IsReady()){
+ TSK_DEBUG_ERROR("Encoder not opened or not ready");
+ return 0;
+ }
+
+
+ HRESULT hr = S_OK;
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+
+ // send IDR for:
+ // - the first frame
+ // - remote peer requested an IDR
+ // - every second within the first 4seconds
+ send_idr = (
+ h264->encoder.frame_count++ == 0
+ || h264 ->encoder.force_idr
+ || ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
+ );
+
+ if(send_idr) {
+ CHECK_HR(hr = h264->encoder.pInst->RequestKeyFrame());
+ }
+
+ // send SPS and PPS headers for:
+ // - IDR frames (not required but it's the easiest way to deal with pkt loss)
+ // - every 5 seconds after the first 4seconds
+ send_hdr = (
+ send_idr
+ || ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
+ );
+ if(send_hdr){
+ //FIXME: MF_MT_MPEG_SEQUENCE_HEADER
+ // tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), h264->encoder.context->extradata, (tsk_size_t)h264->encoder.context->extradata_size);
+ }
+
+ // passthrough: the input is assumed to already be an H.264 bitstream; packetize it as-is
+ if (h264->encoder.passthrough) {
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
+ return 0;
+ }
+
+ // Encode data
+ CHECK_HR(hr = h264->encoder.pInst->Process(in_data, (UINT32)in_size, &pSampleOut));
+ // pSampleOut may legitimately be NULL when the encoder buffers the frame
+ if(pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if(dwDataLength > 0) {
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)pBufferPtr, (tsk_size_t)dwDataLength);
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+
+ // reset
+ h264->encoder.force_idr = tsk_false;
+
+bail:
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
+ return 0;
+}
+
+// Depayload one RTP packet, reassemble the NAL unit(s) in the accumulator and,
+// on the RTP marker bit, either copy the bitstream out (passthrough) or decode
+// it through the MF decoder. Returns the number of bytes written to *out_data
+// (0 when no complete frame is available or on error).
+// Fixes vs. original: readiness test used encoder.pInst instead of decoder.pInst;
+// the out-of-memory path inside the Lock()ed region leaked a locked media buffer
+// and both COM objects; 'ret' could be logged uninitialized in 'bail'.
+static tsk_size_t mf_codec_h264_decode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size, const tsk_object_t* proto_hdr)
+{
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
+
+ const uint8_t* pay_ptr = tsk_null;
+ tsk_size_t pay_size = 0;
+ int ret = 0; // initialized: logged in 'bail' even when only 'hr' failed
+ tsk_bool_t append_scp, end_of_unit;
+ tsk_bool_t sps_or_pps;
+ tsk_size_t retsize = 0, size_to_copy = 0;
+ static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
+ static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
+
+ if(!h264 || !in_data || !in_size || !out_data)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ // FIX: must check the *decoder* instance (was encoder.pInst)
+ if(!self->opened || !h264->decoder.pInst || !h264->decoder.pInst->IsReady()){
+ TSK_DEBUG_ERROR("Decoder not opened or not ready");
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+
+ /* Packet lost? */
+ if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq){
+ TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
+ }
+ h264->decoder.last_seq = rtp_hdr->seq_num;
+
+
+ /* 5.3. NAL Unit Octet Usage
+ +---------------+
+ |0|1|2|3|4|5|6|7|
+ +-+-+-+-+-+-+-+-+
+ |F|NRI| Type |
+ +---------------+
+ */
+ if (*((uint8_t*)in_data) & 0x80) { // F (forbidden_zero_bit) set => corrupted NAL
+ TSK_DEBUG_WARN("F=1");
+ /* reset accumulator */
+ h264->decoder.accumulator_pos = 0;
+ return 0;
+ }
+
+ /* get payload */
+ if ((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp, &end_of_unit)) || !pay_ptr || !pay_size){
+ TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
+ return 0;
+ }
+ //append_scp = tsk_true;
+ size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
+ // whether it's SPS or PPS (append_scp is false for subsequent FUA chuncks)
+ sps_or_pps = append_scp && pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
+
+ // start-accumulator
+ if (!h264->decoder.accumulator) {
+ if (size_to_copy > xmax_size) {
+ TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", (unsigned)size_to_copy, (unsigned)xmax_size);
+ return 0;
+ }
+ if (!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t)))) {
+ TSK_DEBUG_ERROR("Failed to allocate new buffer");
+ return 0;
+ }
+ h264->decoder.accumulator_size = size_to_copy;
+ }
+ if ((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size) {
+ TSK_DEBUG_ERROR("BufferOverflow");
+ h264->decoder.accumulator_pos = 0;
+ return 0;
+ }
+ if ((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size) {
+ if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy)))){
+ TSK_DEBUG_ERROR("Failed to reallocate new buffer");
+ h264->decoder.accumulator_pos = 0;
+ h264->decoder.accumulator_size = 0;
+ return 0;
+ }
+ h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
+ }
+
+ if (append_scp) {
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
+ h264->decoder.accumulator_pos += start_code_prefix_size;
+ }
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
+ h264->decoder.accumulator_pos += pay_size;
+ // end-accumulator
+
+ /*if(sps_or_pps){
+ // http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
+ // SPS and PPS should be bundled with IDR
+ TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
+ }
+ else */if (rtp_hdr->marker) { // marker bit == end of the access unit
+ if (h264->decoder.passthrough) {
+ if (*out_max_size < h264->decoder.accumulator_pos) {
+ if ((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos))) {
+ *out_max_size = h264->decoder.accumulator_pos;
+ }
+ else {
+ *out_max_size = 0;
+ CHECK_HR(hr = E_OUTOFMEMORY); // bail: resets the accumulator and reports the error
+ }
+ }
+ memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
+ retsize = h264->decoder.accumulator_pos;
+ }
+ else { // !h264->decoder.passthrough
+ /* decode the picture */
+ CHECK_HR(hr = h264->decoder.pInst->Process(h264->decoder.accumulator, (UINT32)h264->decoder.accumulator_pos, &pSampleOut));
+ if (pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if (dwDataLength > 0) {
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+ {
+ /* IDR ? */
+ if(((pay_ptr[0] & 0x1F) == 0x05) && TMEDIA_CODEC_VIDEO(self)->in.callback){
+ TSK_DEBUG_INFO("Decoded H.264 IDR");
+ TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
+ TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+ TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+ }
+ /* fill out */
+ if(*out_max_size < dwDataLength){
+ if((*out_data = tsk_realloc(*out_data, dwDataLength))){
+ *out_max_size = dwDataLength;
+ }
+ else{
+ *out_max_size = 0;
+ // FIX: unlock before bailing out; the original returned here,
+ // leaving the buffer locked and leaking pSampleOut/pBufferOut
+ pBufferOut->Unlock();
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ }
+ retsize = (tsk_size_t)dwDataLength;
+ TMEDIA_CODEC_VIDEO(h264)->in.width = h264->decoder.pInst->GetWidth();
+ TMEDIA_CODEC_VIDEO(h264)->in.height = h264->decoder.pInst->GetHeight();
+ memcpy(*out_data, pBufferPtr, retsize);
+ }
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+ }// else(!h264->decoder.passthrough)
+ } // else if(rtp_hdr->marker)
+
+bail:
+ if (rtp_hdr->marker) {
+ h264->decoder.accumulator_pos = 0;
+ }
+ if (FAILED(hr) /*|| (!pSampleOut && rtp_hdr->marker)*/){
+ TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, (unsigned)h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
+ if(TMEDIA_CODEC_VIDEO(self)->in.callback){
+ TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
+ TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+ TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+ }
+ }
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
+ return retsize;
+}
+
+// Delegate SDP attribute matching (fmtp, imageattr, ...) to the shared H.264 helper.
+static tsk_bool_t mf_codec_h264_sdp_att_match(const tmedia_codec_t* self, const char* att_name, const char* att_value)
+{
+ return tdav_codec_h264_common_sdp_att_match((tdav_codec_h264_common_t*)self, att_name, att_value);
+}
+
+// Build the SDP attribute via the common H.264 helper, then tag the "fmtp"
+// line with this implementation's name. Caller owns the returned string.
+static char* mf_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char* att_name)
+{
+ char* att = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
+ if(tsk_striequals(att_name, "fmtp") && att) {
+ tsk_strcat(&att, "; impl=MF");
+ }
+ return att;
+}
+
+
+
+
+/* ============ H.264 Base Profile Plugin interface ================= */
+
+/* constructor: initializes the codec for the H.264 Baseline profile */
+static tsk_object_t* mf_codec_h264_base_ctor(tsk_object_t * self, va_list * app)
+{
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264){
+ /* init base: called by tmedia_codec_create() */
+ /* init self */
+ if(mf_codec_h264_init(h264, profile_idc_baseline) != 0){
+ return tsk_null; // signal construction failure to the object framework
+ }
+ }
+ return self;
+}
+/* destructor: releases base (common H.264) state then the MF encoder/decoder */
+static tsk_object_t* mf_codec_h264_base_dtor(tsk_object_t * self)
+{
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264){
+ /* deinit base */
+ tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+ /* deinit self */
+ mf_codec_h264_deinit(h264);
+ }
+
+ return self;
+}
+/* object definition */
+static const tsk_object_def_t mf_codec_h264_base_def_s =
+{
+ sizeof(mf_codec_h264_t),
+ mf_codec_h264_base_ctor,
+ mf_codec_h264_base_dtor,
+ tmedia_codec_cmp,
+};
+/* plugin definition: registers the Baseline-profile codec with the tinyMEDIA framework */
+static const tmedia_codec_plugin_def_t mf_codec_h264_base_plugin_def_s =
+{
+ &mf_codec_h264_base_def_s,
+
+ tmedia_video,
+ tmedia_codec_id_h264_bp,
+ "H264",
+ "H264 Base Profile (Media Foundation)",
+ TMEDIA_CODEC_FORMAT_H264_BP,
+ tsk_true,
+ 90000, // rate
+
+ /* audio */
+ { 0 },
+
+ /* video (width, height, fps) */
+ {176, 144, 0}, // fps is @deprecated
+
+ mf_codec_h264_set,
+ mf_codec_h264_open,
+ mf_codec_h264_close,
+ mf_codec_h264_encode,
+ mf_codec_h264_decode,
+ mf_codec_h264_sdp_att_match,
+ mf_codec_h264_sdp_att_get
+};
+const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t = &mf_codec_h264_base_plugin_def_s;
+
+/* ============ H.264 Main Profile Plugin interface ================= */
+
+/* constructor: initializes the codec for the H.264 Main profile */
+static tsk_object_t* mf_codec_h264_main_ctor(tsk_object_t * self, va_list * app)
+{
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264){
+ /* init base: called by tmedia_codec_create() */
+ /* init self */
+ if(mf_codec_h264_init(h264, profile_idc_main) != 0){
+ return tsk_null; // signal construction failure to the object framework
+ }
+ }
+ return self;
+}
+/* destructor: releases base (common H.264) state then the MF encoder/decoder */
+static tsk_object_t* mf_codec_h264_main_dtor(tsk_object_t * self)
+{
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264){
+ /* deinit base */
+ tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+ /* deinit self */
+ mf_codec_h264_deinit(h264);
+
+ }
+
+ return self;
+}
+/* object definition */
+static const tsk_object_def_t mf_codec_h264_main_def_s =
+{
+ sizeof(mf_codec_h264_t),
+ mf_codec_h264_main_ctor,
+ mf_codec_h264_main_dtor,
+ tmedia_codec_cmp,
+};
+/* plugin definition: registers the Main-profile codec with the tinyMEDIA framework */
+static const tmedia_codec_plugin_def_t mf_codec_h264_main_plugin_def_s =
+{
+ &mf_codec_h264_main_def_s,
+
+ tmedia_video,
+ tmedia_codec_id_h264_mp,
+ "H264",
+ "H264 Main Profile (Media Foundation)",
+ TMEDIA_CODEC_FORMAT_H264_MP,
+ tsk_true,
+ 90000, // rate
+
+ /* audio */
+ { 0 },
+
+ /* video (width, height, fps)*/
+ {176, 144, 0},// fps is @deprecated
+
+ mf_codec_h264_set,
+ mf_codec_h264_open,
+ mf_codec_h264_close,
+ mf_codec_h264_encode,
+ mf_codec_h264_decode,
+ mf_codec_h264_sdp_att_match,
+ mf_codec_h264_sdp_att_get
+};
+const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t = &mf_codec_h264_main_plugin_def_s;
+
+
+
+/* ============ Common To all H264 codecs ================= */
+
+// Create and initialize the MF H.264 encoder from the negotiated session
+// parameters (size, fps, bandwidth cap). Rotation of 90/270 swaps width/height.
+// Returns 0 on success, -1 on any HRESULT failure.
+int mf_codec_h264_open_encoder(mf_codec_h264_t* self)
+{
+ HRESULT hr = S_OK;
+ int32_t max_bw_kpbs;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(self->encoder.pInst) {
+ TSK_DEBUG_ERROR("Encoder already initialized");
+#if defined(E_ILLEGAL_METHOD_CALL)
+ CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+#else
+ CHECK_HR(hr = 0x8000000EL); // E_ILLEGAL_METHOD_CALL value on SDKs that lack the macro
+#endif
+ }
+
+ // create encoder
+ if(!(self->encoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder))){
+ TSK_DEBUG_ERROR("Failed to find H.264 encoder");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ //self->encoder.context->pix_fmt = PIX_FMT_YUV420P;
+ //self->encoder.context->time_base.num = 1;
+ //self->encoder.context->time_base.den = TMEDIA_CODEC_VIDEO(self)->out.fps;
+ self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
+ self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
+ self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
+ // cap the computed bandwidth at the user-configured maximum
+ max_bw_kpbs = TSK_CLAMP(
+ 0,
+ tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
+ self->encoder.max_bw_kpbs
+ );
+ self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
+
+ TSK_DEBUG_INFO("[H.264 MF Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
+ self->encoder.neg_width,
+ self->encoder.neg_height,
+ self->encoder.neg_fps,
+ self->encoder.max_bitrate_bps
+ );
+
+ CHECK_HR(hr = self->encoder.pInst->Initialize(
+ self->encoder.neg_fps,
+ self->encoder.neg_width,
+ self->encoder.neg_height,
+ self->encoder.max_bitrate_bps));
+
+ // keep encoded NALs small enough to fit one RTP payload (minus header margin)
+ CHECK_HR(hr = self->encoder.pInst->SetGOPSize(self->encoder.neg_fps * PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS));
+ CHECK_HR(hr = self->encoder.pInst->SetSliceMaxSizeInBytes((H264_RTP_PAYLOAD_SIZE - 100)));
+bail:
+ return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Tear down the MF encoder: release the transform, free the scratch buffer
+// and reset the frame counter. NULL-safe; always returns 0.
+int mf_codec_h264_close_encoder(mf_codec_h264_t* self)
+{
+ if(!self){
+ return 0;
+ }
+ SafeRelease(&self->encoder.pInst);
+ if(self->encoder.buffer){
+ TSK_FREE(self->encoder.buffer);
+ }
+ self->encoder.frame_count = 0;
+ return 0;
+}
+
+// Create and initialize the MF H.264 decoder from the negotiated incoming
+// video parameters. Returns 0 on success, -1 on any HRESULT failure.
+int mf_codec_h264_open_decoder(mf_codec_h264_t* self)
+{
+ HRESULT hr = S_OK;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(self->decoder.pInst) {
+ TSK_DEBUG_ERROR("Decoder already initialized");
+#if defined(E_ILLEGAL_METHOD_CALL)
+ CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+#else
+ CHECK_HR(hr = 0x8000000EL); // E_ILLEGAL_METHOD_CALL value on SDKs that lack the macro
+#endif
+ }
+
+ // create decoder (profile selected from the negotiated codec)
+ if(!(self->decoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder))){
+ TSK_DEBUG_ERROR("Failed to find H.264 decoder"); // FIX: message previously said "encoder"
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ TSK_DEBUG_INFO("[H.264 MF Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
+ TMEDIA_CODEC_VIDEO(self)->in.width,
+ TMEDIA_CODEC_VIDEO(self)->in.height,
+ TMEDIA_CODEC_VIDEO(self)->in.fps
+ );
+
+ CHECK_HR(hr = self->decoder.pInst->Initialize(
+ TMEDIA_CODEC_VIDEO(self)->in.fps,
+ TMEDIA_CODEC_VIDEO(self)->in.width,
+ TMEDIA_CODEC_VIDEO(self)->in.height));
+
+bail:
+ return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Tear down the MF decoder: release the transform and drop any partially
+// accumulated NAL data. NULL-safe; always returns 0.
+int mf_codec_h264_close_decoder(mf_codec_h264_t* self)
+{
+ if(!self){
+ return 0;
+ }
+ SafeRelease(&self->decoder.pInst);
+ TSK_FREE(self->decoder.accumulator);
+ self->decoder.accumulator_pos = 0;
+ return 0;
+}
+
+// One-time initialization shared by the Baseline/Main constructors: sets up
+// the common H.264 state, derives the level from the negotiated size, selects
++// the packetization mode and configures NV12 as the chroma on both directions.
+// Returns 0 on success, a negative helper error code otherwise.
+int mf_codec_h264_init(mf_codec_h264_t* self, profile_idc_t profile)
+{
+ int ret = 0;
+ level_idc_t level;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if((ret = tdav_codec_h264_common_init(common))){
+ // FIX: log previously named a nonexistent function and misspelled "failed"
+ TSK_DEBUG_ERROR("tdav_codec_h264_common_init() failed with error code=%d", ret);
+ return ret;
+ }
+
+ if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level))){
+ TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
+ return ret;
+ }
+
+ (self)->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
+ // slice-size control is only usable when the MF low-latency encoder supports it
+ if (MFUtils::IsLowLatencyH264SupportsMaxSliceSize()) {
+ common->pack_mode_local = H264_PACKETIZATION_MODE;
+ }
+ else {
+ common->pack_mode_local = Non_Interleaved_Mode;
+ }
+ common->profile = profile;
+ common->level = level;
+ TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
+ TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
+
+ // MF H.264 transforms consume/produce NV12
+ TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12;
+ TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_nv12;
+
+ self->encoder.quality = 1; // best quality; raised/lowered by "action" params
+
+ return ret;
+}
+
+// Counterpart of mf_codec_h264_init(): closes both encoder and decoder.
+// Safe to call on a partially initialized instance.
+int mf_codec_h264_deinit(mf_codec_h264_t* self)
+{
+ if(!self){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ mf_codec_h264_close((tmedia_codec_t*)self);
+
+ return 0;
+} \ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_config.h b/plugins/pluginWinMF/plugin_win_mf_config.h
new file mode 100644
index 0000000..f4f692a
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_config.h
@@ -0,0 +1,75 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_CONFIG_H
+#define PLUGIN_WIN_MF_CONFIG_H
+
+// Build-configuration header for the Windows Media Foundation plugin:
+// platform detection, DLL export/import decoration and compiler shims.
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+# define PLUGIN_WIN_MF_UNDER_WINDOWS 1
+# if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP || WINAPI_FAMILY == WINAPI_FAMILY_APP)
+# define PLUGIN_WIN_MF_UNDER_WINDOWS_RT 1
+# endif
+#endif
+
+// DLL decoration: export when building the plugin, import when consuming it
+#if (PLUGIN_WIN_MF_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(PLUGIN_WIN_MFP_EXPORTS)
+# define PLUGIN_WIN_MFP_API __declspec(dllexport)
+# define PLUGIN_WIN_MFP_GEXTERN extern __declspec(dllexport)
+#elif (PLUGIN_WIN_MF_UNDER_WINDOWS || defined(__SYMBIAN32__)) && !defined(PLUGIN_WIN_MFP_IMPORTS_IGNORE)
+# define PLUGIN_WIN_MFP_API __declspec(dllimport)
+# define PLUGIN_WIN_MFP_GEXTERN __declspec(dllimport)
+#else
+# define PLUGIN_WIN_MFP_API
+# define PLUGIN_WIN_MFP_GEXTERN extern
+#endif
+
+// x86
+#if defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+# define PLUGIN_WIN_MF_UNDER_X86 1
+#endif
+
+// Guards against C++ name mangling
+#ifdef __cplusplus
+# define PLUGIN_WIN_MF_BEGIN_DECLS extern "C" {
+# define PLUGIN_WIN_MF_END_DECLS }
+#else
+# define PLUGIN_WIN_MF_BEGIN_DECLS
+# define PLUGIN_WIN_MF_END_DECLS
+#endif
+
+#ifdef _MSC_VER
+# define inline __inline
+# define _CRT_SECURE_NO_WARNINGS
+# define _ALLOW_KEYWORD_MACROS
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+ #include <config.h>
+#endif
+
+#endif // PLUGIN_WIN_MF_CONFIG_H
diff --git a/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx b/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
new file mode 100644
index 0000000..026f510
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
@@ -0,0 +1,163 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+
+#include "tinydav/audio/tdav_consumer_audio.h"
+
+#include "tsk_debug.h"
+
+// Audio consumer context: thin wrapper around the base tinyDAV audio consumer.
+// Rendering itself is handled elsewhere; this type only tracks the started flag.
+typedef struct plugin_win_mf_consumer_audio_s
+{
+ TDAV_DECLARE_CONSUMER_AUDIO;
+
+ bool bStarted; // whether start() has been called and not yet stopped
+}
+plugin_win_mf_consumer_audio_t;
+
+
+/* ============ Consumer Interface ================= */
+// Forward every runtime parameter to the base audio consumer (jitter buffer,
+// gain, ...). The original kept an unused local and an empty success branch;
+// both removed — behavior is unchanged.
+static int plugin_win_mf_consumer_audio_set(tmedia_consumer_t* self, const tmedia_param_t* param)
+{
+ return tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+}
+
+// Prepare the audio consumer for the negotiated codec.
+// Currently a validation-only stub: no device is opened here.
+static int plugin_win_mf_consumer_audio_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+
+ if(!pSelf){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return 0;
+}
+
+// Mark the consumer as started. Adds the NULL guard the original lacked
+// (sibling pause/stop/prepare all validate 'self' before dereferencing).
+static int plugin_win_mf_consumer_audio_start(tmedia_consumer_t* self)
+{
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+
+ if(!pSelf){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ pSelf->bStarted = true;
+
+ return 0;
+}
+
+// Push one decoded audio buffer into the base consumer's jitter buffer.
+// Returns the base consumer's result (0 on success).
+static int plugin_win_mf_consumer_audio_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+ if(!self || !buffer || !size){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ /* buffer is already decoded */
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
+}
+
+// Pause is a no-op for this consumer (nothing to suspend); always succeeds.
+static int plugin_win_mf_consumer_audio_pause(tmedia_consumer_t* self)
+{
+ return 0;
+}
+
+// Stop the consumer: clears the started flag. Calling stop on a consumer that
+// was never started is harmless and only logged. Returns 0, or -1 on bad input.
+static int plugin_win_mf_consumer_audio_stop(tmedia_consumer_t* self)
+{
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+
+ if(!pSelf){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted){
+ /* actual device teardown would go here */
+ pSelf->bStarted = false;
+ }
+ else{
+ TSK_DEBUG_INFO("WinMF audio consumer not started");
+ }
+
+ return 0;
+}
+
+
+//
+// Media Foundation audio consumer object definition
+//
+/* constructor: ensures MF is started, then initializes the base audio consumer */
+static tsk_object_t* plugin_win_mf_consumer_audio_ctor(tsk_object_t * self, va_list * app)
+{
+ MFUtils::Startup();
+
+ plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
+ if(pSelf){
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(pSelf));
+ /* init self */
+
+ }
+ return self;
+}
+/* destructor: stops the consumer if still running, then releases base state */
+static tsk_object_t* plugin_win_mf_consumer_audio_dtor(tsk_object_t * self)
+{
+ plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
+ if(pSelf){
+ /* stop */
+ if(pSelf->bStarted){
+ plugin_win_mf_consumer_audio_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(pSelf));
+ /* deinit self */
+
+ }
+
+ return self;
+}
+/* object definition */
+static const tsk_object_def_t plugin_win_mf_consumer_audio_def_s =
+{
+ sizeof(plugin_win_mf_consumer_audio_t),
+ plugin_win_mf_consumer_audio_ctor,
+ plugin_win_mf_consumer_audio_dtor,
+ tdav_consumer_audio_cmp,
+};
+/* plugin definition: registers the MF audio consumer with tinyMEDIA */
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_audio_plugin_def_s =
+{
+ &plugin_win_mf_consumer_audio_def_s,
+
+ tmedia_audio,
+ "Windows Media Foundation audio consumer",
+
+ plugin_win_mf_consumer_audio_set,
+ plugin_win_mf_consumer_audio_prepare,
+ plugin_win_mf_consumer_audio_start,
+ plugin_win_mf_consumer_audio_consume,
+ plugin_win_mf_consumer_audio_pause,
+ plugin_win_mf_consumer_audio_stop
+};
+const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_audio_plugin_def_t = &plugin_win_mf_consumer_audio_plugin_def_s;
diff --git a/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx b/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
new file mode 100644
index 0000000..f6bef59
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
@@ -0,0 +1,1620 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+
+#include "tinymedia/tmedia_consumer.h"
+
+#include "tsk_safeobj.h"
+#include "tsk_string.h"
+#include "tsk_thread.h"
+#include "tsk_debug.h"
+
+#include <initguid.h>
+#include <assert.h>
+
+// Whether to use Direct3D device for direct rendering or Media Foundation topology and custom source
+// Using Media Foundation (MF) introduces delay when the input fps is different than the one in the custom src.
+// It's very hard to have something accurate when using MF because the input FPS changes depending on the congestion control. D3D is the best choice as frames are displayed as they arrive
+#if !defined(PLUGIN_MF_CV_USE_D3D9)
+# define PLUGIN_MF_CV_USE_D3D9 1
+#endif
+
+/******* ********/
+
+#if PLUGIN_MF_CV_USE_D3D9
+
+#include <d3d9.h>
+#include <dxva2api.h>
+
+#ifdef _MSC_VER
+#pragma comment(lib, "d3d9")
+#endif
+
+const DWORD NUM_BACK_BUFFERS = 2;
+
+static HRESULT CreateDeviceD3D9(
+ HWND hWnd,
+ IDirect3DDevice9** ppDevice,
+ IDirect3D9 **ppD3D,
+ D3DPRESENT_PARAMETERS &d3dpp
+ );
+static HRESULT TestCooperativeLevel(
+ struct plugin_win_mf_consumer_video_s *pSelf
+ );
+static HRESULT CreateSwapChain(
+ HWND hWnd,
+ UINT32 nFrameWidth,
+ UINT32 nFrameHeight,
+ IDirect3DDevice9* pDevice,
+ IDirect3DSwapChain9 **ppSwapChain);
+
+static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+
+static inline HWND Window(struct plugin_win_mf_consumer_video_s *pSelf);
+static inline LONG Width(const RECT& r);
+static inline LONG Height(const RECT& r);
+static inline RECT CorrectAspectRatio(const RECT& src, const MFRatio& srcPAR);
+static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst);
+static inline HRESULT UpdateDestinationRect(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bForce = FALSE);
+static HRESULT ResetDevice(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bUpdateDestinationRect = FALSE);
+static HRESULT SetFullscreen(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bFullScreen);
+static HWND CreateFullScreenWindow(struct plugin_win_mf_consumer_video_s *pSelf);
+static HRESULT HookWindow(struct plugin_win_mf_consumer_video_s *pSelf, HWND hWnd);
+static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf);
+
+
+
+// Private state for the D3D9 video consumer (direct-rendering path).
+typedef struct plugin_win_mf_consumer_video_s
+{
+    TMEDIA_DECLARE_CONSUMER;
+
+    BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked; // lifecycle/rendering flags
+    BOOL bPluginFireFox, bPluginWebRTC4All; // host-plugin quirks (e.g. delayed device creation)
+    HWND hWindow; // application-provided render target (may arrive late)
+    WNDPROC wndProc; // original wndproc saved while hWindow is subclassed
+    HWND hWindowFullScreen; // lazily-created topmost fullscreen window
+    RECT rcWindow; // last-seen client rect, used to detect window resize
+    RECT rcDest; // letterboxed destination rect inside rcWindow
+    MFRatio pixelAR; // source pixel aspect ratio (defaults to 1:1 in the ctor)
+
+    UINT32 nNegWidth; // negotiated frame width
+    UINT32 nNegHeight; // negotiated frame height
+    UINT32 nNegFps; // negotiated frame rate
+
+    D3DLOCKED_RECT rcLock; // scratch lock info for the swap-chain back buffer
+    IDirect3DDevice9* pDevice;
+    IDirect3D9 *pD3D;
+    IDirect3DSwapChain9 *pSwapChain; // sized to the incoming frame, not the window
+    D3DPRESENT_PARAMETERS d3dpp; // parameters used to (re)create/Reset the device
+
+    TSK_DECLARE_SAFEOBJ;
+}
+plugin_win_mf_consumer_video_t;
+
+static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf);
+
+/* ============ Media Consumer Interface ================= */
+/* ============ Media Consumer Interface ================= */
+// Runtime parameter setter. Supported keys:
+//   int64 "remote-hwnd"  : (re)attach the render window, resetting the device
+//   int32 "fullscreen"   : toggle the dedicated fullscreen window
+//   int32 "plugin-firefox"/"plugin-webrtc4all" : host-plugin flags
+// Returns 0 on success, -1 on failure.
+static int plugin_win_mf_consumer_video_set(tmedia_consumer_t *self, const tmedia_param_t* param)
+{
+    int ret = 0; // NOTE(review): unused — candidate for removal
+    HRESULT hr = S_OK;
+    plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    if(!self || !param)
+    {
+        TSK_DEBUG_ERROR("Invalid parameter");
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    if(param->value_type == tmedia_pvt_int64)
+    {
+        if(tsk_striequals(param->key, "remote-hwnd"))
+        {
+            HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+            if(hWnd != pSelf->hWindow)
+            {
+                tsk_safeobj_lock(pSelf); // block consumer thread
+                pSelf->hWindow = hWnd;
+                if(pSelf->bPrepared)
+                {
+                    // device is bound to the old HWND; rebuild it against the new one
+                    hr = ResetDevice(pSelf);
+                }
+                tsk_safeobj_unlock(pSelf); // unblock consumer thread
+            }
+        }
+    }
+    else if(param->value_type == tmedia_pvt_int32)
+    {
+        if(tsk_striequals(param->key, "fullscreen"))
+        {
+            BOOL bFullScreen = !!*((int32_t*)param->value);
+            TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
+            CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
+        }
+        else if(tsk_striequals(param->key, "create-on-current-thead"))
+        {
+            // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+        }
+        else if(tsk_striequals(param->key, "plugin-firefox"))
+        {
+            pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
+        }
+        else if(tsk_striequals(param->key, "plugin-webrtc4all"))
+        {
+            pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
+        }
+    }
+
+    CHECK_HR(hr);
+
+bail:
+    return SUCCEEDED(hr) ? 0 : -1;
+}
+
+
+// Prepare the D3D9 consumer: negotiate size/fps from the decoding codec and
+// create the D3D9 device + swap chain if the target HWND is already known
+// (otherwise creation is deferred to the first consume() call).
+// Returns 0 on success, -1 on failure. Interface identical to the original.
+static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+    plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    // FIX: the original test was "(!pSelf || !codec && codec->plugin)", which by
+    // operator precedence is "(!pSelf || (!codec && codec->plugin))" and can never
+    // reject a NULL codec — it would then be dereferenced below. Reject when either
+    // the codec or its plugin is missing.
+    if(!pSelf || !codec || !codec->plugin){
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+    if(pSelf->bPrepared){
+        TSK_DEBUG_WARN("D3D9 video consumer already prepared");
+        return -1;
+    }
+
+    // FIXME: DirectShow requires flipping but not D3D9
+    // The Core library always tries to flip when OSType==Win32. Must be changed
+    TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
+
+    HRESULT hr = S_OK;
+    HWND hWnd = Window(pSelf);
+
+    // Adopt the codec's input parameters; fall back display size to input size.
+    TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+    TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+    TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+    if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
+        TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+    }
+    if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
+        TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+    }
+
+    pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
+    pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
+    pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+    TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
+        pSelf->nNegFps,
+        pSelf->nNegWidth,
+        pSelf->nNegHeight);
+
+    // This consumer only renders raw RGB32 frames (decoding happens upstream).
+    TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+    TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
+
+    // The window handle is not created until the call is connect (incoming only) - At least on Internet Explorer 10
+    if(hWnd && !pSelf->bPluginWebRTC4All)
+    {
+        CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+        CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+    }
+    else
+    {
+        if(hWnd && pSelf->bPluginWebRTC4All)
+        {
+            TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creating until session get connected");
+        }
+        else
+        {
+            TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
+        }
+    }
+
+bail:
+    pSelf->bPrepared = SUCCEEDED(hr);
+    return pSelf->bPrepared ? 0 : -1;
+}
+
+// Start the consumer. Rendering itself happens in consume(); starting is just
+// a state transition (clears "paused", sets "started"). Returns 0 on success.
+static int plugin_win_mf_consumer_video_start(tmedia_consumer_t* self)
+{
+    plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    // Guard clauses: invalid object, already running, or never prepared.
+    if (!pSelf) {
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+    if (pSelf->bStarted) {
+        TSK_DEBUG_INFO("D3D9 video consumer already started");
+        return 0;
+    }
+    if (!pSelf->bPrepared) {
+        TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
+        return -1;
+    }
+
+    pSelf->bPaused = false;
+    pSelf->bStarted = true;
+
+    return 0;
+}
+
+// Consume one raw RGB32 frame: copy it into the swap chain's back buffer,
+// letterbox-stretch it onto the device back buffer and present it.
+// Also lazily (re)creates the D3D9 device/swap chain when the HWND appears
+// late or the incoming frame size changes. Returns 0 on success, -1 on error.
+static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+    plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    HRESULT hr = S_OK;
+    HWND hWnd = Window(pSelf); // NULL-safe even when pSelf is NULL
+
+    IDirect3DSurface9 *pSurf = NULL;
+    IDirect3DSurface9 *pBB = NULL;
+
+    if(!pSelf)
+    {
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return -1; // because of the mutex lock do it here
+    }
+
+    tsk_safeobj_lock(pSelf);
+
+    if(!buffer || !size)
+    {
+        TSK_DEBUG_ERROR("Invalid parameter");
+        CHECK_HR(hr = E_INVALIDARG);
+    }
+
+    if(!pSelf->bStarted)
+    {
+        TSK_DEBUG_INFO("D3D9 video consumer not started");
+        CHECK_HR(hr = E_FAIL);
+    }
+
+    if(!hWnd)
+    {
+        TSK_DEBUG_INFO("Do not draw frame because HWND not set");
+        goto bail; // not an error as the application can decide to set the HWND at any time
+    }
+
+    if (!pSelf->bWindowHooked)
+    {
+        // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
+        CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
+    }
+
+    if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain)
+    {
+        if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain)
+        {
+            CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
+        }
+
+        if(hWnd)
+        {
+            // means HWND was not set but defined now
+            pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+            pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+            CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+            CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+        }
+    }
+
+    // Incoming frame size changed (e.g. remote peer renegotiated): rebuild the
+    // swap chain at the new size and force a destination-rect recomputation.
+    if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
+        TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+            pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+            pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+        // Update media type
+
+        SafeRelease(&pSelf->pSwapChain);
+        CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
+
+        pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+        pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+        // Update Destination will do noting if the window size haven't changed.
+        // Force updating the destination rect if negotiated size change
+        CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
+    }
+
+    // Handle device-lost/device-not-reset before touching any surface.
+    CHECK_HR(hr = TestCooperativeLevel(pSelf));
+
+    CHECK_HR(hr = UpdateDestinationRect(pSelf));
+
+    CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
+    CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
+
+    // Fast copy() using MMX, SSE, or SSE2
+    // source stride is width*4 bytes (RGB32); destination stride comes from D3D.
+    hr = MFCopyImage(
+        (BYTE*)pSelf->rcLock.pBits,
+        pSelf->rcLock.Pitch,
+        (BYTE*)buffer,
+        (pSelf->nNegWidth << 2),
+        (pSelf->nNegWidth << 2),
+        pSelf->nNegHeight
+        );
+    if(FAILED(hr))
+    {
+        // unlock() before leaving
+        pSurf->UnlockRect();
+        CHECK_HR(hr);
+    }
+
+    CHECK_HR(hr = pSurf->UnlockRect());
+
+    // Color fill the back buffer (letterbox bars: black in Metro, white otherwise)
+    CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
+#if METROPOLIS
+    CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0x00, 0x00, 0x00)));
+#else
+    CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
+#endif
+
+    // Resize keeping aspect ratio and Blit the frame (required)
+    hr = pSelf->pDevice->StretchRect(
+        pSurf,
+        NULL,
+        pBB,
+        &pSelf->rcDest/*NULL*/,
+        D3DTEXF_LINEAR
+        ); // could fail when display is being resized
+    if(SUCCEEDED(hr))
+    {
+        // Present the frame
+        CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
+    }
+    else
+    {
+        // transient failure (e.g. resize in progress): skip this frame, no error
+        TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
+    }
+
+bail:
+    SafeRelease(&pSurf);
+    SafeRelease(&pBB);
+
+    tsk_safeobj_unlock(pSelf);
+
+    return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Pause the consumer: keeps the device alive and only flips the "paused" flag.
+// Pausing a non-started consumer is a harmless no-op. Returns 0 on success.
+static int plugin_win_mf_consumer_video_pause(tmedia_consumer_t* self)
+{
+    plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    if(!pSelf){
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+    if(!pSelf->bStarted)
+    {
+        // FIX: the original log said "MF video producer not started" — this is
+        // the D3D9 video *consumer* (copy-paste from the producer source).
+        TSK_DEBUG_INFO("D3D9 video consumer not started");
+        return 0;
+    }
+
+    pSelf->bPaused = true;
+
+    return 0;
+}
+
+// Stop the consumer: clear the running flags, hide the fullscreen window if it
+// exists, then tear down all D3D9 resources via unprepare(). A later start()
+// requires a fresh prepare(). Returns unprepare()'s result (0 on success).
+static int plugin_win_mf_consumer_video_stop(tmedia_consumer_t* self)
+{
+    plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    if (!pSelf) {
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+
+    pSelf->bStarted = false;
+    pSelf->bPaused = false;
+
+    HWND hFull = pSelf->hWindowFullScreen;
+    if (hFull) {
+        ::InvalidateRect(hFull, NULL, FALSE);
+        ::ShowWindow(hFull, SW_HIDE);
+    }
+
+    // next start() will be called after prepare()
+    return _plugin_win_mf_consumer_video_unprepare(pSelf);
+}
+
+// Release every D3D9 resource and unhook the render window. The consumer must
+// already be stopped (bStarted == false). Returns 0 on success, -1 on error.
+static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf)
+{
+    if(!pSelf)
+    {
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+
+    // restore the original wndproc before dropping the device
+    UnhookWindow(pSelf);
+
+    if(pSelf->bStarted)
+    {
+        // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+        TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+        return -1;
+    }
+
+    SafeRelease(&pSelf->pDevice);
+    SafeRelease(&pSelf->pD3D);
+    SafeRelease(&pSelf->pSwapChain);
+
+    pSelf->bPrepared = false;
+
+    return 0;
+}
+
+
+//
+// D3D9 video consumer object definition
+//
+/* constructor */
+/* constructor: boots Media Foundation, initializes the tmedia consumer base
+   and this consumer's defaults (RGB32 output, raw frames, 1:1 pixel AR). */
+static tsk_object_t* plugin_win_mf_consumer_video_ctor(tsk_object_t * self, va_list * app)
+{
+    MFUtils::Startup(); // ref-counted; needed for MFCopyImage even in D3D9 mode
+
+    plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+    if(pSelf){
+        /* init base */
+        tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+        TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+        TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
+
+        /* init self */
+        tsk_safeobj_init(pSelf);
+        TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+        TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+        TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+        TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+        pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1;
+    }
+    return self;
+}
+/* destructor */
+/* destructor: stops (which also unprepares) if still running, then releases
+   the consumer base and the mutex. Returns "self" for the object runtime. */
+static tsk_object_t* plugin_win_mf_consumer_video_dtor(tsk_object_t * self)
+{
+    plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+    if(pSelf){
+        /* stop */
+        if(pSelf->bStarted)
+        {
+            plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
+        }
+
+        /* deinit base */
+        tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+        /* deinit self */
+        _plugin_win_mf_consumer_video_unprepare(pSelf); // no-op if stop() already ran
+        tsk_safeobj_deinit(pSelf);
+    }
+
+    return self;
+}
+/* object definition */
+/* tsk_object definition (no comparator needed for video consumers) */
+static const tsk_object_def_t plugin_win_mf_consumer_video_def_s =
+{
+    sizeof(plugin_win_mf_consumer_video_t),
+    plugin_win_mf_consumer_video_ctor,
+    plugin_win_mf_consumer_video_dtor,
+    tsk_null,
+};
+/* plugin definition: vtable exposed to the tinymedia core for the D3D9 path */
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s =
+{
+    &plugin_win_mf_consumer_video_def_s,
+
+    tmedia_video,
+    "D3D9 video consumer",
+
+    plugin_win_mf_consumer_video_set,
+    plugin_win_mf_consumer_video_prepare,
+    plugin_win_mf_consumer_video_start,
+    plugin_win_mf_consumer_video_consume,
+    plugin_win_mf_consumer_video_pause,
+    plugin_win_mf_consumer_video_stop
+};
+const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t = &plugin_win_mf_consumer_video_plugin_def_s;
+
+// Helper functions
+
+// Create the IDirect3D9 object and a windowed HAL device bound to "hWnd"
+// (X8R8G8B8 back buffer, immediate presentation). On success "*ppDevice" and
+// "*ppD3D" own new references and "d3dpp" receives the presentation parameters
+// used (needed later by ResetDevice). On failure both outputs are released.
+// Both output pointers must be NULL on entry ("all null or all valid" invariant).
+static HRESULT CreateDeviceD3D9(
+    HWND hWnd,
+    IDirect3DDevice9** ppDevice,
+    IDirect3D9 **ppD3D,
+    D3DPRESENT_PARAMETERS &d3dpp
+    )
+{
+    HRESULT hr = S_OK;
+
+    D3DDISPLAYMODE mode = { 0 };
+    D3DPRESENT_PARAMETERS pp = {0};
+
+    if(!ppDevice || *ppDevice || !ppD3D || *ppD3D)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION)))
+    {
+        CHECK_HR(hr = E_OUTOFMEMORY);
+    }
+
+    CHECK_HR(hr = (*ppD3D)->GetAdapterDisplayMode(
+        D3DADAPTER_DEFAULT,
+        &mode
+        ));
+
+    // verify the adapter can render X8R8G8B8 in windowed mode before creating
+    CHECK_HR(hr = (*ppD3D)->CheckDeviceType(
+        D3DADAPTER_DEFAULT,
+        D3DDEVTYPE_HAL,
+        mode.Format,
+        D3DFMT_X8R8G8B8,
+        TRUE // windowed
+        ));
+    pp.BackBufferFormat = D3DFMT_X8R8G8B8;
+    pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE; // no vsync wait: present as frames arrive
+    pp.Windowed = TRUE;
+    pp.hDeviceWindow = hWnd;
+    CHECK_HR(hr = (*ppD3D)->CreateDevice(
+        D3DADAPTER_DEFAULT,
+        D3DDEVTYPE_HAL,
+        hWnd,
+        D3DCREATE_HARDWARE_VERTEXPROCESSING,
+        &pp,
+        ppDevice
+        ));
+
+    d3dpp = pp;
+
+bail:
+    if(FAILED(hr))
+    {
+        SafeRelease(ppD3D);
+        SafeRelease(ppDevice);
+    }
+    return hr;
+}
+
+// Probe the D3D9 device state before rendering:
+//   D3D_OK              -> proceed;
+//   D3DERR_DEVICELOST   -> swallowed (S_OK): wait until the device can be reset;
+//   D3DERR_DEVICENOTRESET -> rebuild the device via ResetDevice().
+// Any other failure code is propagated to the caller.
+static HRESULT TestCooperativeLevel(
+    struct plugin_win_mf_consumer_video_s *pSelf
+    )
+{
+    HRESULT hr = S_OK;
+
+    if (!pSelf || !pSelf->pDevice)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    switch((hr = pSelf->pDevice->TestCooperativeLevel()))
+    {
+    case D3D_OK:
+        {
+            break;
+        }
+
+    case D3DERR_DEVICELOST:
+        {
+            // cannot reset yet; skip the frame without reporting an error
+            hr = S_OK;
+            break;
+        }
+
+    case D3DERR_DEVICENOTRESET:
+        {
+            // TRUE -> also recompute the destination rect after the reset
+            hr = ResetDevice(pSelf, TRUE);
+            break;
+        }
+
+    default:
+        {
+            break;
+        }
+    }
+
+    CHECK_HR(hr);
+
+bail:
+    return hr;
+}
+
+// Create an additional swap chain sized to the video FRAME (not the window):
+// lockable X8R8G8B8 back buffers so MFCopyImage can write pixels directly.
+// "*ppSwapChain" must be NULL on entry and owns a new reference on success.
+static HRESULT CreateSwapChain(
+    HWND hWnd,
+    UINT32 nFrameWidth,
+    UINT32 nFrameHeight,
+    IDirect3DDevice9* pDevice,
+    IDirect3DSwapChain9 **ppSwapChain
+    )
+{
+    HRESULT hr = S_OK;
+
+    D3DPRESENT_PARAMETERS pp = { 0 };
+
+    if(!pDevice || !ppSwapChain || *ppSwapChain)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    pp.BackBufferWidth = nFrameWidth;
+    pp.BackBufferHeight = nFrameHeight;
+    pp.Windowed = TRUE;
+    pp.SwapEffect = D3DSWAPEFFECT_FLIP;
+    pp.hDeviceWindow = hWnd;
+    pp.BackBufferFormat = D3DFMT_X8R8G8B8;
+    pp.Flags =
+        D3DPRESENTFLAG_VIDEO | D3DPRESENTFLAG_DEVICECLIP |
+        D3DPRESENTFLAG_LOCKABLE_BACKBUFFER; // LockRect() is required for the pixel copy
+    pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+    pp.BackBufferCount = NUM_BACK_BUFFERS;
+
+    CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
+
+bail:
+    return hr;
+}
+
+// Render-target selector: the fullscreen window when fullscreen mode is on,
+// otherwise the application-provided window. Safe to call with a NULL consumer.
+static inline HWND Window(struct plugin_win_mf_consumer_video_s *pSelf)
+{
+    if (!pSelf) {
+        return NULL;
+    }
+    return pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow;
+}
+
+// Width of a RECT in pixels.
+static inline LONG Width(const RECT& r)
+{
+    return (r.right - r.left);
+}
+
+// Height of a RECT in pixels.
+static inline LONG Height(const RECT& r)
+{
+    return (r.bottom - r.top);
+}
+
+//-----------------------------------------------------------------------------
+// CorrectAspectRatio
+//
+// Converts a rectangle from the source's pixel aspect ratio (PAR) to 1:1 PAR.
+// Returns the corrected rectangle.
+//
+// For example, a 720 x 486 rect with a PAR of 9:10, when converted to 1x1 PAR,
+// is stretched to 720 x 540.
+// Copyright (C) Microsoft
+//-----------------------------------------------------------------------------
+
+// Stretch "src" to a 1:1 pixel-aspect-ratio rectangle anchored at the origin.
+// Wide pixels (num > den) grow the width; tall pixels grow the height.
+// (Microsoft sample code — kept verbatim; see banner above.)
+static inline RECT CorrectAspectRatio(const RECT& src, const MFRatio& srcPAR)
+{
+    // Start with a rectangle the same size as src, but offset to the origin (0,0).
+    RECT rc = {0, 0, src.right - src.left, src.bottom - src.top};
+
+    if ((srcPAR.Numerator != 1) || (srcPAR.Denominator != 1))
+    {
+        // Correct for the source's PAR.
+
+        if (srcPAR.Numerator > srcPAR.Denominator)
+        {
+            // The source has "wide" pixels, so stretch the width.
+            rc.right = MulDiv(rc.right, srcPAR.Numerator, srcPAR.Denominator);
+        }
+        else if (srcPAR.Numerator < srcPAR.Denominator)
+        {
+            // The source has "tall" pixels, so stretch the height.
+            rc.bottom = MulDiv(rc.bottom, srcPAR.Denominator, srcPAR.Numerator);
+        }
+        // else: PAR is 1:1, which is a no-op.
+    }
+    return rc;
+}
+
+//-------------------------------------------------------------------
+// LetterBoxDstRect
+//
+// Takes a src rectangle and constructs the largest possible
+// destination rectangle within the specified destination rectangle
+// such that the video maintains its current shape.
+//
+// This function assumes that pels are the same shape within both the
+// source and destination rectangles.
+// Copyright (C) Microsoft
+//-------------------------------------------------------------------
+
+// Compute the largest rectangle with rcSrc's aspect ratio that fits centered
+// inside rcDst ("letterbox"/"pillarbox"). MulDiv keeps the integer scaling
+// exact and rounds to nearest. (Microsoft sample code — kept verbatim.)
+static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst)
+{
+    // figure out src/dest scale ratios
+    int iSrcWidth  = Width(rcSrc);
+    int iSrcHeight = Height(rcSrc);
+
+    int iDstWidth  = Width(rcDst);
+    int iDstHeight = Height(rcDst);
+
+    int iDstLBWidth;
+    int iDstLBHeight;
+
+    if (MulDiv(iSrcWidth, iDstHeight, iSrcHeight) <= iDstWidth) {
+
+        // Column letter boxing ("pillar box")
+
+        iDstLBWidth  = MulDiv(iDstHeight, iSrcWidth, iSrcHeight);
+        iDstLBHeight = iDstHeight;
+    }
+    else {
+
+        // Row letter boxing.
+
+        iDstLBWidth  = iDstWidth;
+        iDstLBHeight = MulDiv(iDstWidth, iSrcHeight, iSrcWidth);
+    }
+
+
+    // Create a centered rectangle within the current destination rect
+
+    RECT rc;
+
+    LONG left = rcDst.left + ((iDstWidth - iDstLBWidth) >> 1);
+    LONG top = rcDst.top + ((iDstHeight - iDstLBHeight) >> 1);
+
+    SetRect(&rc, left, top, left + iDstLBWidth, top + iDstLBHeight);
+
+    return rc;
+}
+
+// Recompute the letterboxed destination rect (pSelf->rcDest) from the current
+// window client area. Does nothing unless the client rect changed since the
+// last call, or "bForce" is TRUE (e.g. after a frame-size renegotiation).
+// Also resets the device when the rect changes, so the back buffer matches.
+static inline HRESULT UpdateDestinationRect(plugin_win_mf_consumer_video_t *pSelf, BOOL bForce /*= FALSE*/)
+{
+    HRESULT hr = S_OK;
+    HWND hwnd = Window(pSelf); // NULL-safe
+
+    if(!pSelf)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    if(!hwnd)
+    {
+        CHECK_HR(hr = E_HANDLE);
+    }
+    RECT rcClient;
+    GetClientRect(hwnd, &rcClient);
+
+    // only update destination if window size changed
+    if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top))
+    {
+        // ResetDevice is called with bUpdateDestinationRect=FALSE, avoiding
+        // mutual recursion between the two functions.
+        CHECK_HR(hr = ResetDevice(pSelf));
+
+        pSelf->rcWindow = rcClient;
+#if 1
+        RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
+        rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
+        pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
+#else
+        // alternative hand-rolled letterboxing (kept for reference, disabled)
+        long w = rcClient.right - rcClient.left;
+        long h = rcClient.bottom - rcClient.top;
+        float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
+        // (w/h)=ratio =>
+        // 1) h=w/ratio
+        // and
+        // 2) w=h*ratio
+        pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
+        pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
+        pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
+        pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
+#endif
+
+        //::InvalidateRect(hwnd, NULL, FALSE);
+    }
+
+bail:
+    return hr;
+}
+
+// Reset (or fully recreate) the D3D9 device using the saved presentation
+// parameters. If Reset() fails everything is released and recreated from
+// scratch against the current window. "bUpdateDestinationRect" additionally
+// recomputes the letterbox rect — it is FALSE when called FROM
+// UpdateDestinationRect to break the mutual-recursion cycle.
+static HRESULT ResetDevice(plugin_win_mf_consumer_video_t *pSelf, BOOL bUpdateDestinationRect /*= FALSE*/)
+{
+    HRESULT hr = S_OK;
+
+    tsk_safeobj_lock(pSelf);
+
+    HWND hWnd = Window(pSelf);
+
+    if (pSelf->pDevice)
+    {
+        D3DPRESENT_PARAMETERS d3dpp = pSelf->d3dpp; // Reset() may modify its argument
+
+        hr = pSelf->pDevice->Reset(&d3dpp);
+
+        if (FAILED(hr))
+        {
+            // Reset failed: drop everything and fall through to full recreation
+            SafeRelease(&pSelf->pDevice);
+            SafeRelease(&pSelf->pD3D);
+            SafeRelease(&pSelf->pSwapChain);
+        }
+    }
+
+    if (pSelf->pDevice == NULL && hWnd)
+    {
+        CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+        CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+    }
+
+    if(bUpdateDestinationRect) // endless loop guard
+    {
+        CHECK_HR(hr = UpdateDestinationRect(pSelf));
+    }
+
+bail:
+    tsk_safeobj_unlock(pSelf);
+
+    return hr;
+}
+
+// Toggle fullscreen rendering: show (creating on first use) or hide the
+// dedicated fullscreen window, then reset the device so it binds to the
+// newly-selected target window. No-op when the state does not change.
+static HRESULT SetFullscreen(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bFullScreen)
+{
+    HRESULT hr = S_OK;
+    if(!pSelf)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    if(pSelf->bFullScreen != bFullScreen)
+    {
+        tsk_safeobj_lock(pSelf);
+        if(bFullScreen)
+        {
+            HWND hWnd = CreateFullScreenWindow(pSelf); // lazily created, reused afterwards
+            if(hWnd)
+            {
+                ::ShowWindow(hWnd, SW_SHOWDEFAULT);
+                ::UpdateWindow(hWnd);
+            }
+        }
+        else if(pSelf->hWindowFullScreen)
+        {
+            ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+        }
+        pSelf->bFullScreen = bFullScreen;
+        if(pSelf->bPrepared)
+        {
+            // rebind the device to the new target window (Window() changed)
+            hr = ResetDevice(pSelf);
+        }
+        tsk_safeobj_unlock(pSelf);
+
+        CHECK_HR(hr);
+    }
+
+bail:
+    return hr;
+}
+
+// Window procedure used both for the fullscreen window and to subclass the
+// application window (HookWindow). The owning consumer is retrieved from the
+// "Self" window property set by CreateFullScreenWindow. Any key press exits
+// fullscreen; background erasing is suppressed to avoid flicker.
+static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
+{
+    switch(uMsg)
+    {
+    case WM_CREATE:
+    case WM_SIZE:
+    case WM_MOVE:
+        {
+            // NOTE(review): dynamic_cast on a same-type pointer is an identity
+            // cast here; resize handling is currently empty (rect is polled in
+            // consume() via UpdateDestinationRect instead).
+            struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
+            if (pSelf)
+            {
+
+            }
+            break;
+        }
+
+    case WM_ERASEBKGND:
+        {
+            return TRUE; // avoid background erasing.
+        }
+
+    case WM_CHAR:
+    case WM_KEYUP:
+        {
+            struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
+            if (pSelf)
+            {
+                // any keystroke leaves fullscreen mode
+                SetFullscreen(pSelf, FALSE);
+            }
+
+            break;
+        }
+    }
+
+    return DefWindowProc(hWnd, uMsg, wParam, lParam);
+}
+
+// Lazily create (once, then reuse) the borderless topmost window that covers
+// the whole primary screen for fullscreen rendering, and stash "pSelf" as a
+// window property so WndProc can find its consumer.
+// Returns the window handle, or NULL when "pSelf" is NULL or creation failed.
+static HWND CreateFullScreenWindow(struct plugin_win_mf_consumer_video_s *pSelf)
+{
+    // FIX: removed the unused local "HRESULT hr = S_OK;" (never read).
+    if(!pSelf)
+    {
+        return NULL;
+    }
+
+    if(!pSelf->hWindowFullScreen)
+    {
+        WNDCLASS wc = {0};
+
+        wc.lpfnWndProc = WndProc;
+        wc.hInstance = GetModuleHandle(NULL);
+        wc.hCursor = LoadCursor(NULL, IDC_ARROW);
+        wc.lpszClassName = L"WindowClass";
+        RegisterClass(&wc);
+        pSelf->hWindowFullScreen = ::CreateWindowEx(
+            NULL,
+            wc.lpszClassName,
+            L"Doubango's Video Consumer Fullscreen",
+            WS_EX_TOPMOST | WS_POPUP,
+            0, 0,
+            GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
+            NULL,
+            NULL,
+            GetModuleHandle(NULL),
+            NULL);
+
+        // link the HWND back to this consumer for WndProc
+        SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
+    }
+    return pSelf->hWindowFullScreen;
+}
+
+// Subclass (hook) "hWnd" so WndProc receives its messages, after unhooking any
+// previously hooked window. On success bWindowHooked is raised and the original
+// wndproc is saved for UnhookWindow. Thread-safe via the consumer's mutex.
+static HRESULT HookWindow(plugin_win_mf_consumer_video_s *pSelf, HWND hWnd)
+{
+    HRESULT hr = S_OK;
+
+    tsk_safeobj_lock(pSelf);
+
+    CHECK_HR(hr = UnhookWindow(pSelf));
+
+    if ((pSelf->hWindow = hWnd)) {
+        pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
+        if (!pSelf->wndProc) {
+            TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
+            CHECK_HR(hr = E_FAIL);
+        }
+        pSelf->bWindowHooked = TRUE;
+    }
+bail:
+    tsk_safeobj_unlock(pSelf);
+    // FIX: the original returned S_OK unconditionally, silently swallowing the
+    // E_FAIL set above; propagate the real result so callers (consume()'s
+    // CHECK_HR) can react to a failed hook.
+    return hr;
+}
+
+// Restore the previously-saved wndproc on the hooked window (if any), ask the
+// window to repaint, and clear the hooked flag. Always succeeds.
+static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf)
+{
+    tsk_safeobj_lock(pSelf);
+    if (pSelf->hWindow && pSelf->wndProc) {
+        SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
+        pSelf->wndProc = NULL;
+    }
+    if(pSelf->hWindow)
+    {
+        // force a repaint so our last frame doesn't linger in the window
+        ::InvalidateRect(pSelf->hWindow, NULL, FALSE);
+    }
+    pSelf->bWindowHooked = FALSE;
+    tsk_safeobj_unlock(pSelf);
+    return S_OK;
+}
+
+
+#else /* !PLUGIN_MF_CV_USE_D3D9 */
+
+#include "internals/mf_custom_src.h"
+#include "internals/mf_display_watcher.h"
+#include "internals/mf_codec.h"
+
+#include <KS.h>
+#include <Codecapi.h>
+
+// 0: {{[Source] -> (VideoProcessor) -> SampleGrabber}} , {{[Decoder]}} -> RTP
+// 1: {{[Source] -> (VideoProcessor) -> [Decoder] -> SampleGrabber}} -> RTP
+// (VideoProcessor) is optional
+// "{{" and "}}" defines where the graph starts and ends respectively. For "0", [Decoder] is a stand-alone IMFTransform.
+#if !defined(PLUGIN_MF_CV_BUNDLE_CODEC)
+# define PLUGIN_MF_CV_BUNDLE_CODEC 0
+#endif
+
+// Uncompressed video frame will come from Doubango core and it's up to the converter to match the requested chroma.
+// Supported values: NV12, I420, RGB32 and RGB24. (RGB formats are not recommended because of performance issues)
+// To avoid chroma conversion (performance issues) we use NV12 when the codec is bundled as MediaFoundation codecs most likely only support this format.
+// NV12 is the native format for media foundation codecs (e.g. Intel Quick Sync) and the GPU.
+// I420 is the native format for FFmpeg, libvpx and libtheora.
+const GUID kDefaultUncompressedType
+#if PLUGIN_MF_CV_BUNDLE_CODEC
+= MFVideoFormat_NV12;
+#else
+= MFVideoFormat_I420;
+#endif
+
+DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
+0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+
+static void* TSK_STDCALL RunSessionThread(void *pArg);
+static int _plugin_win_mf_consumer_video_unprepare(struct plugin_win_mf_consumer_video_s* pSelf);
+
+// Private state for the Media Foundation video consumer (topology path,
+// compiled when PLUGIN_MF_CV_USE_D3D9 is 0).
+typedef struct plugin_win_mf_consumer_video_s
+{
+    TMEDIA_DECLARE_CONSUMER;
+
+    bool bStarted, bPrepared; // lifecycle flags
+    HWND hWindow; // application-provided render target
+    tsk_thread_handle_t* ppTread[1]; // media-session worker thread
+
+    UINT32 nNegWidth; // negotiated frame width
+    UINT32 nNegHeight; // negotiated frame height
+    UINT32 nNegFps; // negotiated frame rate
+
+    MFCodecVideo *pDecoder; // optional bundled H.264 decoder (PLUGIN_MF_CV_BUNDLE_CODEC)
+    IMFMediaSession *pSession;
+    CMFSource *pSource; // custom source fed with incoming frames
+    IMFActivate *pSinkActivate; // EVR/sample-grabber sink activator
+    DisplayWatcher* pDisplayWatcher; // tracks HWND changes / fullscreen
+    IMFTopology *pTopologyFull;
+    IMFTopology *pTopologyPartial;
+    IMFMediaType *pOutType; // negotiated output media type
+}
+plugin_win_mf_consumer_video_t;
+
+
+
+/* ============ Media Consumer Interface ================= */
+/* ============ Media Consumer Interface ================= */
+// Runtime parameter setter for the MF-topology consumer. Supported keys:
+//   int64 "remote-hwnd" : forwarded to the DisplayWatcher
+//   int32 "fullscreen"  : forwarded to the DisplayWatcher
+// Unmatched keys are ignored (hr stays S_OK). Returns 0 on success, -1 on failure.
+static int plugin_win_mf_consumer_video_set(tmedia_consumer_t *self, const tmedia_param_t* param)
+{
+    int ret = 0; // NOTE(review): unused — candidate for removal
+    HRESULT hr = S_OK;
+    plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    if(!self || !param){
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+
+    if(param->value_type == tmedia_pvt_int64){
+        if(tsk_striequals(param->key, "remote-hwnd")){
+            HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+            if(hWnd != pSelf->hWindow)
+            {
+                pSelf->hWindow = hWnd;
+                if(pSelf->pDisplayWatcher)
+                {
+                    CHECK_HR(hr = pSelf->pDisplayWatcher->SetHwnd(hWnd));
+                }
+            }
+        }
+    }
+    else if(param->value_type == tmedia_pvt_int32){
+        if(tsk_striequals(param->key, "fullscreen")){
+            if(pSelf->pDisplayWatcher)
+            {
+                CHECK_HR(hr = pSelf->pDisplayWatcher->SetFullscreen(!!*((int32_t*)param->value)));
+            }
+        }
+        else if(tsk_striequals(param->key, "create-on-current-thead")){
+            // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+        }
+        else if(tsk_striequals(param->key, "plugin-firefox")){
+            /*DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
+            if(DSCONSUMER(self)->display){
+                DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
+            }*/
+        }
+    }
+
+bail:
+    return SUCCEEDED(hr) ? 0 : -1;
+}
+
+
+static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!pSelf || !codec && codec->plugin){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(pSelf->bPrepared){
+ TSK_DEBUG_WARN("MF video consumer already prepared");
+ return -1;
+ }
+
+ // FIXME: DirectShow requires flipping but not MF
+ // The Core library always tries to flip when OSType==Win32. Must be changed
+ TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
+
+ HRESULT hr = S_OK;
+
+ TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
+ TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
+ TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+ }
+
+ pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
+ pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.display.width;
+ pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+ TSK_DEBUG_INFO("MF video consumer: fps=%d, width=%d, height=%d",
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ if(kDefaultUncompressedType == MFVideoFormat_NV12) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_nv12;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_I420) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_RGB32) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_RGB24) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb24;
+ }
+ else {
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+ TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
+
+ IMFMediaSink* pMediaSink = NULL;
+ IMFAttributes* pSessionAttributes = NULL;
+
+ // Set session attributes
+ CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+ CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pOutType));
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+
+#if PLUGIN_MF_CV_BUNDLE_CODEC
+ if((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
+ // both Microsoft and Intel encoders support NV12 only as input
+ // static const BOOL kIsEncoder = FALSE;
+ // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pSelf->pDecoder);
+ pSelf->pDecoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder);
+ if(pSelf->pDecoder)
+ {
+ hr = pSelf->pDecoder->Initialize(
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ if(FAILED(hr))
+ {
+ SafeRelease(&pSelf->pDecoder);
+ hr = S_OK;
+ }
+ }
+ if(SUCCEEDED(hr) && pSelf->pDecoder) {
+ TMEDIA_CONSUMER(pSelf)->decoder.codec_id = codec->id; // means accept ENCODED fames
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
+ }
+ else {
+ SafeRelease(&pSelf->pDecoder);
+ TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
+ }
+ }
+#endif
+
+ if(!pSelf->pDecoder){
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, kDefaultUncompressedType));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = kDefaultUncompressedType == MFVideoFormat_NV12 ? tmedia_chroma_nv12 : tmedia_chroma_yuv420p;
+ }
+ CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+ CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
+ CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, pSelf->nNegWidth, pSelf->nNegHeight));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, pSelf->nNegFps, 1));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+
+ CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&pSelf->pSource, pSelf->pOutType));
+
+ // Apply Encoder output type (must be called before SetInputType)
+ //if(pSelf->pDecoder) {
+ // CHECK_HR(hr = pSelf->pDecoder->SetOutputType(0, pSelf->pOutType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+ //}
+
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
+
+ // Create the EVR activation object.
+ CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSelf->pSinkActivate));
+
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(
+ pSelf->pSource,
+ pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
+ pSelf->pSinkActivate,
+ NULL/*Preview*/,
+ pSelf->pOutType,
+ &pSelf->pTopologyPartial));
+ // Resolve topology (adds video processors if needed).
+ CHECK_HR(hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pSelf->pTopologyFull));
+
+ // Find EVR
+ CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink));
+
+ // Create EVR watcher
+ pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
+ CHECK_HR(hr);
+
+bail:
+ SafeRelease(&pMediaSink);
+ SafeRelease(&pSessionAttributes);
+
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return pSelf->bPrepared ? 0 : -1;
+}
+
// Starts the prepared consumer: runs the EVR display watcher, starts the media
// session and spawns the asynchronous thread that pumps session events.
// Returns 0 on success, -1 otherwise (already-started is a success no-op).
static int plugin_win_mf_consumer_video_start(tmedia_consumer_t* self)
{
	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;

	if(!pSelf){
		TSK_DEBUG_ERROR("Invalid parameter");
		return -1;
	}

	if(pSelf->bStarted){
		TSK_DEBUG_INFO("MF video consumer already started");
		return 0;
	}
	if(!pSelf->bPrepared){
		TSK_DEBUG_ERROR("MF video consumer not prepared");
		return -1;
	}

	HRESULT hr = S_OK;

	// Run EVR watcher
	if(pSelf->pDisplayWatcher) {
		CHECK_HR(hr = pSelf->pDisplayWatcher->Start());
	}

	// Run the media session.
	CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopologyFull));

	// Start asynchronous watcher thread
	// bStarted must be set *before* creating the thread: RunSessionThread loops on it.
	pSelf->bStarted = true;
	int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
	if(ret != 0) {
		TSK_DEBUG_ERROR("Failed to create thread");
		hr = E_FAIL;
		pSelf->bStarted = false;
		// join in case the handle was partially created before the failure
		if(pSelf->ppTread[0]){
			tsk_thread_join(&pSelf->ppTread[0]);
		}
		// undo RunSession() so a later start() can retry from a clean state
		MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
		CHECK_HR(hr = E_FAIL);
	}

bail:
	return SUCCEEDED(hr) ? 0 : -1;
}
+
// Delivers one decoded/encoded frame to the rendering pipeline. If the incoming
// frame size differs from the negotiated one, the media type and the topology
// are rebuilt on the fly before the buffer is pushed to the custom source.
// Returns 0 on success, -1 otherwise.
static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;

	HRESULT hr = S_OK;

	if(!pSelf || !buffer || !size) {
		TSK_DEBUG_ERROR("Invalid parameter");
		CHECK_HR(hr = E_INVALIDARG);
	}

	if(!pSelf->bStarted) {
		TSK_DEBUG_INFO("MF video consumer not started");
		CHECK_HR(hr = E_FAIL);
	}
	if(!pSelf->pSource) {
		TSK_DEBUG_ERROR("No video custom source");
		CHECK_HR(hr = E_FAIL);
	}

	// Resolution change detection: compare against the values negotiated in prepare()
	if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
		TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
			pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
			pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
		// Update media type
		CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, TMEDIA_CONSUMER(pSelf)->video.in.width, TMEDIA_CONSUMER(pSelf)->video.in.height));
		CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, TMEDIA_CONSUMER(pSelf)->video.fps, 1));

		CHECK_HR(hr = pSelf->pSession->ClearTopologies());

		//
		// FIXME: Using same EVR when the size is just swapped (e.g. [640, 480] -> [480, 640]) doesn't work while other changes does (e.g. [352, 288] -> [640, 480])
		// /!\This look like a bug in Media Foundation
		//
		if(pSelf->nNegWidth == TMEDIA_CONSUMER(pSelf)->video.in.height && pSelf->nNegHeight == TMEDIA_CONSUMER(pSelf)->video.in.width) // swapped?
		{
			TSK_DEBUG_INFO("/!\\ Size swapped");

			// Build a brand-new EVR activate + partial topology; only commit them
			// into pSelf once *both* creations succeeded (strong exception safety
			// done by hand with goto).
			IMFActivate* pSinkActivate = NULL;
			IMFTopology* pTopologyPartial = NULL;
			hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSinkActivate);
			if(FAILED(hr)) goto end_of_swapping;
			hr = MFUtils::CreateTopology(
				pSelf->pSource,
				pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
				pSinkActivate,
				NULL/*Preview*/,
				pSelf->pOutType,
				&pTopologyPartial);
			if(FAILED(hr)) goto end_of_swapping;

			if(SUCCEEDED(hr)) {
				// Swap the new objects in; ownership transferred, locals cleared so
				// the cleanup below doesn't release them.
				SafeRelease(&pSelf->pSinkActivate);
				SafeRelease(&pSelf->pTopologyPartial);
				pSelf->pSinkActivate = pSinkActivate; pSinkActivate = NULL;
				pSelf->pTopologyPartial = pTopologyPartial; pTopologyPartial = NULL;

			}

end_of_swapping:
			SafeRelease(&pSinkActivate);
			SafeRelease(&pTopologyPartial);
			CHECK_HR(hr);
		}

		// Set media type again (not required but who know)
		CHECK_HR(hr = MFUtils::SetMediaType(pSelf->pSource, pSelf->pOutType));

		// Rebuild topology using the partial one
		IMFTopology* pTopologyFull = NULL;
		hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pTopologyFull);
		if(SUCCEEDED(hr)){
			SafeRelease(&pSelf->pTopologyFull);
			pSelf->pTopologyFull = pTopologyFull; pTopologyFull = NULL;
		}
		SafeRelease(&pTopologyFull);
		CHECK_HR(hr);

		// Find Main Sink
		IMFMediaSink* pMediaSink = NULL;
		hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink);
		if(SUCCEEDED(hr)) {
			// Recreate the display watcher against the new sink
			if(pSelf->pDisplayWatcher){
				delete pSelf->pDisplayWatcher, pSelf->pDisplayWatcher = NULL;
			}
			pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
			if(SUCCEEDED(hr) && pSelf->bStarted) {
				hr = pSelf->pDisplayWatcher->Start();
			}
		}
		SafeRelease(&pMediaSink);
		CHECK_HR(hr);

		// Update the topology associated to the media session
		CHECK_HR(hr = pSelf->pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pSelf->pTopologyFull));

		// Update negotiated width and height
		pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.in.width;
		pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.in.height;
	}

	// Deliver buffer
	CHECK_HR(hr = pSelf->pSource->CopyVideoBuffer(pSelf->nNegWidth, pSelf->nNegHeight, buffer, size));

bail:
	return SUCCEEDED(hr) ? 0 : -1;
}
+
+static int plugin_win_mf_consumer_video_pause(tmedia_consumer_t* self)
+{
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!pSelf){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(!pSelf->bStarted)
+ {
+ TSK_DEBUG_INFO("MF video producer not started");
+ return 0;
+ }
+
+ HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+
+ return SUCCEEDED(hr) ? 0 : -1;
+}
+
// Stops the consumer: halts the display watcher, shuts the session down (which
// wakes the event-pump thread), joins that thread, then releases everything via
// unprepare(). A later start() therefore requires a new prepare().
static int plugin_win_mf_consumer_video_stop(tmedia_consumer_t* self)
{
	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;

	if(!pSelf){
		TSK_DEBUG_ERROR("Invalid parameter");
		return -1;
	}

	HRESULT hr = S_OK;

	// stop EVR watcher
	if(pSelf->pDisplayWatcher) {
		hr = pSelf->pDisplayWatcher->Stop();
	}

	// for the thread
	// (shutdown failures are deliberately non-fatal here: we always fall
	// through to unprepare() so resources are reclaimed)
	pSelf->bStarted = false;
	hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
	if(pSelf->ppTread[0]){
		tsk_thread_join(&pSelf->ppTread[0]);
	}
	hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera

	// next start() will be called after prepare()
	return _plugin_win_mf_consumer_video_unprepare(pSelf);
}
+
+static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf)
+{
+ if(!pSelf){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+ }
+
+ if(pSelf->pDisplayWatcher) {
+ pSelf->pDisplayWatcher->Stop();
+ }
+ if(pSelf->pSource){
+ pSelf->pSource->Shutdown();
+ pSelf->pSource = NULL;
+ }
+ if(pSelf->pSession){
+ pSelf->pSession->Shutdown();
+ pSelf->pSession = NULL;
+ }
+
+ SafeRelease(&pSelf->pDecoder);
+ SafeRelease(&pSelf->pSession);
+ SafeRelease(&pSelf->pSource);
+ SafeRelease(&pSelf->pSinkActivate);
+ SafeRelease(&pSelf->pTopologyFull);
+ SafeRelease(&pSelf->pTopologyPartial);
+ SafeRelease(&pSelf->pOutType);
+
+ if(pSelf->pDisplayWatcher) {
+ delete pSelf->pDisplayWatcher;
+ pSelf->pDisplayWatcher = NULL;
+ }
+
+ pSelf->bPrepared = false;
+
+ return 0;
+}
+
+
+//
+// Media Foundation video consumer object definition
+//
+/* constructor */
+static tsk_object_t* plugin_win_mf_consumer_video_ctor(tsk_object_t * self, va_list * app)
+{
+ MFUtils::Startup();
+
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf){
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
+ TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
+
+ /* init self */
+ // consumer->create_on_ui_thread = tsk_true;
+ TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+ TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+ TSK_DEBUG_INFO("Create WinMF video consumer");
+ }
+ return self;
+}
+/* destructor */
+static tsk_object_t* plugin_win_mf_consumer_video_dtor(tsk_object_t * self)
+{
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf){
+ /* stop */
+ if(pSelf->bStarted){
+ plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+ /* deinit self */
+ _plugin_win_mf_consumer_video_unprepare(pSelf);
+ }
+
+ return self;
+}
+/* object definition */
+static const tsk_object_def_t plugin_win_mf_consumer_video_def_s =
+{
+ sizeof(plugin_win_mf_consumer_video_t),
+ plugin_win_mf_consumer_video_ctor,
+ plugin_win_mf_consumer_video_dtor,
+ tsk_null,
+};
+/* plugin definition*/
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s =
+{
+ &plugin_win_mf_consumer_video_def_s,
+
+ tmedia_video,
+ "Media Foundation video consumer",
+
+ plugin_win_mf_consumer_video_set,
+ plugin_win_mf_consumer_video_prepare,
+ plugin_win_mf_consumer_video_start,
+ plugin_win_mf_consumer_video_consume,
+ plugin_win_mf_consumer_video_pause,
+ plugin_win_mf_consumer_video_stop
+};
+const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t = &plugin_win_mf_consumer_video_plugin_def_s;
+
+// Run session async thread
+static void* TSK_STDCALL RunSessionThread(void *pArg)
+{
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - ENTER");
+
+ while(pSelf->bStarted){
+ CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
+ {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded)
+ {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
+
+bail:
+ TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - EXIT");
+
+ return NULL;
+}
+
+#endif /* PLUGIN_MF_CV_USE_D3D9 */ \ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx b/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
new file mode 100644
index 0000000..0e6abcb
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
@@ -0,0 +1,600 @@
+/* Copyright (C) 2013-2015 Mamadou DIOP
+* Copyright (C) 2013-2015 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+/*
+Video Processor MFT (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx)
+* The video processor supports GPU-accelerated video processing.
+* The video processor MFT is a Microsoft Media Foundation transform (MFT) that performs :
+ - colorspace conversion
+ - video resizing
+ - deinterlacing
+ - frame rate conversion
+ - rotation
+ - cropping
+ - spatial left and right view unpacking
+ - and mirroring
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+
+#include "tinymedia/tmedia_converter_video.h"
+
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+#include <assert.h>
+#include <initguid.h>
+#include <dmo.h>
+#include <wmcodecdsp.h>
+
+#ifdef _MSC_VER
+#pragma comment(lib, "strmiids.lib")
+#pragma comment(lib, "wmcodecdspuuid.lib")
+#endif
+
+EXTERN_C const GUID CLSID_VideoProcessorMFT; // defined in mf_utils.cxx
+
+#if !defined(_WIN32_WINNT_WIN8)
+#define _WIN32_WINNT_WIN8 0x0602
+#endif /* _WIN32_WINNT_WIN8 */
+
+#if (WINVER < _WIN32_WINNT_WIN8)
+DEFINE_GUID(MF_SA_D3D11_AWARE,
+ 0x206b4fc8, 0xfcf9, 0x4c51, 0xaf, 0xe3, 0x97, 0x64, 0x36, 0x9e, 0x33, 0xa0);
+#endif /* MF_SA_D3D11_AWARE */
+
+#if !defined(HAVE_IMFVideoProcessorControl)
+# if defined(__IMFVideoProcessorControl_INTERFACE_DEFINED__)
+# define HAVE_IMFVideoProcessorControl 1
+# else
+# define HAVE_IMFVideoProcessorControl 0
+# endif
+#endif /* HAVE_IMFVideoProcessorControl */
+#if !defined(E_BOUNDS)
+# define E_BOUNDS _HRESULT_TYPEDEF_(0x8000000BL)
+#endif /* E_BOUNDS */
+#if !defined(PLUGIN_MF_VC_FPS)
+#define PLUGIN_MF_VC_FPS 120 // Samples requires timestamp
+#endif /* PLUGIN_MF_VC_FPS */
+
// State of one Media Foundation video converter instance. The converter wraps
// either the Win8+ Video Processor MFT (scaling + colorspace) or, as a
// fallback, the Color Converter DSP (colorspace only).
typedef struct plugin_win_mf_converter_video_ms_s
{
	TMEDIA_DECLARE_CONVERTER_VIDEO;

	GUID fmtSrc;            // MF pixel format of the input frames
	tsk_size_t widthSrc;    // input width, in pixels
	tsk_size_t heightSrc;   // input height, in pixels

	GUID fmtDst;            // MF pixel format of the output frames
	tsk_size_t widthDst;    // output width, in pixels
	tsk_size_t heightDst;   // output height, in pixels

	UINT32 rotation;        // cached rotation (degrees) last applied to the MFT
	UINT32 xOutputSize;     // one output frame, in bytes
	UINT32 xInputSize;      // one input frame, in bytes
	BOOL flip;              // cached vertical-mirror flag last applied to the MFT

	IMFSample* pSampleOut;  // reusable output sample (grown on demand)
	IMFSample* pSampleIn;   // reusable input sample (grown on demand)

	LONGLONG rtStart;       // running timestamp of the next input sample (100-ns units)
	UINT64 rtDuration;      // per-sample duration derived from PLUGIN_MF_VC_FPS

	IMFTransform* pMFT; // "CLSID_VideoProcessorMFT" or "CLSID_CColorConvertDMO"
#if HAVE_IMFVideoProcessorControl
	IMFVideoProcessorControl* pVPC; // rotation/mirror control (Video Processor only)
#endif
	BOOL isVideoProcessor;  // TRUE when pMFT is the Video Processor (supports scaling)
}
plugin_win_mf_converter_video_ms_t;
+
+static inline const GUID& _plugin_win_mf_converter_video_ms_get_pixfmt(tmedia_chroma_t chroma);
+static inline tsk_size_t _plugin_win_mf_converter_video_ms_get_size(tmedia_chroma_t chroma, tsk_size_t w, tsk_size_t h);
+static inline HRESULT _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
+ BYTE* pDst,
+ const BYTE* pSrc,
+ INT dwWidthInPixels,
+ INT dwHeightInPixels
+ );
+static HRESULT _plugin_win_mf_converter_video_ms_process_input(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample* pSample);
+static HRESULT _plugin_win_mf_converter_video_ms_process_output(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample **ppSample);
+static HRESULT _plugin_win_mf_converter_video_ms_process(plugin_win_mf_converter_video_ms_t* pSelf, const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
+
// Initializes (or re-initializes) the converter for the given source and
// destination formats. Tries the GPU-capable Video Processor MFT first and
// falls back to the Color Converter DSP when no scaling is required.
// Returns 0 on success, a negative value otherwise.
static int plugin_win_mf_converter_video_ms_init(tmedia_converter_video_t* self, tsk_size_t srcWidth, tsk_size_t srcHeight, tmedia_chroma_t srcChroma, tsk_size_t dstWidth, tsk_size_t dstHeight, tmedia_chroma_t dstChroma)
{
	plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)self;
	TSK_DEBUG_INFO("Initializing new MF Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", srcWidth, srcHeight, srcChroma, dstWidth, dstHeight, dstChroma);

	if((pSelf->fmtSrc = _plugin_win_mf_converter_video_ms_get_pixfmt(srcChroma)) == GUID_NULL)
	{
		TSK_DEBUG_ERROR("Invalid source chroma");
		return -2;
	}
	if((pSelf->fmtDst = _plugin_win_mf_converter_video_ms_get_pixfmt(dstChroma)) == GUID_NULL)
	{
		TSK_DEBUG_ERROR("Invalid destination chroma");
		return -3;
	}

	pSelf->rtStart = 0;

	pSelf->widthSrc = srcWidth;
	pSelf->heightSrc = srcHeight;
	pSelf->widthDst = dstWidth;
	pSelf->heightDst = dstHeight;
	pSelf->rotation = 0;
	pSelf->xOutputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(dstChroma, dstWidth, dstHeight);
	pSelf->xInputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(srcChroma, srcWidth, srcHeight);

	// Drop any state from a previous init() (re-init is allowed)
	SafeRelease(&pSelf->pSampleOut);
	SafeRelease(&pSelf->pSampleIn);
	SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
	SafeRelease(&pSelf->pVPC);
#endif

	HRESULT hr = S_OK;

	IMFMediaType* pTypeSrc = NULL;
	IMFMediaType* pTypeDst = NULL;

	// Get video processor or Color convertor
	hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
		CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT));
	pSelf->isVideoProcessor = SUCCEEDED(hr);
	if(FAILED(hr))
	{
		TSK_DEBUG_INFO("CoCreateInstance(CLSID_VideoProcessorMFT) failed");
		// The Color Converter DSP cannot scale, so the fallback is only usable
		// when source and destination sizes match.
		if(pSelf->widthSrc == pSelf->widthDst && pSelf->heightSrc == pSelf->heightDst)
		{
			TSK_DEBUG_INFO("No video scaling is required...perform CoCreateInstance(CLSID_CColorConvertDMO)");
			CHECK_HR(hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
				CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT)));
		}
		else
		{
			CHECK_HR(hr);
		}
	}



	if(pSelf->isVideoProcessor)
	{
		// Log whether the MFT is D3D11-aware (informational only) and grab the
		// rotation/mirror control interface when it is available.
		IMFAttributes* pAttributes = NULL;
		UINT32 GPU = 0;
		hr = pSelf->pMFT->GetAttributes(&pAttributes);
		if (SUCCEEDED(hr)) {
			hr = pAttributes->GetUINT32(MF_SA_D3D11_AWARE, &GPU);
		}
		SafeRelease(&pAttributes);
		TSK_DEBUG_INFO("MF_SA_D3D11_AWARE = %d", GPU);
#if HAVE_IMFVideoProcessorControl
		CHECK_HR(hr = pSelf->pMFT->QueryInterface(IID_PPV_ARGS(&pSelf->pVPC)));
#endif
	}


	CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtSrc, &pTypeSrc, (UINT32)pSelf->widthSrc, (UINT32)pSelf->heightSrc));
	CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtDst, &pTypeDst, (UINT32)pSelf->widthDst, (UINT32)pSelf->heightDst));

	CHECK_HR(hr = pSelf->pMFT->SetInputType(0, pTypeSrc, 0));
	CHECK_HR(hr = pSelf->pMFT->SetOutputType(0, pTypeDst, 0));

bail:
	SafeRelease(&pTypeSrc);
	SafeRelease(&pTypeDst);

	if(FAILED(hr))
	{
		// Leave the converter in an uninitialized (but consistent) state
		SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
		SafeRelease(&pSelf->pVPC);
#endif
		return -4;
	}

	return 0;
}
+
+static tsk_size_t plugin_win_mf_converter_video_ms_process(tmedia_converter_video_t* _self, const void* buffer, tsk_size_t buffer_size, void** output, tsk_size_t* output_max_size)
+{
+ plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)_self;
+
+ HRESULT hr = S_OK;
+
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+
+ if(!pSelf || !buffer || !output || !output_max_size)
+ {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!pSelf->pMFT)
+ {
+ TSK_DEBUG_ERROR("Not initialized");
+ CHECK_HR(hr = E_FAIL);
+ }
+#if HAVE_IMFVideoProcessorControl
+ if(!pSelf->pVPC && pSelf->isVideoProcessor)
+ {
+ TSK_DEBUG_ERROR("Not initialized");
+ CHECK_HR(hr = E_FAIL);
+ }
+#endif
+
+ if(*output_max_size < pSelf->xOutputSize)
+ {
+ if(!(*output = tsk_realloc(*output, pSelf->xOutputSize)))
+ {
+ *output_max_size = 0;
+ TSK_DEBUG_ERROR("Failed to allocate buffer with size = %u", pSelf->xOutputSize);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ *output_max_size = pSelf->xOutputSize;
+ }
+#if HAVE_IMFVideoProcessorControl
+ if(pSelf->pVPC && !!_self->flip != !!pSelf->flip)
+ {
+ pSelf->flip = !!_self->flip;
+ CHECK_HR(hr = pSelf->pVPC->SetMirror(pSelf->flip ? MIRROR_NONE : MIRROR_VERTICAL));
+ }
+ if(pSelf->pVPC && _self->rotation != pSelf->rotation)
+ {
+ _self->rotation = pSelf->rotation;
+ CHECK_HR(hr = pSelf->pVPC->SetRotation(pSelf->rotation == 0 ? ROTATION_NONE : ROTATION_NORMAL));
+
+ }
+#endif
+
+ CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process(
+ pSelf, buffer, pSelf->xInputSize, &pSampleOut));
+
+ if(pSampleOut)
+ {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if(dwDataLength > 0)
+ {
+ if(dwDataLength != pSelf->xOutputSize)
+ {
+ TSK_DEBUG_ERROR("Output size mismatch");
+ CHECK_HR(hr = E_BOUNDS);
+ }
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+
+ // MFCopyImage() is optimized: MMX, SSE, or SSE2
+ switch(_self->dstChroma)
+ {
+ // Don't waste your time guessing which parameter to use: The consumer will always request RGB32. If not used for consumer then, just memcpy()
+ case tmedia_chroma_rgb32:
+ {
+ if(pSelf->isVideoProcessor)
+ {
+ hr = _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
+ (BYTE*)*output,
+ (const BYTE*)pBufferPtr,
+ (INT)pSelf->widthDst,
+ (INT)pSelf->heightDst
+ );
+ }
+ else
+ {
+ hr = MFCopyImage(
+ (BYTE*)*output,
+ (LONG)(pSelf->widthDst << 2),
+ (BYTE*)pBufferPtr,
+ (LONG)(pSelf->widthDst << 2),
+ (DWORD)(pSelf->widthDst << 2),
+ (DWORD)pSelf->heightDst
+ );
+ }
+
+
+ if(FAILED(hr))
+ {
+ // unlock() before leaving
+ pBufferOut->Unlock();
+ CHECK_HR(hr);
+ }
+ break;
+ }
+ default:
+ {
+ memcpy(*output, pBufferPtr, dwDataLength);
+ }
+ }
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+
+ pSelf->rtStart += pSelf->rtDuration;
+
+bail:
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
+
+ return SUCCEEDED(hr) ? pSelf->xOutputSize : 0;
+}
+
+static tsk_object_t* plugin_win_mf_converter_video_ms_ctor(tsk_object_t * self, va_list * app)
+{
+ plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
+ if(pSelf){
+ HRESULT hr = MFFrameRateToAverageTimePerFrame(PLUGIN_MF_VC_FPS, 1, &pSelf->rtDuration);
+ if(FAILED(hr)){
+ pSelf->rtDuration = 83333; // 120 FPS
+ }
+ }
+ return self;
+}
+static tsk_object_t* plugin_win_mf_converter_video_ms_dtor(tsk_object_t * self)
+{
+ plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
+ if(pSelf){
+ SafeRelease(&pSelf->pSampleOut);
+ SafeRelease(&pSelf->pSampleIn);
+ SafeRelease(&pSelf->pMFT);
+#if HAVE_IMFVideoProcessorControl
+ SafeRelease(&pSelf->pVPC);
+#endif
+ }
+
+ return self;
+}
/* object definition: size + ctor/dtor registered with the tsk object system */
static const tsk_object_def_t plugin_win_mf_converter_video_ms_def_s =
{
	sizeof(plugin_win_mf_converter_video_ms_t),
	plugin_win_mf_converter_video_ms_ctor,
	plugin_win_mf_converter_video_ms_dtor,
	tsk_null, // no comparator
};
const tsk_object_def_t *plugin_win_mf_converter_video_ms_def_t = &plugin_win_mf_converter_video_ms_def_s;
/* plugin definition: converter vtable (init + process) exported to tinyMEDIA */
static const tmedia_converter_video_plugin_def_t plugin_win_mf_converter_video_ms_plugin_def_s =
{
	&plugin_win_mf_converter_video_ms_def_s,

	plugin_win_mf_converter_video_ms_init,
	plugin_win_mf_converter_video_ms_process
};
const tmedia_converter_video_plugin_def_t *plugin_win_mf_converter_video_ms_plugin_def_t = &plugin_win_mf_converter_video_ms_plugin_def_s;
+
+
+static inline tsk_size_t _plugin_win_mf_converter_video_ms_get_size(tmedia_chroma_t chroma, tsk_size_t w, tsk_size_t h)
+{
+ switch(chroma){
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return (w * h * 3);
+ case tmedia_chroma_rgb565le:
+ return ((w * h) << 1);
+ case tmedia_chroma_rgb32:
+ return ((w * h) << 2);
+ case tmedia_chroma_nv21:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_nv12:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_yuv422p:
+ return ((w * h) << 1);
+ case tmedia_chroma_uyvy422:
+ case tmedia_chroma_yuyv422:
+ return ((w * h) << 1);
+ case tmedia_chroma_yuv420p:
+ return ((w * h * 3) >> 1);
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return 0;
+ }
+}
+
+static inline const GUID& _plugin_win_mf_converter_video_ms_get_pixfmt(tmedia_chroma_t chroma)
+{
+ switch(chroma){
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return MFVideoFormat_RGB24;
+ case tmedia_chroma_rgb565le:
+ return MFVideoFormat_RGB565;
+ case tmedia_chroma_rgb32:
+ return MFVideoFormat_RGB32;
+ case tmedia_chroma_nv12:
+ return MFVideoFormat_NV12;
+ case tmedia_chroma_yuv420p:
+ return MFVideoFormat_I420;
+ case tmedia_chroma_yuyv422:
+ return MFVideoFormat_YUY2;
+ case tmedia_chroma_uyvy422:
+ return MFVideoFormat_UYVY;
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return GUID_NULL;
+ }
+}
+
+// For RGB32:
+// Direct3D -> Top-Down
+// Video Processor -> Down-Top
+static inline HRESULT _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
+ BYTE* pDst,
+ const BYTE* pSrc,
+ INT dwWidthInPixels,
+ INT dwHeightInPixels
+ )
+{
+ RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthInPixels * dwHeightInPixels) - dwWidthInPixels];
+ RGBQUAD *pDestPixel = &((RGBQUAD*)pDst)[0];
+
+ register INT x;
+ register INT y;
+
+ for (y = dwHeightInPixels; y > 0 ; --y)
+ {
+ for (x = 0; x < dwWidthInPixels; ++x)
+ {
+ pDestPixel[x] = pSrcPixel[x];
+ }
+ pDestPixel += dwWidthInPixels;
+ pSrcPixel -= dwWidthInPixels;
+ }
+ return S_OK;
+}
+
+static HRESULT _plugin_win_mf_converter_video_ms_process_input(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample* pSample)
+{
+ return pSelf->pMFT->ProcessInput(0, pSample, 0);
+}
+
// Pulls one converted sample out of the transform. The output sample is cached
// in pSelf->pSampleOut and reused across calls (its buffer is grown on demand);
// on success *ppSample receives an AddRef'd pointer to that cached sample.
static HRESULT _plugin_win_mf_converter_video_ms_process_output(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample **ppSample)
{
	*ppSample = NULL;

	IMFMediaBuffer* pBufferOut = NULL;

	DWORD dwStatus;

	HRESULT hr = S_OK;

	MFT_OUTPUT_STREAM_INFO mftStreamInfo = { 0 };
	MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };

	CHECK_HR(hr = pSelf->pMFT->GetOutputStreamInfo(0, &mftStreamInfo));

	if(!pSelf->pSampleOut)
	{
		// First call: create the reusable output sample sized per the MFT's needs
		CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &pSelf->pSampleOut));
		hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut);
		if(FAILED(hr))
		{
			SafeRelease(&pSelf->pSampleOut);
			CHECK_HR(hr);
		}
	}
	else
	{
		// Reuse the cached sample; replace its buffer if it became too small
		DWORD dwMaxLength = 0;
		CHECK_HR(hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut));
		CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
		if(dwMaxLength < mftStreamInfo.cbSize)
		{
			CHECK_HR(hr = pSelf->pSampleOut->RemoveAllBuffers());
			SafeRelease(&pBufferOut);
			CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
			CHECK_HR(hr = pSelf->pSampleOut->AddBuffer(pBufferOut));
		}
	}

	CHECK_HR(hr = pBufferOut->SetCurrentLength(0));

	//Set the output sample
	mftOutputData.pSample = pSelf->pSampleOut;
	//Set the output id
	mftOutputData.dwStreamID = 0;

	//Generate the output sample
	CHECK_HR(hr = pSelf->pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus));
	/*if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
	{
		hr = S_OK;
		goto bail;
	}*/

	// TODO: Handle MF_E_TRANSFORM_STREAM_CHANGE

	*ppSample = pSelf->pSampleOut;
	(*ppSample)->AddRef();

bail:
	SafeRelease(&pBufferOut);
	return hr;
}
+
// Runs one conversion: copies the raw input into the (reusable) input sample,
// timestamps it, feeds it to the MFT, and drains the MFT while it reports
// MF_E_NOTACCEPTING. On success *ppSampleOut holds the converted sample
// (caller releases it).
static HRESULT _plugin_win_mf_converter_video_ms_process(plugin_win_mf_converter_video_ms_t* pSelf, const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut)
{
	if(!pcInputPtr || !nInputSize || !ppSampleOut)
	{
		TSK_DEBUG_ERROR("Invalid parameter");
		return E_INVALIDARG;
	}

	*ppSampleOut = NULL;

	HRESULT hr = S_OK;

	IMFMediaBuffer* pBufferIn = NULL;
	BYTE* pBufferPtr = NULL;

	if(!pSelf->pSampleIn)
	{
		// First call: create the reusable input sample
		CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &pSelf->pSampleIn));
		hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn);
		if(FAILED(hr))
		{
			SafeRelease(&pSelf->pSampleIn);
			CHECK_HR(hr);
		}
	}
	else
	{
		// Reuse the cached sample; replace its buffer if it became too small
		DWORD dwMaxLength = 0;
		CHECK_HR(hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn));
		CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
		if(dwMaxLength < nInputSize)
		{
			CHECK_HR(hr = pSelf->pSampleIn->RemoveAllBuffers());
			SafeRelease(&pBufferIn);
			CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
			CHECK_HR(hr = pSelf->pSampleIn->AddBuffer(pBufferIn));
		}
	}

	// Copy the caller's raw frame into the sample buffer
	CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
	memcpy(pBufferPtr, pcInputPtr, nInputSize);
	CHECK_HR(hr = pBufferIn->Unlock());
	CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));

	// The MFT requires timestamped samples (see PLUGIN_MF_VC_FPS)
	CHECK_HR(hr = pSelf->pSampleIn->SetSampleDuration(pSelf->rtDuration));
	CHECK_HR(hr = pSelf->pSampleIn->SetSampleTime(pSelf->rtStart));

	hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
	// MFT full: drain pending output, then retry the input (marked discontinuous)
	while(hr == MF_E_NOTACCEPTING)
	{
		TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
		IMFSample* pSample = NULL;
		hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, &pSample);
		if(SUCCEEDED(hr) && pSample)
		{
			SafeRelease(ppSampleOut);
			*ppSampleOut = pSample, pSample = NULL;

			hr = pSelf->pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
			hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
		}
	}
	if(!*ppSampleOut)
	{
		CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, ppSampleOut));
	}

bail:
	SafeRelease(&pBufferIn);
	return hr;
}
diff --git a/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx b/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
new file mode 100644
index 0000000..2a3c314
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
@@ -0,0 +1,333 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+#include "internals/mf_sample_grabber.h"
+#include "internals/mf_devices.h"
+
+#include "tinydav/audio/tdav_producer_audio.h"
+
+#include "tsk_thread.h"
+#include "tsk_debug.h"
+
+static void* TSK_STDCALL RunSessionThread(void *pArg);
+
+// Private context for the Media Foundation audio producer.
+typedef struct plugin_win_mf_producer_audio_s
+{
+	TDAV_DECLARE_PRODUCER_AUDIO; // base class: generic tinyDAV audio producer
+
+	bool bStarted; // set by start(), cleared by stop(); also read by the watcher thread
+	tsk_thread_handle_t* ppTread[1]; // asynchronous media-session event watcher thread
+
+	DeviceListAudio* pDeviceList; // enumerated audio capture devices
+
+	IMFMediaSession *pSession; // MF media session driving the capture topology
+	IMFMediaSource *pSource; // capture device media source
+	SampleGrabberCB *pCallback; // sample-grabber callback delivering PCM frames
+	IMFActivate *pSinkActivate; // sample-grabber sink activation object
+	IMFTopology *pTopology; // source -> (processor) -> grabber topology
+	IMFMediaType *pType; // negotiated PCM media type
+}
+plugin_win_mf_producer_audio_t;
+
+/* ============ Media Producer Interface ================= */
+// Forward runtime parameter updates to the base audio producer.
+// No producer-specific keys are handled yet; the empty branch is kept as a
+// placeholder for future producer-scoped parameters.
+static int plugin_win_mf_producer_audio_set(tmedia_producer_t* self, const tmedia_param_t* param)
+{
+	plugin_win_mf_producer_audio_t* pProducer = (plugin_win_mf_producer_audio_t*)self;
+	if(param->plugin_type == tmedia_ppt_producer){
+		/* no producer-specific parameters handled yet */
+	}
+	return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(pProducer), param);
+}
+
+// Negotiate the audio format with the codec and build (but do not start) the MF
+// capture pipeline: source (best audio device) -> sample-grabber sink delivering
+// raw PCM frames. Finding no capture device is NOT an error (one-way audio is a
+// valid use-case). Returns 0 on success, -1 on failure.
+static int plugin_win_mf_producer_audio_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
+{
+	plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+	if(!pSelf || !codec){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	// Take channels/rate/ptime from the negotiated codec.
+	TMEDIA_PRODUCER(pSelf)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
+	TMEDIA_PRODUCER(pSelf)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
+	TMEDIA_PRODUCER(pSelf)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
+
+	TSK_DEBUG_INFO("MF audio producer: channels=%d, rate=%d, ptime=%d",
+		TMEDIA_PRODUCER(pSelf)->audio.channels,
+		TMEDIA_PRODUCER(pSelf)->audio.rate,
+		TMEDIA_PRODUCER(pSelf)->audio.ptime
+		);
+
+	HRESULT hr = S_OK;
+
+	// create device list object
+	if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListAudio())){
+		TSK_DEBUG_ERROR("Failed to create device list");
+		hr = E_OUTOFMEMORY;
+		goto bail;
+	}
+	// enumerate devices
+	hr = pSelf->pDeviceList->EnumerateDevices();
+	if(!SUCCEEDED(hr)){
+		goto bail;
+	}
+
+	// check if we have at least one MF audio source connected to the PC
+	if(pSelf->pDeviceList->Count() == 0){
+		// FIX: messages said "video" (copy-paste from the video producer)
+		TSK_DEBUG_WARN("No MF audio source could be found...no audio will be sent");
+		// do not break the negotiation as one-way audio is a valid use-case
+	}
+	else{
+		IMFActivate* pActivate = NULL;
+		// Get best MF audio source
+		hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+		if(!SUCCEEDED(hr) || !pActivate){
+			TSK_DEBUG_ERROR("Failed to get best MF audio source");
+			if(!pActivate){
+				hr = E_OUTOFMEMORY;
+			}
+			SafeRelease(&pActivate); // FIX: do not leak the activation object on failure
+			goto bail;
+		}
+
+		// Create the media source for the device.
+		hr = pActivate->ActivateObject(
+			__uuidof(IMFMediaSource),
+			(void**)&pSelf->pSource
+			);
+		SafeRelease(&pActivate);
+		if(!SUCCEEDED(hr)){
+			TSK_DEBUG_ERROR("ActivateObject(MF audio source) failed");
+			goto bail;
+		}
+
+		// Create and configure the PCM media type expected by the grabber.
+		CHECK_HR(hr = MFCreateMediaType(&pSelf->pType));
+		CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
+		CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
+		CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, TMEDIA_PRODUCER(pSelf)->audio.channels));
+		CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, TMEDIA_PRODUCER(pSelf)->audio.rate));
+		CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample));
+		CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // because uncompressed media type
+		CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
+		UINT32 nBlockAlign = TMEDIA_PRODUCER(pSelf)->audio.channels * (TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample >> 3);
+		UINT32 nAvgBytesPerSec = (nBlockAlign * TMEDIA_PRODUCER(pSelf)->audio.rate);
+		CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, nBlockAlign));
+		CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, nAvgBytesPerSec));
+
+		// Create the sample grabber sink.
+		CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
+		CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pType, pSelf->pCallback, &pSelf->pSinkActivate));
+
+		// To run as fast as possible, set this attribute (requires Windows 7):
+		CHECK_HR(hr = pSelf->pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+		// Create the Media Session.
+		CHECK_HR(hr = MFCreateMediaSession(NULL, &pSelf->pSession));
+
+		// Create the topology.
+		CHECK_HR(hr = MFUtils::CreateTopology(pSelf->pSource, NULL/*NO ENCODER*/, pSelf->pSinkActivate, NULL/*Preview*/, pSelf->pType, &pSelf->pTopology));
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Start the audio capture: run the MF session, then spawn the asynchronous
+// event-watcher thread. Returns 0 on success, -1 on failure.
+static int plugin_win_mf_producer_audio_start(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_audio_t* pProducer = (plugin_win_mf_producer_audio_t*)self;
+	HRESULT hr = S_OK;
+
+	if(!pProducer){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if(pProducer->bStarted){
+		TSK_DEBUG_INFO("MF audio producer already started");
+		return 0;
+	}
+
+	// Run the media session.
+	CHECK_HR(hr = MFUtils::RunSession(pProducer->pSession, pProducer->pTopology));
+
+	// Start asynchronous watcher thread.
+	pProducer->bStarted = true;
+	if(tsk_thread_create(&pProducer->ppTread[0], RunSessionThread, pProducer) != 0) {
+		TSK_DEBUG_ERROR("Failed to create thread");
+		hr = E_FAIL;
+		// Roll back: clear the flag, join the thread if it was created, stop the session.
+		pProducer->bStarted = false;
+		if(pProducer->ppTread[0]){
+			tsk_thread_join(&pProducer->ppTread[0]);
+		}
+		MFUtils::ShutdownSession(pProducer->pSession, pProducer->pSource);
+		goto bail;
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Pause the capture session; meaningful only after a successful start().
+static int plugin_win_mf_producer_audio_pause(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_audio_t* pProducer = (plugin_win_mf_producer_audio_t*)self;
+	if(!pProducer){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	return SUCCEEDED(MFUtils::PauseSession(pProducer->pSession)) ? 0 : -1;
+}
+
+// Stop the audio producer. Two-phase shutdown: the session is stopped first so
+// the watcher thread wakes from GetEvent() and can be joined, then the source is
+// stopped to release the capture device. Failures are deliberately ignored
+// (best-effort teardown) — this function always returns 0.
+static int plugin_win_mf_producer_audio_stop(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+
+	// for the thread
+	pSelf->bStarted = false;
+	hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
+	if(pSelf->ppTread[0]){
+		tsk_thread_join(&pSelf->ppTread[0]);
+	}
+	hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the capture device
+
+	return 0;
+}
+
+
+//
+// WaveAPI producer object definition
+//
+/* constructor */
+/* constructor: invoked by the tsk object framework when an instance is allocated */
+static tsk_object_t* plugin_win_mf_producer_audio_ctor(tsk_object_t * self, va_list * app)
+{
+	// Make sure Media Foundation is initialized before anything else.
+	MFUtils::Startup();
+
+	plugin_win_mf_producer_audio_t *pProducer = (plugin_win_mf_producer_audio_t*)self;
+	if(pProducer){
+		/* init base */
+		tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(pProducer));
+		/* init self: members start zeroed by the allocator */
+	}
+	return self;
+}
+/* destructor */
+// Destructor: stops the producer if still running, then tears everything down.
+// Order matters: Shutdown() is called on the source/session BEFORE the COM
+// references are released.
+static tsk_object_t* plugin_win_mf_producer_audio_dtor(tsk_object_t * self)
+{
+	plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)self;
+	if(pSelf){
+		/* stop */
+		if(pSelf->bStarted){
+			plugin_win_mf_producer_audio_stop(TMEDIA_PRODUCER(pSelf));
+		}
+
+		/* deinit base */
+		tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(pSelf));
+		/* deinit self */
+		if(pSelf->pDeviceList){
+			delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+		}
+		if(pSelf->pSource){
+			pSelf->pSource->Shutdown();
+		}
+		if(pSelf->pSession){
+			pSelf->pSession->Shutdown();
+		}
+
+		SafeRelease(&pSelf->pSession);
+		SafeRelease(&pSelf->pSource);
+		SafeRelease(&pSelf->pCallback);
+		SafeRelease(&pSelf->pSinkActivate);
+		SafeRelease(&pSelf->pTopology);
+		SafeRelease(&pSelf->pType);
+	}
+
+	return self;
+}
+/* object definition */
+static const tsk_object_def_t plugin_win_mf_producer_audio_def_s =
+{
+	sizeof(plugin_win_mf_producer_audio_t), // object size for the tsk allocator
+	plugin_win_mf_producer_audio_ctor, // constructor
+	plugin_win_mf_producer_audio_dtor, // destructor
+	tdav_producer_audio_cmp, // comparator (inherited from the base audio producer)
+};
+/* plugin definition*/
+static const tmedia_producer_plugin_def_t plugin_win_mf_producer_audio_plugin_def_s =
+{
+	&plugin_win_mf_producer_audio_def_s,
+
+	tmedia_audio, // media type handled by this plugin
+	"Media Foundation audio producer", // friendly description
+
+	// producer interface (set/prepare/start/pause/stop)
+	plugin_win_mf_producer_audio_set,
+	plugin_win_mf_producer_audio_prepare,
+	plugin_win_mf_producer_audio_start,
+	plugin_win_mf_producer_audio_pause,
+	plugin_win_mf_producer_audio_stop
+};
+// Exported plugin entry used by the plugin registry.
+const tmedia_producer_plugin_def_t *plugin_win_mf_producer_audio_plugin_def_t = &plugin_win_mf_producer_audio_plugin_def_s;
+
+
+// Run session async thread
+// Run session async thread: pumps media-session events until the session ends,
+// an error is reported, or stop() clears bStarted (which makes the blocking
+// GetEvent() return after the session is shut down).
+static void* TSK_STDCALL RunSessionThread(void *pArg)
+{
+	plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)pArg;
+	HRESULT hrStatus = S_OK;
+	HRESULT hr = S_OK;
+	IMFMediaEvent *pEvent = NULL;
+	MediaEventType met;
+
+	TSK_DEBUG_INFO("RunSessionThread (audio) - ENTER");
+
+	while(pSelf->bStarted){
+		CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+		CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+		CHECK_HR(hr = pEvent->GetType(&met));
+
+		if (FAILED(hrStatus))
+		{
+			TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+			hr = hrStatus;
+			goto bail;
+		}
+		if (met == MESessionEnded)
+		{
+			break;
+		}
+		SafeRelease(&pEvent);
+	}
+
+bail:
+	SafeRelease(&pEvent); // FIX: the event was leaked when exiting through the error/break path
+	TSK_DEBUG_INFO("RunSessionThread (audio) - EXIT");
+
+	return NULL;
+}
diff --git a/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx b/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
new file mode 100644
index 0000000..886fc45
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
@@ -0,0 +1,708 @@
+/* Copyright (C) 2013-2015 Mamadou DIOP
+* Copyright (C) 2013-2015 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+#include "internals/mf_sample_grabber.h"
+#include "internals/mf_devices.h"
+#include "internals/mf_display_watcher.h"
+#include "internals/mf_custom_src.h"
+#include "internals/mf_codec.h"
+
+#include "tinymedia/tmedia_defaults.h"
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_string.h"
+#include "tsk_thread.h"
+#include "tsk_debug.h"
+
+#include <KS.h>
+#include <Codecapi.h>
+#include <assert.h>
+#include <stdlib.h> /* mbstowcs, wchar_t(C) */
+#include <initguid.h>
+
+// 0: {{[Source] -> (VideoProcessor) -> SampleGrabber}} , {{[Encoder]}} -> RTP
+// 1: {{[Source] -> (VideoProcessor) -> [Encoder] -> SampleGrabber}} -> RTP
+// (VideoProcessor) is optional
+// "{{" and "}}" defines where the graph starts and ends respectively. For "0", [Decoder] is a stand-alone IMFTransform.
+#if !defined(PLUGIN_MF_PV_BUNDLE_CODEC)
+# define PLUGIN_MF_PV_BUNDLE_CODEC 1 /* MUST be "1" when the encoder is an Async Transform (e.g. Intel Quick Sync). Use "1" to be sure is will always work. */
+#endif /* PLUGIN_MF_PV_BUNDLE_CODEC */
+
+#if !defined(PLUGIN_MF_GOP_SIZE_IN_SECONDS)
+#define PLUGIN_MF_GOP_SIZE_IN_SECONDS 60
+#endif /* PLUGIN_MF_GOP_SIZE_IN_SECONDS */
+
+DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+
+extern const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t;
+extern const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t;
+
+static void* TSK_STDCALL RunSessionThread(void *pArg);
+static int _plugin_win_mf_producer_video_unprepare(struct plugin_win_mf_producer_video_s* pSelf);
+
+//
+// plugin_win_mf_producer_video_t
+//
+// Private context for the Media Foundation video producer.
+typedef struct plugin_win_mf_producer_video_s
+{
+	TMEDIA_DECLARE_PRODUCER; // base class: generic media producer
+
+	bool bStarted, bPrepared, bMuted; // lifecycle flags; bStarted is also read by the watcher thread
+	tsk_thread_handle_t* ppTread[1]; // asynchronous media-session event watcher thread
+	HWND hWndPreview; // window receiving the local video preview (may be NULL)
+
+	int32_t bitrate_bps; // used when encoder bundled only
+
+	DeviceListVideo* pDeviceList; // enumerated video capture devices
+
+	MFCodecVideo *pEncoder; // bundled H.264 HW encoder (NULL when encoding is done elsewhere)
+	IMFMediaSession *pSession; // MF media session driving the capture topology
+	IMFMediaSource *pSource; // capture device media source
+	SampleGrabberCB *pCallback; // sample-grabber callback delivering frames to the RTP layer
+	IMFActivate *pSinkGrabber; // sample-grabber sink activation object
+	IMFActivate *pSinkActivatePreview; // EVR activation object for the preview
+	DisplayWatcher* pWatcherPreview; // tracks display changes for the preview EVR
+	IMFTopology *pTopology; // resolved capture topology
+	IMFMediaType *pGrabberInputType; // media type negotiated for the grabber input
+}
+plugin_win_mf_producer_video_t;
+
+/* ============ Video MF Producer Interface ================= */
+// Handle runtime parameter updates: codec actions (IDR request, bandwidth
+// up/down), preview window handle changes and mute toggling.
+// Returns 0 on success, -1 on failure.
+static int plugin_win_mf_producer_video_set(tmedia_producer_t *self, const tmedia_param_t* param)
+{
+	HRESULT hr = S_OK;
+	plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+	if (!pSelf || !param){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if (tsk_striequals(param->key, "action")){
+		tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+		// FIX: a shadowed 'HRESULT hr' was declared here, which made every
+		// CHECK_HR failure below invisible to the final return statement.
+		switch (action){
+			case tmedia_codec_action_encode_idr:
+				{
+					if (pSelf->pEncoder)
+					{
+						// Ask the bundled HW encoder to emit an IDR frame.
+						CHECK_HR(hr = pSelf->pEncoder->RequestKeyFrame());
+					}
+					break;
+				}
+			case tmedia_codec_action_bw_down:
+				{
+					// Reduce target bitrate to 2/3 of the current value.
+					// NOTE(review): TMEDIA_CODEC(pSelf) casts a *producer* to a codec to read
+					// 'bandwidth_max_upload' — looks wrong; confirm the intended source of the cap.
+					pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps << 1) / 3), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
+					TSK_DEBUG_INFO("New target bitrate = %d bps", pSelf->bitrate_bps); // FIX: value is in bps, not kbps
+					if (pSelf->pEncoder)
+					{
+						CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
+					}
+					break;
+				}
+			case tmedia_codec_action_bw_up:
+				{
+					// Raise target bitrate to 3/2 of the current value.
+					pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps * 3) >> 1), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
+					TSK_DEBUG_INFO("New target bitrate = %d bps", pSelf->bitrate_bps); // FIX: value is in bps, not kbps
+					if (pSelf->pEncoder)
+					{
+						CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
+					}
+					break;
+				}
+			default:
+				{
+					// Other codec actions are not handled by this producer.
+					break;
+				}
+		}
+	}
+	else if (param->value_type == tmedia_pvt_int64){
+		if (tsk_striequals(param->key, "local-hwnd")){
+			HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+			if (hWnd != pSelf->hWndPreview)
+			{
+				pSelf->hWndPreview = hWnd;
+				if (pSelf->pWatcherPreview)
+				{
+					// Re-target the preview renderer at the new window.
+					CHECK_HR(hr = pSelf->pWatcherPreview->SetHwnd(hWnd));
+				}
+			}
+		}
+	}
+	else if (param->value_type == tmedia_pvt_int32){
+		if (tsk_striequals(param->key, "mute")){
+			pSelf->bMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+			if (pSelf->pCallback) {
+				// Mute at the grabber level (frames are dropped, session keeps running).
+				pSelf->pCallback->SetMute(pSelf->bMuted);
+			}
+#if 0
+			if (pSelf->bStarted && pSelf->pSession) {
+				if (pSelf->bMuted) {
+					pSelf->pSession->Pause();
+				}
+				else {
+					CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+				}
+			}
+#endif
+		}
+		else if (tsk_striequals(param->key, "create-on-current-thead")){
+			//producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+		}
+		else if (tsk_striequals(param->key, "plugin-firefox")){
+			//producer->plugin_firefox = (*((int32_t*)param->value) != 0);
+			//if(producer->grabber){
+			//	producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
+			//}
+		}
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Negotiate the video format with the codec and build (but do not start) the MF
+// capture pipeline: source (best camera) -> [optional bundled H.264 HW encoder]
+// -> sample grabber, plus an EVR branch for the local preview.
+// Finding no capture device is NOT an error (one-way video is a valid use-case).
+// Returns 0 on success, -1 on failure.
+static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
+{
+	plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+	// FIX: was '!pSelf || !codec && codec->plugin' — wrong precedence/logic which
+	// dereferenced a NULL 'codec' and never rejected a missing plugin.
+	if (!pSelf || !codec || !codec->plugin){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (pSelf->bPrepared){
+		TSK_DEBUG_WARN("MF video producer already prepared");
+		return -1;
+	}
+
+	// FIXME: DirectShow requires flipping but not MF
+	// The Core library always tries to flip when OSType==Win32. Must be changed
+	TMEDIA_CODEC_VIDEO(codec)->out.flip = tsk_false;
+
+	TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+	TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+	TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+	TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+
+	TSK_DEBUG_INFO("MF video producer: fps=%d, width=%d, height=%d",
+		TMEDIA_PRODUCER(pSelf)->video.fps,
+		TMEDIA_PRODUCER(pSelf)->video.width,
+		TMEDIA_PRODUCER(pSelf)->video.height);
+
+	HRESULT hr = S_OK;
+	IMFAttributes* pSessionAttributes = NULL;
+	IMFTopology *pTopology = NULL;
+	IMFMediaSink* pEvr = NULL;
+	IMFMediaType* pEncoderInputType = NULL;
+	IMFTopologyNode *pNodeGrabber = NULL;
+	IMFMediaType* pGrabberNegotiatedInputMedia = NULL;
+	BOOL bVideoProcessorIsSupported = FALSE;
+	const VideoSubTypeGuidPair *pcPreferredSubTypeGuidPair = NULL;
+
+	// create device list object
+	if (!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListVideo())){
+		TSK_DEBUG_ERROR("Failed to create device list");
+		hr = E_OUTOFMEMORY;
+		goto bail;
+	}
+	// enumerate devices
+	hr = pSelf->pDeviceList->EnumerateDevices();
+	if (!SUCCEEDED(hr)){
+		goto bail;
+	}
+
+	// check if we have at least one MF video source connected to the PC
+	if (pSelf->pDeviceList->Count() == 0){
+		TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
+		// do not break the negotiation as one-way video connection is a valid use-case
+	}
+	else{
+		// Get best MF video source
+		IMFActivate* pActivate = NULL;
+		const char* pczSrcFriendlyName = tmedia_producer_get_friendly_name(tmedia_video);
+		if (!tsk_strnullORempty(pczSrcFriendlyName)) {
+			TSK_DEBUG_INFO("MF pref. video source = %s", pczSrcFriendlyName);
+			wchar_t pczwSrcFriendlyName[MAX_PATH] = { 0 };
+			mbstowcs(pczwSrcFriendlyName, pczSrcFriendlyName, sizeof(pczwSrcFriendlyName) / sizeof(pczwSrcFriendlyName[0]));
+			hr = pSelf->pDeviceList->GetDeviceBest(&pActivate, pczwSrcFriendlyName);
+		}
+		else {
+			hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+		}
+		if (!SUCCEEDED(hr) || !pActivate){
+			TSK_DEBUG_ERROR("Failed to get best MF video source");
+			if (!pActivate){
+				hr = E_OUTOFMEMORY;
+			}
+			SafeRelease(&pActivate); // FIX: do not leak the activation object on failure
+			goto bail;
+		}
+
+		// Create the media source for the device.
+		hr = pActivate->ActivateObject(
+			__uuidof(IMFMediaSource),
+			(void**)&pSelf->pSource
+			);
+		SafeRelease(&pActivate);
+		if (!SUCCEEDED(hr)){
+			TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
+			goto bail;
+		}
+
+		// Check whether video processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) is supported
+		CHECK_HR(hr = MFUtils::IsVideoProcessorSupported(&bVideoProcessorIsSupported));
+
+		// Must not be set because not supported by Frame Rate Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx).aspx) because of color (neither I420 nor NV12)
+		// Video Processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) supports both NV12 and I420
+		if (!bVideoProcessorIsSupported) {
+			UINT32 nWidth, nHeight, nFps;
+			hr = MFUtils::GetBestFormat(
+				pSelf->pSource,
+				&MFVideoFormat_I420,
+				(UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
+				(UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
+				(UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
+				&nWidth,
+				&nHeight,
+				&nFps,
+				&pcPreferredSubTypeGuidPair
+				);
+			if (SUCCEEDED(hr))
+			{
+				TSK_DEBUG_INFO("Video processor not supported...using source fps=%u, width=%u, height=%u", nFps, nWidth, nHeight);
+				TMEDIA_PRODUCER(pSelf)->video.width = nWidth;
+				TMEDIA_PRODUCER(pSelf)->video.height = nHeight;
+				TMEDIA_PRODUCER(pSelf)->video.fps = nFps;
+			}
+		}
+
+		// If H.264 is negotiated for this session then, try to find hardware encoder
+		// If no HW encoder is found will fallback to SW implementation from x264
+#if PLUGIN_MF_PV_BUNDLE_CODEC
+		// Before embedding a H.264 encoder we have to be sure that:
+		// - Low latency is supported
+		// - The user decided to use MF encoder (Microsoft, Intel Quick Sync or any other)
+		if ((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
+			BOOL bMFEncoderIsRegistered =
+				(codec->id == tmedia_codec_id_h264_mp && codec->plugin == mf_codec_h264_main_plugin_def_t)
+				|| (codec->id == tmedia_codec_id_h264_bp && codec->plugin == mf_codec_h264_base_plugin_def_t);
+			if (bMFEncoderIsRegistered)
+			{
+				// both Microsoft and Intel encoders support NV12 only as input
+				// static const BOOL kIsEncoder = TRUE;
+				// hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pSelf->pEncoder);
+				pSelf->pEncoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder);
+				if (pSelf->pEncoder)
+				{
+					pSelf->pEncoder->setBundled(TRUE);
+					int32_t avg_bitrate_kbps = tmedia_get_video_bandwidth_kbps_2((unsigned int)TMEDIA_PRODUCER(pSelf)->video.width, (unsigned int)TMEDIA_PRODUCER(pSelf)->video.height, TMEDIA_PRODUCER(pSelf)->video.fps);
+					TSK_DEBUG_INFO("MF_MT_AVG_BITRATE defined with value = %d kbps", avg_bitrate_kbps);
+					pSelf->bitrate_bps = (avg_bitrate_kbps * 1024);
+
+					hr = pSelf->pEncoder->Initialize(
+						(UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
+						(UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
+						(UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
+						(UINT32)pSelf->bitrate_bps);
+					if (SUCCEEDED(hr))
+					{
+						/*hr =*/ pSelf->pEncoder->SetGOPSize((PLUGIN_MF_GOP_SIZE_IN_SECONDS * TMEDIA_PRODUCER(pSelf)->video.fps));
+					}
+					if (FAILED(hr))
+					{
+						SafeRelease(&pSelf->pEncoder);
+						hr = S_OK;
+					}
+				}
+				if (SUCCEEDED(hr) && pSelf->pEncoder)
+				{
+					TMEDIA_PRODUCER(pSelf)->encoder.codec_id = codec->id; // means encoded frames as input
+				}
+				else
+				{
+					SafeRelease(&pSelf->pEncoder);
+					TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
+				}
+			}
+			else /* if(!bMFEncoderIsRegistered) */
+			{
+				TSK_DEBUG_INFO("Not bundling MF H.264 encoder even if low latency is supported because another implementation is registered: %s", codec->plugin->desc);
+			}
+		}
+#endif
+
+		// Set session attributes
+		CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+		CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+		// Configure the media type that the Sample Grabber will receive.
+		// Setting the major and subtype is usually enough for the topology loader
+		// to resolve the topology.
+
+		CHECK_HR(hr = MFCreateMediaType(&pSelf->pGrabberInputType));
+		CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+		CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+		CHECK_HR(hr = MFSetAttributeSize(pSelf->pGrabberInputType, MF_MT_FRAME_SIZE, (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, (UINT32)TMEDIA_PRODUCER(pSelf)->video.height));
+		CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_FRAME_RATE, TMEDIA_PRODUCER(pSelf)->video.fps, 1));
+
+		CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+		CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, pSelf->pEncoder ? FALSE : TRUE));
+		CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, pSelf->pEncoder ? FALSE : TRUE));
+		if (pSelf->pEncoder) {
+			switch (codec->id){
+				case tmedia_codec_id_h264_bp: case tmedia_codec_id_h264_mp:
+				{
+					CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
+					CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_MPEG2_PROFILE, (codec->id == tmedia_codec_id_h264_bp) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
+					CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_AVG_BITRATE, pSelf->bitrate_bps));
+					break;
+				}
+				default:
+				{
+					TSK_DEBUG_ERROR("HW encoder with id = %d not expected", codec->id);
+					assert(false);
+				}
+			}
+			TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
+			TSK_DEBUG_INFO("MF video producer chroma = NV12 (because of HW encoder)");
+		}
+		else {
+			// Video Processors will be inserted in the topology if the source cannot produce I420 frames
+			// IMPORTANT: Must not be NV12 because not supported by Video Resizer DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx)
+			CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->fourcc : MFVideoFormat_I420));
+			TMEDIA_PRODUCER(pSelf)->video.chroma = pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->chroma : tmedia_chroma_yuv420p;
+			TSK_DEBUG_INFO("MF video producer chroma = %d", TMEDIA_PRODUCER(pSelf)->video.chroma);
+		}
+
+		if (pSelf->pEncoder) {
+			// Unlock the encoder
+			//BOOL bIsAsyncMFT = FALSE;
+			//CHECK_HR(hr = MFUtils::IsAsyncMFT(pSelf->pEncoder->GetMFT(), &bIsAsyncMFT));
+			//if(bIsAsyncMFT)
+			//{
+			//	CHECK_HR(hr = MFUtils::UnlockAsyncMFT(pSelf->pEncoderpSelf->pEncoder->GetMFT()));
+			//}
+			// Apply Encoder output type (must be called before SetInputType)
+			//CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetOutputType(0, pSelf->pGrabberInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+
+			// HW encoders support only NV12
+			//CHECK_HR(hr = MFUtils::ConvertVideoTypeToUncompressedType(pSelf->pGrabberInputType, MFVideoFormat_NV12, &pEncoderInputType));
+			//CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetInputType(0, pEncoderInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+		}
+		// Create the sample grabber sink.
+		CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
+		CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pGrabberInputType, pSelf->pCallback, &pSelf->pSinkGrabber));
+
+		// To run as fast as possible, set this attribute (requires Windows 7):
+		CHECK_HR(hr = pSelf->pSinkGrabber->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+		// Create the Media Session.
+		CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
+
+		// Create the EVR activation object for the preview.
+		CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWndPreview, &pSelf->pSinkActivatePreview));
+
+		// Create the topology.
+		CHECK_HR(hr = MFUtils::CreateTopology(
+			pSelf->pSource,
+			pSelf->pEncoder ? pSelf->pEncoder->GetMFT() : NULL,
+			pSelf->pSinkGrabber,
+			pSelf->pSinkActivatePreview,
+			pSelf->pGrabberInputType,
+			&pTopology));
+		// Resolve topology (adds video processors if needed).
+		CHECK_HR(hr = MFUtils::ResolveTopology(pTopology, &pSelf->pTopology));
+
+		// Find EVR for the preview.
+		CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopology, MFUtils::g_ullTopoIdSinkPreview, (void**)&pEvr));
+
+		// Find negotiated media and update producer
+		UINT32 nNegWidth = (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, nNegHeight = (UINT32)TMEDIA_PRODUCER(pSelf)->video.height, nNegNumeratorFps = (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps, nNegDenominatorFps = 1;
+		CHECK_HR(hr = pSelf->pTopology->GetNodeByID(MFUtils::g_ullTopoIdSinkMain, &pNodeGrabber));
+		CHECK_HR(hr = pNodeGrabber->GetInputPrefType(0, &pGrabberNegotiatedInputMedia));
+		hr = MFGetAttributeSize(pGrabberNegotiatedInputMedia, MF_MT_FRAME_SIZE, &nNegWidth, &nNegHeight);
+		if (SUCCEEDED(hr))
+		{
+			TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: width(%u/%u), height(%u/%u)",
+				TMEDIA_PRODUCER(pSelf)->video.width, nNegWidth,
+				TMEDIA_PRODUCER(pSelf)->video.height, nNegHeight
+				);
+			TMEDIA_PRODUCER(pSelf)->video.width = nNegWidth;
+			TMEDIA_PRODUCER(pSelf)->video.height = nNegHeight;
+		}
+		hr = MFGetAttributeRatio(pGrabberNegotiatedInputMedia, MF_MT_FRAME_RATE, &nNegNumeratorFps, &nNegDenominatorFps);
+		if (SUCCEEDED(hr))
+		{
+			TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: fps(%u/%u)",
+				TMEDIA_PRODUCER(pSelf)->video.fps, (nNegNumeratorFps / nNegDenominatorFps)
+				);
+			TMEDIA_PRODUCER(pSelf)->video.fps = (nNegNumeratorFps / nNegDenominatorFps);
+		}
+
+		// Create EVR watcher for the preview.
+		pSelf->pWatcherPreview = new DisplayWatcher(pSelf->hWndPreview, pEvr, hr);
+		CHECK_HR(hr);
+	}
+
+bail:
+	SafeRelease(&pSessionAttributes);
+	SafeRelease(&pTopology);
+	SafeRelease(&pEvr);
+	SafeRelease(&pEncoderInputType);
+	SafeRelease(&pNodeGrabber);
+	SafeRelease(&pGrabberNegotiatedInputMedia);
+
+	pSelf->bPrepared = SUCCEEDED(hr);
+	return pSelf->bPrepared ? 0 : -1;
+}
+
+// Start the video producer: run the preview watcher, start the MF session,
+// then spawn the asynchronous event-watcher thread. Requires a prior prepare().
+static int plugin_win_mf_producer_video_start(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_video_t* pProducer = (plugin_win_mf_producer_video_t*)self;
+	HRESULT hr = S_OK;
+
+	if (!pProducer){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (pProducer->bStarted){
+		TSK_DEBUG_INFO("MF video producer already started");
+		return 0;
+	}
+	if (!pProducer->bPrepared){
+		TSK_DEBUG_ERROR("MF video producer not prepared");
+		return -1;
+	}
+
+	// Run preview watcher
+	if (pProducer->pWatcherPreview) {
+		CHECK_HR(hr = pProducer->pWatcherPreview->Start());
+	}
+
+	// Run the media session.
+	CHECK_HR(hr = MFUtils::RunSession(pProducer->pSession, pProducer->pTopology));
+
+	// Start asynchronous watcher thread.
+	pProducer->bStarted = true;
+	if (tsk_thread_create(&pProducer->ppTread[0], RunSessionThread, pProducer) != 0) {
+		TSK_DEBUG_ERROR("Failed to create thread");
+		hr = E_FAIL;
+		// Roll back: clear the flag, join the thread if it was created, stop the session.
+		pProducer->bStarted = false;
+		if (pProducer->ppTread[0]){
+			tsk_thread_join(&pProducer->ppTread[0]);
+		}
+		MFUtils::ShutdownSession(pProducer->pSession, pProducer->pSource);
+		goto bail;
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Pause the running capture session; a no-op (success) when not started.
+static int plugin_win_mf_producer_video_pause(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_video_t* pProducer = (plugin_win_mf_producer_video_t*)self;
+	if (!pProducer){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (!pProducer->bStarted)
+	{
+		TSK_DEBUG_INFO("MF video producer not started");
+		return 0;
+	}
+	return SUCCEEDED(MFUtils::PauseSession(pProducer->pSession)) ? 0 : -1;
+}
+
+// Stop the video producer. Two-phase shutdown: the session is stopped first so
+// the watcher thread wakes from GetEvent() and can be joined, then the source is
+// stopped to release the camera. Always unprepares, so the next start() must be
+// preceded by prepare().
+static int plugin_win_mf_producer_video_stop(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+	if (!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+
+	if (pSelf->pWatcherPreview){
+		hr = pSelf->pWatcherPreview->Stop();
+	}
+
+	// for the thread
+	pSelf->bStarted = false;
+	hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
+	if (pSelf->ppTread[0]){
+		tsk_thread_join(&pSelf->ppTread[0]);
+	}
+	hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
+
+	// next start() will be called after prepare()
+	return _plugin_win_mf_producer_video_unprepare(pSelf);
+}
+
+// Release everything created by prepare(). The producer must already be stopped.
+// Teardown order matters: Shutdown() is called on the source/session before the
+// COM references are released.
+static int _plugin_win_mf_producer_video_unprepare(plugin_win_mf_producer_video_t* pSelf)
+{
+	if (!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if (pSelf->bStarted) {
+		// plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+		TSK_DEBUG_ERROR("Producer must be stopped before calling unprepare");
+	}
+	if (pSelf->pDeviceList){
+		delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+	}
+	if (pSelf->pWatcherPreview){
+		pSelf->pWatcherPreview->Stop();
+	}
+	if (pSelf->pSource){
+		pSelf->pSource->Shutdown();
+	}
+	if (pSelf->pSession){
+		pSelf->pSession->Shutdown();
+	}
+
+	SafeRelease(&pSelf->pEncoder);
+	SafeRelease(&pSelf->pSession);
+	SafeRelease(&pSelf->pSource);
+	SafeRelease(&pSelf->pSinkActivatePreview);
+	SafeRelease(&pSelf->pCallback);
+	SafeRelease(&pSelf->pSinkGrabber);
+	SafeRelease(&pSelf->pTopology);
+	SafeRelease(&pSelf->pGrabberInputType);
+
+	// The watcher is deleted last, after its EVR reference has been released above.
+	if (pSelf->pWatcherPreview){
+		delete pSelf->pWatcherPreview;
+		pSelf->pWatcherPreview = NULL;
+	}
+
+	pSelf->bPrepared = false;
+
+	return 0;
+}
+
+//
+// Windows Media Foundation video producer object definition
+//
+/* constructor */
+/* constructor: sets sane defaults; the real pipeline setup happens in prepare() */
+static tsk_object_t* plugin_win_mf_producer_video_ctor(tsk_object_t * self, va_list * app)
+{
+	// Make sure Media Foundation is initialized before anything else.
+	MFUtils::Startup();
+
+	plugin_win_mf_producer_video_t *pProducer = (plugin_win_mf_producer_video_t *)self;
+	if (pProducer){
+		/* init base */
+		tmedia_producer_init(TMEDIA_PRODUCER(pProducer));
+
+		/* init self with default values*/
+		TMEDIA_PRODUCER(pProducer)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+		TMEDIA_PRODUCER(pProducer)->video.chroma = tmedia_chroma_nv12;
+		TMEDIA_PRODUCER(pProducer)->video.fps = 15;
+		TMEDIA_PRODUCER(pProducer)->video.width = 352;
+		TMEDIA_PRODUCER(pProducer)->video.height = 288;
+
+		TSK_DEBUG_INFO("Create WinMF video producer");
+	}
+	return self;
+}
+/* destructor */
+// Destructor: stops the producer if still running, then releases all resources
+// through unprepare().
+static tsk_object_t* plugin_win_mf_producer_video_dtor(tsk_object_t * self)
+{
+	plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
+	if (pSelf){
+		/* stop */
+		if (pSelf->bStarted){
+			plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+		}
+
+		/* deinit base */
+		tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
+		/* deinit self */
+		_plugin_win_mf_producer_video_unprepare(pSelf);
+	}
+
+	return self;
+}
+/* object definition */
+static const tsk_object_def_t plugin_win_mf_producer_video_def_s =
+{
+	sizeof(plugin_win_mf_producer_video_t), // object size for the tsk allocator
+	plugin_win_mf_producer_video_ctor, // constructor
+	plugin_win_mf_producer_video_dtor, // destructor
+	tsk_null, // no comparator
+};
+/* plugin definition*/
+static const tmedia_producer_plugin_def_t plugin_win_mf_producer_video_plugin_def_s =
+{
+	&plugin_win_mf_producer_video_def_s,
+
+	tmedia_video, // media type handled by this plugin
+	"Microsoft Windows Media Foundation producer (Video)", // friendly description
+
+	// producer interface (set/prepare/start/pause/stop)
+	plugin_win_mf_producer_video_set,
+	plugin_win_mf_producer_video_prepare,
+	plugin_win_mf_producer_video_start,
+	plugin_win_mf_producer_video_pause,
+	plugin_win_mf_producer_video_stop
+};
+// Exported plugin entry used by the plugin registry.
+const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t = &plugin_win_mf_producer_video_plugin_def_s;
+
+
+// Run session async thread
+// Run session async thread: pumps media-session events until the session ends,
+// an error is reported, or stop() clears bStarted (which makes the blocking
+// GetEvent() return after the session is shut down).
+static void* TSK_STDCALL RunSessionThread(void *pArg)
+{
+	plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)pArg;
+	HRESULT hrStatus = S_OK;
+	HRESULT hr = S_OK;
+	IMFMediaEvent *pEvent = NULL;
+	MediaEventType met;
+
+	TSK_DEBUG_INFO("RunSessionThread (MF video producer) - ENTER");
+
+	while (pSelf->bStarted){
+		CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+		CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+		CHECK_HR(hr = pEvent->GetType(&met));
+
+		if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
+		{
+			TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+			hr = hrStatus;
+			goto bail;
+		}
+		if (met == MESessionEnded)
+		{
+			break;
+		}
+		SafeRelease(&pEvent);
+	}
+
+bail:
+	SafeRelease(&pEvent); // FIX: the event was leaked when exiting through the error/break path
+	TSK_DEBUG_INFO("RunSessionThread (MF video producer) - EXIT");
+
+	return NULL;
+}
diff --git a/plugins/pluginWinMF/plugin_win_mf_tdav.cxx b/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
new file mode 100644
index 0000000..d08bcfc
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
@@ -0,0 +1,22 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+// This file is used to avoid duplication for the .obj files
+#include "../../tinyDAV/src/codecs/h264/tdav_codec_h264_rtp.c"
+#include "../../tinyDAV/src/audio/tdav_consumer_audio.c"
+#include "../../tinyDAV/src/audio/tdav_producer_audio.c" \ No newline at end of file
diff --git a/plugins/pluginWinMF/version.rc b/plugins/pluginWinMF/version.rc
new file mode 100644
index 0000000..446f34a
--- /dev/null
+++ b/plugins/pluginWinMF/version.rc
@@ -0,0 +1,102 @@
+// Microsoft Visual C++ generated resource script.
+//
+// #include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+// TEXTINCLUDE 1: name of the header holding resource symbol IDs
+// (commented out at the top of this file).
+1 TEXTINCLUDE
+BEGIN
+    "resource.h\0"
+END
+
+// TEXTINCLUDE 2: lines the resource editor regenerates near the top
+// of the script (the afxres.h include above).
+2 TEXTINCLUDE
+BEGIN
+    "#include ""afxres.h""\r\n"
+    "\0"
+END
+
+// TEXTINCLUDE 3: lines the resource editor appends at the end of the script.
+3 TEXTINCLUDE
+BEGIN
+    "\r\n"
+    "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+// Version resource embedded in pluginMF.dll (shown in Explorer file properties).
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 2.0.0.1156
+ PRODUCTVERSION 2.0.0.1156
+ FILEFLAGSMASK 0x17L
+#ifdef _DEBUG
+ FILEFLAGS 0x1L    // VS_FF_DEBUG
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L       // VOS__WINDOWS32
+ FILETYPE 0x2L     // VFT_DLL
+ FILESUBTYPE 0x0L
+BEGIN
+    BLOCK "StringFileInfo"
+    BEGIN
+        BLOCK "040904b0"   // lang 0x0409 (U.S. English), charset 0x04B0 (Unicode)
+        BEGIN
+            VALUE "CompanyName", "Doubango Telecom"
+            VALUE "FileDescription", "Doubango IMS Framework Media Foundation Plugin"
+            VALUE "FileVersion", "2.0.0.1156"
+            VALUE "InternalName", "pluginMF.dll"
+            VALUE "LegalCopyright", "(c) 2010-2013 Doubango Telecom. All rights reserved."
+            VALUE "OriginalFilename", "pluginMF.dll"
+            // FIX: was "Doubango IMS Framework Foundation Plugin" — "Media" was
+            // evidently dropped; now consistent with FileDescription above.
+            VALUE "ProductName", "Doubango IMS Framework Media Foundation Plugin"
+            VALUE "ProductVersion", "2.0.0.1156"
+        END
+    END
+    BLOCK "VarFileInfo"
+    BEGIN
+        VALUE "Translation", 0x409, 1200   // must match the StringFileInfo block key
+    END
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
OpenPOWER on IntegriCloud