summaryrefslogtreecommitdiffstats
path: root/plugins
diff options
context:
space:
mode:
authorMamadou DIOP <bossiel@yahoo.fr>2015-08-17 01:56:35 +0200
committerMamadou DIOP <bossiel@yahoo.fr>2015-08-17 01:56:35 +0200
commit631fffee8a28b1bec5ed1f1d26a20e0135967f99 (patch)
tree74afe3bf3efe15aa82bcd0272b2b0f4d48c2d837 /plugins
parent7908865936604036e6f200f1b5e069f8752f3a3a (diff)
downloaddoubango-631fffee8a28b1bec5ed1f1d26a20e0135967f99.zip
doubango-631fffee8a28b1bec5ed1f1d26a20e0135967f99.tar.gz
-
Diffstat (limited to 'plugins')
-rw-r--r--plugins/audio_opensles/Makefile.am39
-rw-r--r--plugins/audio_opensles/audio_opensles.cxx565
-rw-r--r--plugins/audio_opensles/audio_opensles.d127
-rw-r--r--plugins/audio_opensles/audio_opensles.h47
-rw-r--r--plugins/audio_opensles/audio_opensles.vcproj277
-rw-r--r--plugins/audio_opensles/audio_opensles_config.h116
-rw-r--r--plugins/audio_opensles/audio_opensles_consumer.cxx253
-rw-r--r--plugins/audio_opensles/audio_opensles_consumer.h32
-rw-r--r--plugins/audio_opensles/audio_opensles_device.cxx1137
-rw-r--r--plugins/audio_opensles/audio_opensles_device.h171
-rw-r--r--plugins/audio_opensles/audio_opensles_device_impl.cxx58
-rw-r--r--plugins/audio_opensles/audio_opensles_device_impl.h51
-rw-r--r--plugins/audio_opensles/audio_opensles_producer.cxx239
-rw-r--r--plugins/audio_opensles/audio_opensles_producer.h32
-rw-r--r--plugins/audio_opensles/dllmain.cxx35
-rw-r--r--plugins/audio_opensles/droid-makefile45
-rw-r--r--plugins/audio_opensles/plugin_audio_opensles.pc.in14
-rw-r--r--plugins/audio_webrtc/audio_webrtc.cxx562
-rw-r--r--plugins/audio_webrtc/audio_webrtc.h47
-rw-r--r--plugins/audio_webrtc/audio_webrtc.vcproj269
-rw-r--r--plugins/audio_webrtc/audio_webrtc_config.h144
-rw-r--r--plugins/audio_webrtc/audio_webrtc_consumer.cxx233
-rw-r--r--plugins/audio_webrtc/audio_webrtc_consumer.h33
-rw-r--r--plugins/audio_webrtc/audio_webrtc_producer.cxx227
-rw-r--r--plugins/audio_webrtc/audio_webrtc_producer.h32
-rw-r--r--plugins/audio_webrtc/audio_webrtc_transport.cxx84
-rw-r--r--plugins/audio_webrtc/audio_webrtc_transport.h115
-rw-r--r--plugins/audio_webrtc/dllmain.cxx39
-rw-r--r--plugins/audio_webrtc/droid-makefile49
-rw-r--r--plugins/audio_webrtc/makefile21
-rw-r--r--plugins/buildAll.sh23
-rw-r--r--plugins/pluginCUDA/dllmain_cuda.cxx137
-rw-r--r--plugins/pluginCUDA/pluginCUDA.vcproj225
-rw-r--r--plugins/pluginCUDA/plugin_cuda_codec_h264.cxx1346
-rw-r--r--plugins/pluginCUDA/plugin_cuda_config.h75
-rw-r--r--plugins/pluginCUDA/plugin_cuda_tdav.cxx20
-rw-r--r--plugins/pluginCUDA/plugin_cuda_utils.cxx168
-rw-r--r--plugins/pluginCUDA/plugin_cuda_utils.h56
-rw-r--r--plugins/pluginCUDA/version.rc102
-rw-r--r--plugins/pluginDirectShow/dllmain_dshow.cxx156
-rw-r--r--plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h64
-rw-r--r--plugins/pluginDirectShow/internals/DSBufferWriter.h48
-rw-r--r--plugins/pluginDirectShow/internals/DSCaptureFormat.cxx60
-rw-r--r--plugins/pluginDirectShow/internals/DSCaptureFormat.h47
-rw-r--r--plugins/pluginDirectShow/internals/DSCaptureGraph.cxx436
-rw-r--r--plugins/pluginDirectShow/internals/DSCaptureGraph.h106
-rw-r--r--plugins/pluginDirectShow/internals/DSCaptureUtils.cxx377
-rw-r--r--plugins/pluginDirectShow/internals/DSCaptureUtils.h60
-rw-r--r--plugins/pluginDirectShow/internals/DSDibHelper.cxx80
-rw-r--r--plugins/pluginDirectShow/internals/DSDibHelper.h106
-rw-r--r--plugins/pluginDirectShow/internals/DSDisplay.cxx622
-rw-r--r--plugins/pluginDirectShow/internals/DSDisplay.h84
-rw-r--r--plugins/pluginDirectShow/internals/DSDisplayGraph.cxx345
-rw-r--r--plugins/pluginDirectShow/internals/DSDisplayGraph.h110
-rw-r--r--plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx179
-rw-r--r--plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx207
-rw-r--r--plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx67
-rw-r--r--plugins/pluginDirectShow/internals/DSDisplayOverlay.h68
-rw-r--r--plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx120
-rw-r--r--plugins/pluginDirectShow/internals/DSFrameRateFilter.h64
-rw-r--r--plugins/pluginDirectShow/internals/DSGrabber.cxx292
-rw-r--r--plugins/pluginDirectShow/internals/DSGrabber.h92
-rw-r--r--plugins/pluginDirectShow/internals/DSOutputFilter.cxx113
-rw-r--r--plugins/pluginDirectShow/internals/DSOutputFilter.h112
-rw-r--r--plugins/pluginDirectShow/internals/DSOutputStream.cxx313
-rw-r--r--plugins/pluginDirectShow/internals/DSOutputStream.h90
-rw-r--r--plugins/pluginDirectShow/internals/DSPushSource.h496
-rw-r--r--plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx434
-rw-r--r--plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx257
-rw-r--r--plugins/pluginDirectShow/internals/DSScreenCastGraph.h160
-rw-r--r--plugins/pluginDirectShow/internals/DSUtils.cxx365
-rw-r--r--plugins/pluginDirectShow/internals/DSUtils.h82
-rw-r--r--plugins/pluginDirectShow/internals/Resizer.cxx1192
-rw-r--r--plugins/pluginDirectShow/internals/Resizer.h76
-rw-r--r--plugins/pluginDirectShow/internals/VideoDisplayName.cxx37
-rw-r--r--plugins/pluginDirectShow/internals/VideoDisplayName.h43
-rw-r--r--plugins/pluginDirectShow/internals/VideoFrame.h107
-rw-r--r--plugins/pluginDirectShow/internals/VideoGrabberName.cxx37
-rw-r--r--plugins/pluginDirectShow/internals/VideoGrabberName.h43
-rw-r--r--plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx108
-rw-r--r--plugins/pluginDirectShow/internals/wince/CPropertyBag.h43
-rw-r--r--plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h30
-rw-r--r--plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx56
-rw-r--r--plugins/pluginDirectShow/internals/wince/DSNullFilter.h40
-rw-r--r--plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx197
-rw-r--r--plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h73
-rw-r--r--plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h38
-rw-r--r--plugins/pluginDirectShow/pluginDirectShow.vcproj379
-rw-r--r--plugins/pluginDirectShow/plugin_dshow_config.h103
-rw-r--r--plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx273
-rw-r--r--plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx1319
-rw-r--r--plugins/pluginDirectShow/plugin_video_dshow_producer.cxx276
-rw-r--r--plugins/pluginDirectShow/version.rc102
-rw-r--r--plugins/pluginWASAPI/dllmain_wasapi.cxx134
-rw-r--r--plugins/pluginWASAPI/pluginWASAPI.vcproj227
-rw-r--r--plugins/pluginWASAPI/plugin_wasapi_config.h78
-rw-r--r--plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx700
-rw-r--r--plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx712
-rw-r--r--plugins/pluginWASAPI/plugin_wasapi_tdav.cxx21
-rw-r--r--plugins/pluginWASAPI/plugin_wasapi_utils.cxx81
-rw-r--r--plugins/pluginWASAPI/plugin_wasapi_utils.h53
-rw-r--r--plugins/pluginWASAPI/version.rc102
-rw-r--r--plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx157
-rw-r--r--plugins/pluginWinAudioDSP/pluginWinAudioDSP.vcproj231
-rw-r--r--plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h75
-rw-r--r--plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx402
-rw-r--r--plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx148
-rw-r--r--plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h51
-rw-r--r--plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx388
-rw-r--r--plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx157
-rw-r--r--plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h72
-rw-r--r--plugins/pluginWinAudioDSP/version.rc102
-rw-r--r--plugins/pluginWinDD/dllmain_dd.cxx114
-rw-r--r--plugins/pluginWinDD/internals/CommonTypes.h119
-rw-r--r--plugins/pluginWinDD/internals/DisplayManager.cxx478
-rw-r--r--plugins/pluginWinDD/internals/DisplayManager.h46
-rw-r--r--plugins/pluginWinDD/internals/DuplicationManager.cxx499
-rw-r--r--plugins/pluginWinDD/internals/DuplicationManager.h43
-rw-r--r--plugins/pluginWinDD/internals/OutputManager.cxx1118
-rw-r--r--plugins/pluginWinDD/internals/OutputManager.h61
-rw-r--r--plugins/pluginWinDD/internals/PixelShader.hlsl24
-rw-r--r--plugins/pluginWinDD/internals/ThreadManager.cxx261
-rw-r--r--plugins/pluginWinDD/internals/ThreadManager.h33
-rw-r--r--plugins/pluginWinDD/internals/VertexShader.hlsl28
-rw-r--r--plugins/pluginWinDD/pluginWinDD.vcproj189
-rw-r--r--plugins/pluginWinDD/plugin_win_dd_config.h75
-rw-r--r--plugins/pluginWinDD/plugin_win_dd_producer.cxx1074
-rw-r--r--plugins/pluginWinDD/version.apsbin0 -> 20032 bytes
-rw-r--r--plugins/pluginWinDD/version.rc102
-rw-r--r--plugins/pluginWinIPSecVista/AStyle.sh1
-rw-r--r--plugins/pluginWinIPSecVista/dllmain_ipsec_vista.c100
-rw-r--r--plugins/pluginWinIPSecVista/pluginWinIPSecVista.vcproj212
-rw-r--r--plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c711
-rw-r--r--plugins/pluginWinIPSecVista/plugin_win_ipsec_vista_config.h75
-rw-r--r--plugins/pluginWinIPSecVista/version.rc102
-rw-r--r--plugins/pluginWinMF/dllmain_mf.cxx244
-rw-r--r--plugins/pluginWinMF/internals/mf_codec.cxx888
-rw-r--r--plugins/pluginWinMF/internals/mf_codec.h158
-rw-r--r--plugins/pluginWinMF/internals/mf_codec_topology.cxx473
-rw-r--r--plugins/pluginWinMF/internals/mf_codec_topology.h87
-rw-r--r--plugins/pluginWinMF/internals/mf_custom_src.cxx1722
-rw-r--r--plugins/pluginWinMF/internals/mf_custom_src.h340
-rw-r--r--plugins/pluginWinMF/internals/mf_devices.cxx151
-rw-r--r--plugins/pluginWinMF/internals/mf_devices.h64
-rw-r--r--plugins/pluginWinMF/internals/mf_display_watcher.cxx160
-rw-r--r--plugins/pluginWinMF/internals/mf_display_watcher.h55
-rw-r--r--plugins/pluginWinMF/internals/mf_sample_grabber.cxx135
-rw-r--r--plugins/pluginWinMF/internals/mf_sample_grabber.h68
-rw-r--r--plugins/pluginWinMF/internals/mf_sample_queue.cxx158
-rw-r--r--plugins/pluginWinMF/internals/mf_sample_queue.h81
-rw-r--r--plugins/pluginWinMF/internals/mf_utils.cxx2104
-rw-r--r--plugins/pluginWinMF/internals/mf_utils.h260
-rw-r--r--plugins/pluginWinMF/pluginWinMF.vcproj319
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx750
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_config.h75
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx163
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx1620
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_converter_video.cxx600
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx333
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_producer_video.cxx708
-rw-r--r--plugins/pluginWinMF/plugin_win_mf_tdav.cxx22
-rw-r--r--plugins/pluginWinMF/version.rc102
162 files changed, 39225 insertions, 0 deletions
diff --git a/plugins/audio_opensles/Makefile.am b/plugins/audio_opensles/Makefile.am
new file mode 100644
index 0000000..f37d8d6
--- /dev/null
+++ b/plugins/audio_opensles/Makefile.am
@@ -0,0 +1,39 @@
+lib_LTLIBRARIES = libplugin_audio_opensles.la
+libplugin_audio_opensles_la_LIBADD = \
+ ../../tinySAK/libtinySAK.la \
+ ../../tinyNET/libtinyNET.la \
+ ../../tinyMEDIA/libtinyMEDIA.la \
+ ../../tinySDP/libtinySDP.la
+libplugin_audio_opensles_la_CPPFLAGS = \
+ -I.. \
+ -I../../tinySAK/src \
+ -I../../tinyNET/src \
+ -I../../tinySDP/include \
+ -I../../tinyMEDIA/include \
+ -I../../tinyRTP/include \
+ -I../../tinyDAV/include
+
+libplugin_audio_opensles_la_LDFLAGS = ${LDFLAGS}
+
+if TARGET_OS_IS_ANDROID
+libplugin_audio_opensles_la_CPPFLAGS += -DANDROID=1 -fno-rtti -fno-exceptions -I${NDK}/platforms/android-9/arch-${TARGET_ARCH}/usr/include
+libplugin_audio_opensles_la_LDFLAGS += -Wl,-shared,-Bsymbolic,--no-undefined,--whole-archive -L${NDK}/platforms/android-9/arch-${TARGET_ARCH}/usr/lib -lOpenSLES -lm -lstdc++ -lgcc -llog -ldl
+else
+libplugin_audio_opensles_la_LDFLAGS +=
+endif
+
+if USE_SSL
+libplugin_audio_opensles_la_LIBADD += ${LIBSSL_LIBADD}
+endif
+
+libplugin_audio_opensles_la_SOURCES = \
+ audio_opensles.cxx \
+ audio_opensles_consumer.cxx \
+ audio_opensles_device_impl.cxx \
+ audio_opensles_producer.cxx \
+ audio_opensles_device.cxx \
+ ../../tinyDAV/src/audio/tdav_consumer_audio.c \
+ ../../tinyDAV/src/audio/tdav_producer_audio.c
+
+pkgconfigdir = $(libdir)/pkgconfig
+pkgconfig_DATA = plugin_audio_opensles.pc
diff --git a/plugins/audio_opensles/audio_opensles.cxx b/plugins/audio_opensles/audio_opensles.cxx
new file mode 100644
index 0000000..0cf5ea3
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles.cxx
@@ -0,0 +1,565 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_opensles.h"
+
+#include "audio_opensles_consumer.h"
+#include "audio_opensles_producer.h"
+#include "audio_opensles_device.h"
+#include "audio_opensles_device_impl.h"
+
+#include "tinymedia/tmedia_consumer.h"
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_list.h"
+#include "tsk_safeobj.h"
+#include "tsk_debug.h"
+
+typedef enum PLUGIN_INDEX_E
+{
+ PLUGIN_INDEX_AUDIO_CONSUMER,
+ PLUGIN_INDEX_AUDIO_PRODUCER,
+ PLUGIN_INDEX_COUNT
+}
+PLUGIN_INDEX_T;
+
+
+int __plugin_get_def_count()
+{
+ return PLUGIN_INDEX_COUNT;
+}
+
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+ switch(index){
+ case PLUGIN_INDEX_AUDIO_CONSUMER: return tsk_plugin_def_type_consumer;
+ case PLUGIN_INDEX_AUDIO_PRODUCER: return tsk_plugin_def_type_producer;
+ default:
+ {
+ AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+ }
+ }
+}
+
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+ switch(index){
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ case PLUGIN_INDEX_AUDIO_PRODUCER:
+ {
+ return tsk_plugin_def_media_type_audio;
+ }
+ default:
+ {
+ AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+ }
+ }
+}
+
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+ switch(index){
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ {
+ return audio_consumer_opensles_plugin_def_t;
+ }
+ case PLUGIN_INDEX_AUDIO_PRODUCER:
+ {
+ return audio_producer_opensles_plugin_def_t;
+ }
+ default:
+ {
+ AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+ }
+ }
+}
+
+//
+// SLES AudioInstance
+//
+
+typedef struct audio_opensles_instance_s
+{
+ TSK_DECLARE_OBJECT;
+
+ uint64_t sessionId;
+
+ bool isStarted;
+
+ bool isConsumerPrepared;
+ bool isConsumerStarted;
+ bool isProducerPrepared;
+ bool isProducerStarted;
+
+ bool isSpeakerAvailable;
+ bool isPlayoutAvailable;
+ bool isRecordingAvailable;
+
+ SLAudioDevice* device;
+ SLAudioDeviceCallbackImpl* callback;
+
+ TSK_DECLARE_SAFEOBJ;
+}
+audio_opensles_instance_t;
+typedef tsk_list_t audio_opensles_instances_L_t;
+
+static audio_opensles_instances_L_t* __audioInstances = tsk_null;
+
+static tsk_object_t* audio_opensles_instance_ctor(tsk_object_t * self, va_list * app)
+{
+ audio_opensles_instance_t* audioInstance = (audio_opensles_instance_t*)self;
+ if(audioInstance){
+ tsk_safeobj_init(audioInstance);
+ }
+ return self;
+}
+static tsk_object_t* audio_opensles_instance_dtor(tsk_object_t * self)
+{
+ AUDIO_OPENSLES_DEBUG_INFO("Audio Instance destroyed");
+ audio_opensles_instance_t* audioInstance = (audio_opensles_instance_t*)self;
+ if(audioInstance){
+ tsk_safeobj_lock(audioInstance);
+ if(audioInstance->device){
+ audioInstance->device->SetCallback(NULL);
+ audioInstance->device->Terminate();
+ delete audioInstance->device;
+ audioInstance->device = tsk_null;
+ }
+ if(audioInstance->callback){
+ delete audioInstance->callback;
+ audioInstance->callback = tsk_null;
+ }
+ tsk_safeobj_unlock(audioInstance);
+
+ tsk_safeobj_deinit(audioInstance);
+ }
+ return self;
+}
+static int audio_opensles_instance_cmp(const tsk_object_t *_ai1, const tsk_object_t *_ai2)
+{
+ return ((int)_ai1 - (int)_ai2);
+}
+static const tsk_object_def_t audio_opensles_instance_def_s =
+{
+ sizeof(audio_opensles_instance_t),
+ audio_opensles_instance_ctor,
+ audio_opensles_instance_dtor,
+ audio_opensles_instance_cmp,
+};
+const tsk_object_def_t *audio_opensles_instance_def_t = &audio_opensles_instance_def_s;
+
+
+audio_opensles_instance_handle_t* audio_opensles_instance_create(uint64_t sessionId)
+{
+ audio_opensles_instance_t* audioInstance = tsk_null;
+
+ // create list used to hold instances
+ if(!__audioInstances && !(__audioInstances = tsk_list_create())){
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create new list");
+ return tsk_null;
+ }
+
+ //= lock the list
+ tsk_list_lock(__audioInstances);
+
+ // find the instance from the list
+ const tsk_list_item_t* item;
+ tsk_list_foreach(item, __audioInstances){
+ if(((audio_opensles_instance_t*)item->data)->sessionId == sessionId){
+ audioInstance = (audio_opensles_instance_t*)tsk_object_ref(item->data);
+ break;
+ }
+ }
+
+ if(!audioInstance){
+ audio_opensles_instance_t* _audioInstance;
+ if(!(_audioInstance = (audio_opensles_instance_t*)tsk_object_new(&audio_opensles_instance_def_s))){
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create new audio instance");
+ goto done;
+ }
+
+ if(!(_audioInstance->device = new SLAudioDevice())){
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio device");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+
+ if(!(_audioInstance->callback = new SLAudioDeviceCallbackImpl())){
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio transport");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+ if((_audioInstance->device->SetCallback(_audioInstance->callback))){
+ AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::RegisterAudioCallback() failed");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+
+ if((_audioInstance->device->Init())){
+ AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::Init() failed");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+
+ _audioInstance->sessionId = sessionId;
+ audioInstance = _audioInstance;
+ tsk_list_push_back_data(__audioInstances, (void**)&_audioInstance);
+ }
+
+done:
+ //= unlock the list
+ tsk_list_unlock(__audioInstances);
+
+ return audioInstance;
+}
+
+int audio_opensles_instance_prepare_consumer(audio_opensles_instance_handle_t* _self, tmedia_consumer_t** _consumer)
+{
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ const struct audio_consumer_opensles_s* consumer = (const struct audio_consumer_opensles_s*)*_consumer;
+ if(!self || !self->device || !self->callback || !_consumer || !*_consumer){
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ if(self->isConsumerPrepared){
+ AUDIO_OPENSLES_DEBUG_WARN("Consumer already prepared");
+ return 0;
+ }
+
+ int ret;
+ bool _bool;
+
+ tsk_safeobj_lock(self);
+
+ self->callback->SetConsumer(consumer);
+
+ if((ret = self->device->SpeakerIsAvailable(&_bool))){
+ AUDIO_OPENSLES_DEBUG_ERROR("SpeakerIsAvailable() failed with error code=%d", ret);
+ }
+ else{
+ if(!_bool){
+ AUDIO_OPENSLES_DEBUG_ERROR("SpeakerIsAvailable() returned false");
+ }
+ self->isSpeakerAvailable = _bool;
+ }
+
+ if((ret = self->device->InitSpeaker())){
+ AUDIO_OPENSLES_DEBUG_ERROR("InitSpeaker() failed with error code=%d", ret);
+ }
+ else if((ret = self->device->SetSpeakerOn(audio_consumer_opensles_is_speakerOn(consumer)))){
+ AUDIO_OPENSLES_DEBUG_ERROR("SetSpeakerOn() failed with error code=%d", ret);
+ }
+
+ if((ret = self->device->PlayoutIsAvailable(&_bool))){
+ AUDIO_OPENSLES_DEBUG_ERROR("PlayoutIsAvailable() failed with error code =%d", ret);
+ }
+ else{
+ if(!_bool){
+ AUDIO_OPENSLES_DEBUG_ERROR("PlayoutIsAvailable() returned false");
+ }
+ self->isPlayoutAvailable = _bool;
+ }
+
+ if((ret = self->device->SetStereoPlayout(((*_consumer)->audio.in.channels == 2)))){
+ AUDIO_OPENSLES_DEBUG_ERROR("SetStereoPlayout(%d==2) failed with error code=%d", (*_consumer)->audio.in.channels, ret);
+ }
+
+ //if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, (*_consumer)->audio.ptime))){
+ // AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", (*_consumer)->audio.ptime, ret);
+ //}
+ // always request 10ms buffers
+ if((ret = self->device->SetPlayoutBuffer(10))){
+ AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", 10, ret);
+ }
+
+ int playoutSampleRate = (*_consumer)->audio.out.rate ? (*_consumer)->audio.out.rate : (*_consumer)->audio.in.rate;
+ if((ret = self->device->SetPlayoutSampleRate(playoutSampleRate))){
+ AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutSampleRate(%d) failed with error code=%d", playoutSampleRate, ret);
+ }
+
+ if((ret = self->device->InitPlayout())){
+ AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::InitPlayout() failed with error code = %d", ret);
+ goto done;
+ }
+
+ // init output parameters
+ if((ret = self->device->StereoPlayout(&_bool))){
+ AUDIO_OPENSLES_DEBUG_ERROR("StereoPlayout() failed with error code=%d", ret);
+ }
+ else{
+ (*_consumer)->audio.out.channels = (_bool ? 2 : 1);
+ }
+ if((ret = self->device->PlayoutSampleRate(&playoutSampleRate))){
+ AUDIO_OPENSLES_DEBUG_ERROR("PlayoutSampleRate() failed with error code=%d", ret);
+ }
+ else{
+ (*_consumer)->audio.out.rate = playoutSampleRate;
+ }
+
+done:
+ tsk_safeobj_unlock(self);
+
+ self->isConsumerPrepared = (ret == 0);
+
+ return ret;
+}
+
+int audio_opensles_instance_prepare_producer(audio_opensles_instance_handle_t* _self, tmedia_producer_t** _producer)
+{
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback || !_producer || !*_producer){
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ if(self->isProducerPrepared){
+ AUDIO_OPENSLES_DEBUG_WARN("Producer already prepared");
+ return 0;
+ }
+
+ int ret;
+ bool _bool;
+
+ tsk_safeobj_lock(self);
+
+ self->callback->SetProducer((const struct audio_producer_opensles_s*)*_producer);
+
+ if((ret = self->device->RecordingIsAvailable(&_bool))){
+ AUDIO_OPENSLES_DEBUG_ERROR("RecordingIsAvailable() failed with error code =%d", ret);
+ }
+ else{
+ if(!_bool){
+ AUDIO_OPENSLES_DEBUG_ERROR("RecordingIsAvailable() returned false");
+ }
+ self->isRecordingAvailable = _bool;
+ }
+
+ if((ret = self->device->MicrophoneIsAvailable(&_bool))){
+ AUDIO_OPENSLES_DEBUG_ERROR("MicrophoneIsAvailable() failed with error code =%d", ret);
+ }
+ else{
+ if(!_bool){
+ AUDIO_OPENSLES_DEBUG_ERROR("MicrophoneIsAvailable() returned false");
+ }
+ else{
+ if((ret = self->device->InitMicrophone())){
+ AUDIO_OPENSLES_DEBUG_ERROR("InitMicrophone() failed with error code =%d", ret);
+ }
+ }
+ }
+
+ if((ret = self->device->SetStereoRecording(((*_producer)->audio.channels == 2)))){
+ AUDIO_OPENSLES_DEBUG_ERROR("SetStereoRecording(%d==2) failed with error code=%d", (*_producer)->audio.channels, ret);
+ }
+
+ int recordingSampleRate = (*_producer)->audio.rate;
+ if((ret = self->device->SetRecordingSampleRate(recordingSampleRate))){
+ AUDIO_OPENSLES_DEBUG_ERROR("SetRecordingSampleRate(%d) failed with error code=%d", recordingSampleRate, ret);
+ }
+
+ if((ret = self->device->InitRecording())){
+ AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::InitRecording() failed with error code = %d", ret);
+ goto done;
+ }
+
+ // init output parameters
+ if((ret = self->device->StereoRecording(&_bool))){
+ AUDIO_OPENSLES_DEBUG_ERROR("StereoRecording() failed with error code=%d", ret);
+ }
+ else{
+ (*_producer)->audio.channels = (_bool ? 2 : 1);
+ }
+ if((ret = self->device->RecordingSampleRate(&recordingSampleRate))){
+ AUDIO_OPENSLES_DEBUG_ERROR("RecordingSampleRate() failed with error code=%d", ret);
+ }
+ else{
+ (*_producer)->audio.rate = recordingSampleRate;
+ }
+
+done:
+ tsk_safeobj_unlock(self);
+
+ self->isProducerPrepared = (ret == 0);
+
+ return ret;
+}
+
+int audio_opensles_instance_start_consumer(audio_opensles_instance_handle_t* _self)
+{
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback){
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+ if(!self->isConsumerPrepared){
+ AUDIO_OPENSLES_DEBUG_ERROR("Consumer not prepared");
+ goto done;
+ }
+
+ if(self->isConsumerStarted){
+ AUDIO_OPENSLES_DEBUG_WARN("Consumer already started");
+ goto done;
+ }
+
+ if(self->isPlayoutAvailable){
+ int ret;
+ if((ret = self->device->StartPlayout())){
+ AUDIO_OPENSLES_DEBUG_ERROR("StartPlayout() failed with error code = %d", ret);
+ }
+
+ self->isConsumerStarted = self->device->Playing();
+ AUDIO_OPENSLES_DEBUG_INFO("isPlaying=%s", (self->isConsumerPrepared ? "true" : "false"));
+ }
+
+done:
+ tsk_safeobj_unlock(self);
+ return (self->isConsumerStarted ? 0 : -1);
+}
+
+int audio_opensles_instance_start_producer(audio_opensles_instance_handle_t* _self)
+{
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback){
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+ if(!self->isProducerPrepared){
+ AUDIO_OPENSLES_DEBUG_ERROR("Producer not prepared");
+ goto done;
+ }
+
+ if(self->isProducerStarted){
+ AUDIO_OPENSLES_DEBUG_WARN("Consumer already started");
+ goto done;
+ }
+
+ if(self->isRecordingAvailable){
+ int ret;
+ if((ret = self->device->StartRecording())){
+ AUDIO_OPENSLES_DEBUG_ERROR("StartRecording() failed with error code = %d", ret);
+ }
+
+ self->isProducerStarted = self->device->Recording();
+ AUDIO_OPENSLES_DEBUG_INFO("isRecording=%s", (self->isProducerStarted ? "true" : "false"));
+ }
+
+done:
+ tsk_safeobj_unlock(self);
+ return (self->isProducerStarted ? 0 : -1);
+}
+
+int audio_opensles_instance_stop_consumer(audio_opensles_instance_handle_t* _self)
+{
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback){
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+
+ if(!self->isConsumerStarted){
+ goto done;
+ }
+
+ int ret;
+ if((ret = self->device->StopPlayout())){
+ AUDIO_OPENSLES_DEBUG_ERROR("StopPlayout() failed with error code = %d", ret);
+ }
+ else{
+ self->isConsumerStarted = self->device->Playing();
+ self->isConsumerPrepared = false;
+ }
+
+done:
+ tsk_safeobj_unlock(self);
+ return (self->isConsumerStarted ? -1 : 0);
+}
+
+int audio_opensles_instance_stop_producer(audio_opensles_instance_handle_t* _self)
+{
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback){
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+
+ if(!self->isProducerStarted){
+ goto done;
+ }
+
+ int ret;
+ if((ret = self->device->StopRecording())){
+ AUDIO_OPENSLES_DEBUG_ERROR("StopRecording() failed with error code = %d", ret);
+ }
+ else{
+ self->isProducerStarted = self->device->Recording();
+ self->isProducerPrepared = false;
+ }
+
+done:
+ tsk_safeobj_unlock(self);
+ return (self->isProducerStarted ? -1 : 0);
+}
+
+int audio_opensles_instance_set_speakerOn(audio_opensles_instance_handle_t* _self, bool speakerOn)
+{
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device ){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ return self->device->SetSpeakerOn(speakerOn);
+}
+
+int audio_opensles_instance_set_microphone_volume(audio_opensles_instance_handle_t* _self, int32_t volume)
+{
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device ){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ return self->device->SetMicrophoneVolume(volume);
+}
+
+int audio_opensles_instance_destroy(audio_opensles_instance_handle_t** _self){
+ if(!_self || !*_self){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ tsk_list_lock(__audioInstances);
+ if(tsk_object_get_refcount(*_self)==1){
+ tsk_list_remove_item_by_data(__audioInstances, *_self);
+ }
+ else {
+ tsk_object_unref(*_self);
+ }
+ tsk_list_unlock(__audioInstances);
+ *_self = tsk_null;
+ return 0;
+}
+
diff --git a/plugins/audio_opensles/audio_opensles.d b/plugins/audio_opensles/audio_opensles.d
new file mode 100644
index 0000000..2a9da10
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles.d
@@ -0,0 +1,127 @@
+audio_opensles.o: audio_opensles.cxx audio_opensles.h \
+ audio_opensles_config.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/stdint.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/_types.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/machine/_types.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/android/log.h \
+ ../../tinySAK/src/tsk_plugin.h ../../tinySAK/src/tinysak_config.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/string.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/cdefs.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/cdefs_elf.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/malloc.h \
+ ../../tinySAK/src/tsk_common.h ../../tinySAK/src/tsk_object.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/stdio.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/types.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/linux/posix_types.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/linux/stddef.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/linux/compiler.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/asm/posix_types.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/asm/types.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/linux/types.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/machine/kernel.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/sysmacros.h \
+ audio_opensles_consumer.h audio_opensles_producer.h \
+ audio_opensles_device.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/SLES/OpenSLES.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/SLES/OpenSLES_Platform.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/SLES/OpenSLES_Android.h \
+ C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/SLES/OpenSLES_AndroidConfiguration.h \
+ audio_opensles_device_impl.h \
+ ../../tinyMEDIA/include/tinymedia/tmedia_consumer.h \
+ ../../tinyMEDIA/include/tinymedia_config.h \
+ ../../tinyMEDIA/include/tinymedia/tmedia_codec.h \
+ ../../tinyMEDIA/include/tinymedia/tmedia_common.h \
+ ../../tinySAK/src/tsk_object.h ../../tinySAK/src/tsk_list.h \
+ ../../tinySAK/src/tsk_mutex.h \
+ ../../tinyMEDIA/include/tinymedia/tmedia_params.h \
+ ../../tinyMEDIA/include/tinymedia/tmedia_common.h \
+ ../../tinyMEDIA/include/tinymedia/tmedia_producer.h \
+ ../../tinySAK/src/tsk_safeobj.h ../../tinySAK/src/tsk_debug.h
+
+audio_opensles.h:
+
+audio_opensles_config.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/stdint.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/_types.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/machine/_types.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/android/log.h:
+
+../../tinySAK/src/tsk_plugin.h:
+
+../../tinySAK/src/tinysak_config.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/string.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/cdefs.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/cdefs_elf.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/malloc.h:
+
+../../tinySAK/src/tsk_common.h:
+
+../../tinySAK/src/tsk_object.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/stdio.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/types.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/linux/posix_types.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/linux/stddef.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/linux/compiler.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/asm/posix_types.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/asm/types.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/linux/types.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/machine/kernel.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/sys/sysmacros.h:
+
+audio_opensles_consumer.h:
+
+audio_opensles_producer.h:
+
+audio_opensles_device.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/SLES/OpenSLES.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/SLES/OpenSLES_Platform.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/SLES/OpenSLES_Android.h:
+
+C:/android-ndk-r7c/platforms/android-9/arch-arm/usr/include/SLES/OpenSLES_AndroidConfiguration.h:
+
+audio_opensles_device_impl.h:
+
+../../tinyMEDIA/include/tinymedia/tmedia_consumer.h:
+
+../../tinyMEDIA/include/tinymedia_config.h:
+
+../../tinyMEDIA/include/tinymedia/tmedia_codec.h:
+
+../../tinyMEDIA/include/tinymedia/tmedia_common.h:
+
+../../tinySAK/src/tsk_object.h:
+
+../../tinySAK/src/tsk_list.h:
+
+../../tinySAK/src/tsk_mutex.h:
+
+../../tinyMEDIA/include/tinymedia/tmedia_params.h:
+
+../../tinyMEDIA/include/tinymedia/tmedia_common.h:
+
+../../tinyMEDIA/include/tinymedia/tmedia_producer.h:
+
+../../tinySAK/src/tsk_safeobj.h:
+
+../../tinySAK/src/tsk_debug.h:
diff --git a/plugins/audio_opensles/audio_opensles.h b/plugins/audio_opensles/audio_opensles.h
new file mode 100644
index 0000000..7837509
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles.h
@@ -0,0 +1,47 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef _DOUBANGO_AUDIO_OPENSLES_H
+#define _DOUBANGO_AUDIO_OPENSLES_H
+
+#include "audio_opensles_config.h"
+
+#include "tsk_plugin.h"
+
+AUDIO_OPENSLES_BEGIN_DECLS
+
+typedef void audio_opensles_instance_handle_t;
+
+AUDIO_OPENSLES_API int __plugin_get_def_count();
+AUDIO_OPENSLES_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+AUDIO_OPENSLES_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+AUDIO_OPENSLES_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+
+audio_opensles_instance_handle_t* audio_opensles_instance_create(uint64_t session_id);
+int audio_opensles_instance_prepare_consumer(audio_opensles_instance_handle_t* self, struct tmedia_consumer_s** consumer);
+int audio_opensles_instance_prepare_producer(audio_opensles_instance_handle_t* _self, struct tmedia_producer_s** producer);
+int audio_opensles_instance_start_consumer(audio_opensles_instance_handle_t* self);
+int audio_opensles_instance_start_producer(audio_opensles_instance_handle_t* self);
+int audio_opensles_instance_stop_consumer(audio_opensles_instance_handle_t* self);
+int audio_opensles_instance_stop_producer(audio_opensles_instance_handle_t* self);
+int audio_opensles_instance_set_speakerOn(audio_opensles_instance_handle_t* self, bool speakerOn);
+int audio_opensles_instance_set_microphone_volume(audio_opensles_instance_handle_t* self, int32_t volume);
+int audio_opensles_instance_destroy(audio_opensles_instance_handle_t** self);
+
+AUDIO_OPENSLES_END_DECLS
+
+#endif /* _DOUBANGO_AUDIO_OPENSLES_H */
diff --git a/plugins/audio_opensles/audio_opensles.vcproj b/plugins/audio_opensles/audio_opensles.vcproj
new file mode 100644
index 0000000..943de27
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles.vcproj
@@ -0,0 +1,277 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9,00"
+ Name="audio_opensles"
+ ProjectGUID="{1C451CD7-337D-4E72-8788-9607D263752B}"
+ RootNamespace="audio_opensles"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="131072"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories="..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyDAV\include;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;DEBUG_LEVEL=DEBUG_LEVEL_INFO;AUDIO_OPENSLES_EXPORTS;TINYDAV_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="2"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories="..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyDAV\include;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;AUDIO_OPENSLES_EXPORTS;TINYDAV_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="0"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="1"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\audio_opensles.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_consumer.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_device.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_device_impl.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_producer.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\dllmain.cxx"
+ >
+ <FileConfiguration
+ Name="Debug|Win32"
+ >
+ <Tool
+ Name="VCCLCompilerTool"
+ UsePrecompiledHeader="0"
+ CompileAsManaged="0"
+ />
+ </FileConfiguration>
+ <FileConfiguration
+ Name="Release|Win32"
+ >
+ <Tool
+ Name="VCCLCompilerTool"
+ UsePrecompiledHeader="0"
+ CompileAsManaged="0"
+ />
+ </FileConfiguration>
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\audio\tdav_consumer_audio.c"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\audio\tdav_producer_audio.c"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\audio_opensles.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_config.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_consumer.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_device.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_device_impl.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_opensles_producer.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\include\tinydav\audio\tdav_consumer_audio.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\include\tinydav\audio\tdav_producer_audio.h"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/audio_opensles/audio_opensles_config.h b/plugins/audio_opensles/audio_opensles_config.h
new file mode 100644
index 0000000..a495608
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_config.h
@@ -0,0 +1,116 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef AUDIO_OPENSLES_CONFIG_H
+#define AUDIO_OPENSLES_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+# define AUDIO_OPENSLES_UNDER_WINDOWS 1
+#endif
+
+// OS X or iOS
+#if defined(__APPLE__)
+# define AUDIO_OPENSLES_UNDER_APPLE 1
+#endif
+#if TARGET_OS_MAC
+# define AUDIO_OPENSLES_UNDER_MAC 1
+#endif
+#if TARGET_OS_IPHONE
+# define AUDIO_OPENSLES_UNDER_IPHONE 1
+#endif
+#if TARGET_IPHONE_SIMULATOR
+# define AUDIO_OPENSLES_UNDER_IPHONE_SIMULATOR 1
+#endif
+
+#if defined(ANDROID)
+# define AUDIO_OPENSLES_UNDER_ANDROID 1
+#endif
+
+// x86
+#if AUDIO_OPENSLES_UNDER_WINDOWS || defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+# define AUDIO_OPENSLES_UNDER_X86 1
+#endif
+
+// Mobile
+#if defined(_WIN32_WCE) || defined(ANDROID) // iOS (not true)=> || defined(IOS)
+# define AUDIO_OPENSLES_UNDER_MOBILE 1
+#endif
+
+#if (AUDIO_OPENSLES_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(AUDIO_OPENSLES_EXPORTS)
+# define AUDIO_OPENSLES_API __declspec(dllexport)
+# define AUDIO_OPENSLES_GEXTERN __declspec(dllexport)
+#elif (AUDIO_OPENSLES_UNDER_WINDOWS || defined(__SYMBIAN32__))
+# define AUDIO_OPENSLES_API __declspec(dllimport)
+# define AUDIO_OPENSLES_GEXTERN __declspec(dllimport)
+#else
+# define AUDIO_OPENSLES_API
+# define AUDIO_OPENSLES_GEXTERN extern
+#endif
+
+// Guards against C++ name mangling
+#ifdef __cplusplus
+# define AUDIO_OPENSLES_BEGIN_DECLS extern "C" {
+# define AUDIO_OPENSLES_END_DECLS }
+#else
+# define AUDIO_OPENSLES_BEGIN_DECLS
+# define AUDIO_OPENSLES_END_DECLS
+#endif
+
+#ifdef _MSC_VER
+#if HAVE_FFMPEG // FFMPeg warnings (treated as errors)
+# pragma warning (disable:4244)
+#endif
+# define inline __inline
+# define _CRT_SECURE_NO_WARNINGS
+#endif
+
+// Detecting C99 compilers
+#if (__STDC_VERSION__ == 199901L) && !defined(__C99__)
+# define __C99__
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+ #include "../config.h"
+#endif
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+# include <android/log.h>
+
+# define ANDROID_DEBUG_TAG "plugin_audio_opensles" // DDMS log tag when using eclise
+# define AUDIO_OPENSLES_DEBUG_INFO(FMT, ...) __android_log_print(ANDROID_LOG_INFO, ANDROID_DEBUG_TAG, FMT, ##__VA_ARGS__)
+# define AUDIO_OPENSLES_DEBUG_WARN(FMT, ...) __android_log_print(ANDROID_LOG_WARN, ANDROID_DEBUG_TAG, "***WARN: function: \"%s()\" \nfile: \"%s\" \nline: \"%u\" \nMSG: " FMT "\n", __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__)
+# define AUDIO_OPENSLES_DEBUG_ERROR(FMT, ...) __android_log_print(ANDROID_LOG_ERROR, ANDROID_DEBUG_TAG, "***ERROR: function: \"%s()\" \nfile: \"%s\" \nline: \"%u\" \nMSG: " FMT "\n", __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__)
+# define AUDIO_OPENSLES_DEBUG_FATAL(FMT, ...) __android_log_print(ANDROID_LOG_FATAL, ANDROID_DEBUG_TAG, "***FATAL: function: \"%s()\" \nfile: \"%s\" \nline: \"%u\" \nMSG: " FMT "\n", __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__)
+#else
+# include "tsk_debug.h"
+# define AUDIO_OPENSLES_DEBUG_INFO(FMT, ...) TSK_DEBUG_INFO(FMT, ##__VA_ARGS__)
+# define AUDIO_OPENSLES_DEBUG_WARN(FMT, ...) TSK_DEBUG_WARN(FMT, ##__VA_ARGS__)
+# define AUDIO_OPENSLES_DEBUG_ERROR(FMT, ...) TSK_DEBUG_ERROR(FMT, ##__VA_ARGS__)
+# define AUDIO_OPENSLES_DEBUG_FATAL(FMT, ...) TSK_DEBUG_FATAL(FMT, ##__VA_ARGS__)
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
+
+#endif // AUDIO_OPENSLES_CONFIG_H \ No newline at end of file
diff --git a/plugins/audio_opensles/audio_opensles_consumer.cxx b/plugins/audio_opensles/audio_opensles_consumer.cxx
new file mode 100644
index 0000000..57acd7f
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_consumer.cxx
@@ -0,0 +1,253 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_opensles_consumer.h"
+#include "audio_opensles.h"
+
+#include "tinydav/audio/tdav_consumer_audio.h"
+
+#include "tsk_string.h"
+#include "tsk_string.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+typedef struct audio_consumer_opensles_s
+{
+ TDAV_DECLARE_CONSUMER_AUDIO;
+ audio_opensles_instance_handle_t* audioInstHandle;
+ bool isSpeakerOn;
+ struct{
+ void* ptr;
+ bool isFull;
+ int size;
+ int index;
+ } buffer;
+}
+audio_consumer_opensles_t;
+
+int audio_consumer_opensles_get_data_10ms(const audio_consumer_opensles_t* _self, void* audioSamples, int nSamples, int nBytesPerSample, int nChannels, int samplesPerSec, uint32_t &nSamplesOut)
+{
+ nSamplesOut = 0;
+ if(!_self || !audioSamples || !nSamples){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if((nSamples != (samplesPerSec / 100))){
+ AUDIO_OPENSLES_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
+ return -2;
+ }
+ if((nBytesPerSample != (TMEDIA_CONSUMER(_self)->audio.bits_per_sample >> 3))){
+ AUDIO_OPENSLES_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
+ return -3;
+ }
+ if((nChannels != TMEDIA_CONSUMER(_self)->audio.out.channels)){
+ AUDIO_OPENSLES_DEBUG_ERROR("Playout - %d not the expected number of channels but should be %d", nChannels, TMEDIA_CONSUMER(_self)->audio.out.channels);
+ return -4;
+ }
+
+ audio_consumer_opensles_t* self = const_cast<audio_consumer_opensles_t*>(_self);
+
+ if(self->buffer.index == self->buffer.size){
+ if((tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(self), self->buffer.ptr, self->buffer.size)) != self->buffer.size){
+ nSamplesOut = 0;
+ self->buffer.index = self->buffer.size;
+ return 0;
+ }
+ self->buffer.index = 0;
+ tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(self));
+ }
+
+ int nSamplesInBits = (nSamples * nBytesPerSample);
+ if(_self->buffer.index + nSamplesInBits <= _self->buffer.size){
+ memcpy(audioSamples, (((uint8_t*)self->buffer.ptr) + self->buffer.index), nSamplesInBits);
+ }
+ self->buffer.index += nSamplesInBits;
+ TSK_CLAMP(0, self->buffer.index, self->buffer.size);
+ nSamplesOut = nSamples;
+
+ return 0;
+}
+
+bool audio_consumer_opensles_is_speakerOn(const audio_consumer_opensles_t* self)
+{
+ if(!self){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return false;
+ }
+ return self->isSpeakerOn;
+}
+
+
+/* ============ Media Consumer Interface ================= */
+static int audio_consumer_opensles_set(tmedia_consumer_t* _self, const tmedia_param_t* param)
+{
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+
+ if(ret == 0){
+ if(tsk_striequals(param->key, "volume")){
+
+ }
+ else if(tsk_striequals(param->key, "speaker-on")){
+ self->isSpeakerOn = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+ if(self->audioInstHandle){
+ return audio_opensles_instance_set_speakerOn(self->audioInstHandle, self->isSpeakerOn);
+ }
+ else return 0; // will be set when instance is initialized
+ }
+ }
+
+ return ret;
+}
+
static int audio_consumer_opensles_prepare(tmedia_consumer_t* _self, const tmedia_codec_t* codec)
{
	// Creates the native OpenSL ES instance, seeds the playout parameters from
	// the negotiated codec, then allocates the intermediate staging buffer
	// sized to one "ptime" worth of output PCM.
	audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
	if(!self){
		AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
		return -1;
	}

	// create audio instance (keyed by session_id)
	if(!(self->audioInstHandle = audio_opensles_instance_create(TMEDIA_CONSUMER(self)->session_id))){
		AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio instance handle");
		return -1;
	}

	// initialize input parameters from the codec information
	TMEDIA_CONSUMER(self)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_DECODING(codec);
	TMEDIA_CONSUMER(self)->audio.in.channels = TMEDIA_CODEC_CHANNELS_AUDIO_DECODING(codec);
	TMEDIA_CONSUMER(self)->audio.in.rate = TMEDIA_CODEC_RATE_DECODING(codec);

	AUDIO_OPENSLES_DEBUG_INFO("audio_consumer_opensles_prepare(channels=%d, rate=%d, ptime=%d)", codec->plugin->audio.channels, codec->plugin->rate, codec->plugin->audio.ptime);

	// prepare playout device and update output parameters
	// NOTE(review): the callee presumably adjusts audio.out.* to the device
	// capabilities -- confirm in audio_opensles.cxx
	int ret = audio_opensles_instance_prepare_consumer(self->audioInstHandle, &_self);

	// now that the consumer is prepared we can initialize internal buffer using device caps
	if(ret == 0){
		// allocate buffer; size = samples-per-ptime * bytes-per-sample
		// NOTE(review): the size ignores audio.out.channels (mono assumed) --
		// verify for stereo playout
		int xsize = ((TMEDIA_CONSUMER(self)->audio.ptime * TMEDIA_CONSUMER(self)->audio.out.rate) / 1000) * (TMEDIA_CONSUMER(self)->audio.bits_per_sample >> 3);
		// NOTE(review): tsk_realloc is assumed to free the old block itself on
		// failure (tinySAK convention), so overwriting buffer.ptr is not a leak
		// -- confirm against tsk_memory.c
		if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))){
			AUDIO_OPENSLES_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
			self->buffer.size = 0;
			return -1;
		}
		memset(self->buffer.ptr, 0, xsize);
		self->buffer.size = xsize;
		self->buffer.index = 0; // buffer starts zeroed, so the first 10ms callbacks play silence
		self->buffer.isFull = false;
	}
	return ret;
}
+
+static int audio_consumer_opensles_start(tmedia_consumer_t* _self)
+{
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ if(!self){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return audio_opensles_instance_start_consumer(self->audioInstHandle);
+}
+
+static int audio_consumer_opensles_consume(tmedia_consumer_t* _self, const void* data, tsk_size_t data_size, const tsk_object_t* proto_hdr)
+{
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ if(!self || !data || !data_size){
+ AUDIO_OPENSLES_DEBUG_ERROR("1Invalid parameter");
+ return -1;
+ }
+ /* buffer is already decoded */
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), data, data_size, proto_hdr);
+}
+
/* Pause is a no-op for this consumer. */
static int audio_consumer_opensles_pause(tmedia_consumer_t* self)
{
	return 0;
}
+
+static int audio_consumer_opensles_stop(tmedia_consumer_t* _self)
+{
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ if(!self){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return audio_opensles_instance_stop_consumer(self->audioInstHandle);
+}
+
+
+//
+// SLES audio consumer object definition
+//
+/* constructor */
+static tsk_object_t* audio_consumer_opensles_ctor(tsk_object_t *_self, va_list * app)
+{
+ audio_consumer_opensles_t *self = (audio_consumer_opensles_t *)_self;
+ if(self){
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(self));
+ /* init self */
+
+ }
+ return self;
+}
+/* destructor */
+static tsk_object_t* audio_consumer_opensles_dtor(tsk_object_t *_self)
+{
+ audio_consumer_opensles_t *self = (audio_consumer_opensles_t *)_self;
+ if(self){
+ /* stop */
+ audio_consumer_opensles_stop(TMEDIA_CONSUMER(self));
+ /* deinit self */
+ if(self->audioInstHandle){
+ audio_opensles_instance_destroy(&self->audioInstHandle);
+ }
+ TSK_FREE(self->buffer.ptr);
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(self));
+ }
+
+ return self;
+}
+/* object definition */
static const tsk_object_def_t audio_consumer_opensles_def_s =
{
	sizeof(audio_consumer_opensles_t),	// object size
	audio_consumer_opensles_ctor,		// constructor
	audio_consumer_opensles_dtor,		// destructor
	tdav_consumer_audio_cmp,			// comparator (base-class implementation)
};
+/* plugin definition*/
static const tmedia_consumer_plugin_def_t audio_consumer_opensles_plugin_def_s =
{
	&audio_consumer_opensles_def_s,		// underlying tsk_object definition

	tmedia_audio,						// media type
	"SLES audio consumer",				// friendly name

	audio_consumer_opensles_set,
	audio_consumer_opensles_prepare,
	audio_consumer_opensles_start,
	audio_consumer_opensles_consume,
	audio_consumer_opensles_pause,
	audio_consumer_opensles_stop
};
// Exported symbol used by the plugin registration code.
const tmedia_consumer_plugin_def_t *audio_consumer_opensles_plugin_def_t = &audio_consumer_opensles_plugin_def_s;
diff --git a/plugins/audio_opensles/audio_opensles_consumer.h b/plugins/audio_opensles/audio_opensles_consumer.h
new file mode 100644
index 0000000..1702db6
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_consumer.h
@@ -0,0 +1,32 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef _DOUBANGO_AUDIOOPENSLES_CONSUMER_H
+#define _DOUBANGO_AUDIOOPENSLES_CONSUMER_H
+
+#include "audio_opensles_config.h"
+
+AUDIO_OPENSLES_BEGIN_DECLS
+
+extern const struct tmedia_consumer_plugin_def_s *audio_consumer_opensles_plugin_def_t;
+
+int audio_consumer_opensles_get_data_10ms(const struct audio_consumer_opensles_s* self, void* audioSamples, int nSamples, int nBytesPerSample, int nChannels, int samplesPerSec, uint32_t &nSamplesOut);
+bool audio_consumer_opensles_is_speakerOn(const struct audio_consumer_opensles_s* self);
+
+AUDIO_OPENSLES_END_DECLS
+
+#endif /* _DOUBANGO_AUDIOOPENSLES_CONSUMER_H */
diff --git a/plugins/audio_opensles/audio_opensles_device.cxx b/plugins/audio_opensles/audio_opensles_device.cxx
new file mode 100644
index 0000000..3c06bfc
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_device.cxx
@@ -0,0 +1,1137 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_opensles_device.h"
+
+#include <stdio.h>
+#include <string.h>
+
+#define CHECK_TRUE(_bool, _text) { if(!_bool){ AUDIO_OPENSLES_DEBUG_ERROR(_text); return -1; } }
+#define CHECK_FALSE(_bool, _text) { if(_bool){ AUDIO_OPENSLES_DEBUG_ERROR(_text); return -1; } }
+#define CHECK_PLAYOUT_INITIALIZED() CHECK_TRUE(m_bPlayoutInitialized, "Playout not initialized")
+#define CHECK_PLAYOUT_NOT_INITIALIZED() CHECK_FALSE(m_bPlayoutInitialized, "Playout initialized")
+#define CHECK_RECORDING_INITIALIZED() CHECK_TRUE(m_bRecordingInitialized, "Recording not initialized")
+#define CHECK_RECORDING_NOT_INITIALIZED() CHECK_FALSE(m_bRecordingInitialized, "Recording initialized")
+#define CHECK_MICROPHONE_INITIALIZED() CHECK_TRUE(m_bMicrophoneInitialized, "Microphone not initialized")
+#define CHECK_MICROPHONE_NOT_INITIALIZED() CHECK_FALSE(m_bMicrophoneInitialized, "Microphone initialized")
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+static inline SLuint32 SL_SAMPLING_RATE(int RATE_INT){
+ switch(RATE_INT){
+ case 8000: return SL_SAMPLINGRATE_8;
+ case 11025: return SL_SAMPLINGRATE_11_025;
+ default:case 16000: return SL_SAMPLINGRATE_16;
+ case 22050: return SL_SAMPLINGRATE_22_05;
+ case 24000: return SL_SAMPLINGRATE_24;
+ case 32000: return SL_SAMPLINGRATE_32;
+ case 44100: return SL_SAMPLINGRATE_44_1;
+ case 64000: return SL_SAMPLINGRATE_64;
+ case 88200: return SL_SAMPLINGRATE_88_2;
+ case 96000: return SL_SAMPLINGRATE_96;
+ case 192000: return SL_SAMPLINGRATE_192;
+ }
+}
+#endif
+
// Constructs the device in a fully reset state: every OpenSL ES object and
// interface handle NULL (Android build only), queue counters zeroed, and the
// configuration set to the compile-time defaults. No native resource is
// acquired here -- that happens in Init()/InitPlayout()/etc.
// NOTE(review): C++ runs member initializers in declaration order (the header
// is outside this chunk), not in the order listed here -- keep the list in
// sync with the header to avoid -Wreorder surprises.
SLAudioDevice::SLAudioDevice(const SLAudioDeviceCallback* pCallback):
#if AUDIO_OPENSLES_UNDER_ANDROID
m_slEngineObject(NULL),
m_slPlayer(NULL),
m_slEngine(NULL),
m_slPlayerPlay(NULL),
m_slPlayerSimpleBufferQueue(NULL),
m_slOutputMixObject(NULL),
m_slSpeakerVolume(NULL),
m_slRecorder(NULL),
m_slRecorderRecord(NULL),
m_slAudioIODeviceCapabilities(NULL),
m_slRecorderSimpleBufferQueue(NULL),
m_slMicVolume(NULL),
_playQueueSeq(0),
_recCurrentSeq(0),
_recBufferTotalSize(0),
_recQueueSeq(0),
#endif
m_nMicDeviceId(0),
m_pCallback(pCallback),
m_bInitialized(false),
m_bSpeakerInitialized(false),
m_bSpeakerOn(false),
m_bPlayoutInitialized(false),
m_bRecordingInitialized(false),
m_bMicrophoneInitialized(false),
m_bStereoPlayout(false),
m_bStereoRecording(false),
m_nPlayoutSampleRate(PLAYOUT_SAMPLE_RATE),
m_nRecordingSampleRate(RECORDING_SAMPLE_RATE),
m_nRecordingBufferSize(RECORDING_BUFFER_SIZE),
m_nPlayoutBufferSize(PLAYOUT_BUFFER_SIZE),
m_bPlaying(false),
m_bRecording(false),
m_nSpeakerVolume(0),
m_nMinSpeakerVolume(0),
m_nMaxSpeakerVolume(0)
{
#if AUDIO_OPENSLES_UNDER_ANDROID
	// Zero the playout/record staging buffers and their bookkeeping arrays.
	memset(_playQueueBuffer, 0, sizeof(_playQueueBuffer));
	memset(_recQueueBuffer, 0, sizeof(_recQueueBuffer));
	memset(_recBuffer, 0, sizeof(_recBuffer));
	memset(_recLength, 0, sizeof(_recLength));
	memset(_recSeqNumber, 0, sizeof(_recSeqNumber));
#endif
}
+
SLAudioDevice::~SLAudioDevice()
{
	// Intentionally empty. NOTE(review): native SL objects created in Init()
	// and later are not destroyed here -- confirm that the teardown path
	// (not visible in this chunk) releases them before destruction.
}
+
// Replaces the device callback (pCallback may be NULL). Always succeeds.
int SLAudioDevice::SetCallback(const SLAudioDeviceCallback* pCallback)
{
	m_pCallback = pCallback;
	return 0;
}
+
+int SLAudioDevice::Init()
+{
+ CHECK_FALSE(m_bInitialized, "Already initialized");
+
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::Init()");
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+ SLresult slResult;
+
+ SLEngineOption EngineOption[] = {
+ { (SLuint32) SL_ENGINEOPTION_THREADSAFE, (SLuint32) SL_BOOLEAN_TRUE },
+ };
+ slResult = slCreateEngine(&m_slEngineObject, 1, EngineOption, 0, NULL, NULL);
+ if (slResult != SL_RESULT_SUCCESS) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create Engine with error code = %d", slResult);
+ return -1;
+ }
+ if ((slResult = (*m_slEngineObject)->Realize(m_slEngineObject, SL_BOOLEAN_FALSE)) != SL_RESULT_SUCCESS) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to Realize SL Engine with erro code = %d", slResult);
+ return -1;
+ }
+ if ((slResult = (*m_slEngineObject)->GetInterface(m_slEngineObject, SL_IID_ENGINE, (void*) &m_slEngine)) != SL_RESULT_SUCCESS) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to get SL Engine interface with error code = %d", slResult);
+ return -1;
+ }
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
+
+ m_bInitialized = true;
+ AUDIO_OPENSLES_DEBUG_INFO("SL engine initialized");
+ return 0;
+}
+
// Returns true once Init() has completed successfully.
bool SLAudioDevice::Initialized()
{
	return m_bInitialized;
}
+
+int SLAudioDevice::SpeakerIsAvailable(bool *pAvailable)
+{
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+
+ if(!pAvailable){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ *pAvailable = true;
+ return 0;
+}
+
+int SLAudioDevice::InitSpeaker()
+{
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitSpeaker()");
+
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+
+ if(m_bSpeakerInitialized){
+ return 0;
+ }
+
+ m_bSpeakerInitialized = true;
+ return 0;
+}
+
// Records the maximum speaker volume value. Speaker must be initialized first.
int SLAudioDevice::SetMaxSpeakerVolume(int nMaxSpeakerVolume)
{
	CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
	AUDIO_OPENSLES_DEBUG_INFO("SetMaxSpeakerVolume(%d)", nMaxSpeakerVolume);
	m_nMaxSpeakerVolume = nMaxSpeakerVolume;
	return 0;
}
+
// Records the minimum speaker volume value. Speaker must be initialized first.
int SLAudioDevice::SetMinSpeakerVolume(int nMinSpeakerVolume)
{
	CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
	AUDIO_OPENSLES_DEBUG_INFO("SetMinSpeakerVolume(%d)", nMinSpeakerVolume);
	m_nMinSpeakerVolume = nMinSpeakerVolume;
	return 0;
}
+
// Records the current speaker volume value. Speaker must be initialized first.
int SLAudioDevice::SetSpeakerVolume(int nSpeakerVolume)
{
	CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
	AUDIO_OPENSLES_DEBUG_INFO("SetSpeakerVolume(%d)", nSpeakerVolume);
	m_nSpeakerVolume = nSpeakerVolume;
	return 0;
}
+
+int SLAudioDevice::SetSpeakerOn(bool bSpeakerOn)
+{
+ CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
+
+ AUDIO_OPENSLES_DEBUG_INFO("SetSpeakerOn(%s -> %s)", (m_bSpeakerOn ? "true" : "false"), (bSpeakerOn ? "true" : "false"));
+ int ret = 0;
+ bool oldValue = m_bSpeakerOn;
+ m_bSpeakerOn = bSpeakerOn; // update value beacause use in PlayoutApplyNewConfig();
+ if(m_bPlayoutInitialized && (oldValue != bSpeakerOn)){
+ ret = PlayoutApplyNewConfig();
+ }
+
+ if(ret != 0){
+ m_bSpeakerOn = oldValue;
+ }
+
+ return ret;
+}
+
+int SLAudioDevice::PlayoutIsAvailable(bool *pAvailable)
+{
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_NOT_INITIALIZED();
+
+ if(!pAvailable){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ *pAvailable = true;
+ return 0;
+}
+
// Selects mono (false) or stereo (true) playout; must be called before
// InitPlayout() (enforced by CHECK_PLAYOUT_NOT_INITIALIZED).
int SLAudioDevice::SetStereoPlayout(bool bEnabled)
{
	CHECK_TRUE(m_bInitialized, "Not initialized");
	CHECK_PLAYOUT_NOT_INITIALIZED();
	m_bStereoPlayout = bEnabled;
	return 0;
}
+
+int SLAudioDevice::SetPlayoutBuffer(int nPlayoutBufferSize)
+{
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_NOT_INITIALIZED();
+
+ if(PLAYOUT_BUFFER_SIZE != nPlayoutBufferSize){
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ m_nPlayoutBufferSize = nPlayoutBufferSize;
+ return 0;
+}
+
+int SLAudioDevice::SetPlayoutSampleRate(int nPlayoutSampleRate)
+{
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_NOT_INITIALIZED();
+
+ AUDIO_OPENSLES_DEBUG_INFO("SetPlayoutSampleRate(%d)", nPlayoutSampleRate);
+
+ switch(nPlayoutSampleRate){
+ case 8000: case 11025: case 16000: case 22050: case 24000: case 32000: case 44100: case 64000: case 88200: case 96000: case 192000:
+ {
+ m_nPlayoutSampleRate = nPlayoutSampleRate;
+ return 0;
+ }
+ default:
+ {
+ AUDIO_OPENSLES_DEBUG_ERROR("%d not valid sampling rate", nPlayoutSampleRate);
+ return -1;
+ }
+ }
+}
+
+// Create and realize the OpenSL ES playout path (output mix + audio player)
+// using the current playout configuration (sample rate, stereo flag).
+// Idempotent when already initialized; fails if playout is running.
+// Returns 0 on success, negative on failure.
+int SLAudioDevice::InitPlayout()
+{
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+
+    AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitPlayout()");
+
+    if(m_bPlayoutInitialized){
+        return 0;
+    }
+
+    if (m_bPlaying) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Playout already started");
+        return -1;
+    }
+
+    // Initialize the speaker (best-effort: failure is logged but not fatal)
+    if (InitSpeaker()) {
+        AUDIO_OPENSLES_DEBUG_ERROR("InitSpeaker() failed");
+    }
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+
+    if (m_slEngineObject == NULL || m_slEngine == NULL) {
+        AUDIO_OPENSLES_DEBUG_ERROR("SLObject or Engine is NULL"); // FIX: "Engiine" typo
+        return -1;
+    }
+
+    SLresult slResult;
+    SLDataFormat_PCM pcm;
+    SLDataSource audioSource;
+    SLDataLocator_AndroidSimpleBufferQueue simpleBufferQueue;
+    SLDataSink audioSink;
+    SLDataLocator_OutputMix locator_outputmix;
+
+    // Create Output Mix object to be used by player
+    SLInterfaceID ids[N_MAX_INTERFACES];
+    SLboolean req[N_MAX_INTERFACES];
+    for (unsigned int i = 0; i < N_MAX_INTERFACES; i++) {
+        ids[i] = SL_IID_NULL;
+        req[i] = SL_BOOLEAN_FALSE;
+    }
+    ids[0] = SL_IID_ENVIRONMENTALREVERB;
+
+    if ((slResult = (*m_slEngine)->CreateOutputMix(m_slEngine, &m_slOutputMixObject, 1, ids, req)) != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("CreateOutputMix() for playout failed with error code = %d", slResult);
+        return -1;
+    }
+
+    if ((slResult = (*m_slOutputMixObject)->Realize(m_slOutputMixObject, SL_BOOLEAN_FALSE)) != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to realize SL Output Mix object for playout with error code = %d", slResult);
+        return -1;
+    }
+
+    simpleBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
+    simpleBufferQueue.numBuffers = N_PLAY_QUEUE_BUFFERS;
+
+    pcm.formatType = SL_DATAFORMAT_PCM;
+    pcm.numChannels = m_bStereoPlayout ? 2 : 1;
+    pcm.samplesPerSec = SL_SAMPLING_RATE(m_nPlayoutSampleRate);
+    pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
+    // FIX: the channel mask must follow the *playout* stereo flag (was keyed off
+    // m_bStereoRecording, so mask and numChannels could disagree).
+    pcm.channelMask = m_bStereoPlayout ? (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT) : SL_SPEAKER_FRONT_CENTER;
+    pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+
+    audioSource.pFormat = (void *) &pcm;
+    audioSource.pLocator = (void *) &simpleBufferQueue;
+
+    locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
+    locator_outputmix.outputMix = m_slOutputMixObject;
+    audioSink.pLocator = (void *) &locator_outputmix;
+    audioSink.pFormat = NULL;
+
+    ids[0] = SL_IID_ANDROIDSIMPLEBUFFERQUEUE;
+    ids[1] = SL_IID_EFFECTSEND;
+    ids[2] = SL_IID_ANDROIDCONFIGURATION;
+    ids[3] = SL_IID_VOLUME;
+    req[0] = SL_BOOLEAN_TRUE;
+    req[1] = SL_BOOLEAN_TRUE;
+    req[2] = SL_BOOLEAN_TRUE;
+    req[3] = SL_BOOLEAN_TRUE;
+
+    // Create the player
+    // NOTE(review): only 3 interfaces are requested although ids[3]/req[3]
+    // (SL_IID_VOLUME) are filled in above, so the volume interface is silently
+    // ignored. Confirm whether the count should be 4.
+    if ((slResult = (*m_slEngine)->CreateAudioPlayer(m_slEngine, &m_slPlayer, &audioSource, &audioSink, 3, ids, req)) != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to create Audio Player with error code = %d", slResult);
+        return -1;
+    }
+
+    // set stream type
+    if(!m_bSpeakerOn){ // only set if speaker OFF, otherwise default is ON. "SL_ANDROID_STREAM_MEDIA" doesn't look to work on all devices
+        // FIX: these locals were erroneously 'static', which froze the stream type
+        // at its first-ever value and broke multiple device instances.
+        SLAndroidConfigurationItf playerStreamConfig;
+        if((slResult = (*m_slPlayer)->GetInterface(m_slPlayer, SL_IID_ANDROIDCONFIGURATION, &playerStreamConfig)) != SL_RESULT_SUCCESS){
+            AUDIO_OPENSLES_DEBUG_ERROR("Failed to get player configuration with error code = %d", slResult);
+            return -1;
+        }
+        else{
+            SLint32 playerStreamType = m_bSpeakerOn ? SL_ANDROID_STREAM_MEDIA : SL_ANDROID_STREAM_VOICE; // always VOICE here (m_bSpeakerOn is false)
+            SLint32 playerStreamTypeSize = sizeof(SLint32);
+            AUDIO_OPENSLES_DEBUG_INFO("_playerStreamType=%d", playerStreamType);
+            // FIX: explicit success check (relied on SL_RESULT_SUCCESS being 0)
+            if((slResult = (*playerStreamConfig)->SetConfiguration(playerStreamConfig, SL_ANDROID_KEY_STREAM_TYPE, &playerStreamType, playerStreamTypeSize)) != SL_RESULT_SUCCESS){
+                AUDIO_OPENSLES_DEBUG_ERROR("Failed to set player stream type with error code = %d", slResult);
+                return -2;
+            }
+        }
+    }
+
+    // Realizing the player in synchronous mode
+    if ((slResult = (*m_slPlayer)->Realize(m_slPlayer, SL_BOOLEAN_FALSE)) != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to realize the player with error code = %d", slResult);
+        return -1;
+    }
+    // Get seek and play interfaces
+    if ((slResult = (*m_slPlayer)->GetInterface(m_slPlayer, SL_IID_PLAY, (void*) &m_slPlayerPlay)) != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to get Player interface with error code = %d", slResult);
+        return -1;
+    }
+    if ((slResult = (*m_slPlayer)->GetInterface(m_slPlayer, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, (void*) &m_slPlayerSimpleBufferQueue)) != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to get Player Simple Buffer Queue interface with error code = %d", slResult);
+        return -1;
+    }
+
+    // Setup to receive buffer queue event callbacks
+    if ((slResult = (*m_slPlayerSimpleBufferQueue)->RegisterCallback(m_slPlayerSimpleBufferQueue, PlayerSimpleBufferQueueCallback, this)) != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to register Player Callback");
+        return -1;
+    }
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
+
+    m_bPlayoutInitialized = true;
+
+    AUDIO_OPENSLES_DEBUG_INFO("Playout initialized");
+
+    return 0;
+}
+
+// Report whether playout is configured for stereo output.
+// Returns 0 and writes the flag to *pEnabled; -1 when pEnabled is NULL.
+int SLAudioDevice::StereoPlayout(bool *pEnabled)
+{
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+    CHECK_PLAYOUT_INITIALIZED();
+
+    if(pEnabled){
+        *pEnabled = m_bStereoPlayout;
+        return 0;
+    }
+    AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+    return -1;
+}
+
+// Return the configured playout sampling rate through *pPlayoutSampleRate.
+// Returns 0 on success, -1 when the output pointer is NULL.
+int SLAudioDevice::PlayoutSampleRate(int *pPlayoutSampleRate)
+{
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+    CHECK_PLAYOUT_INITIALIZED();
+
+    if(pPlayoutSampleRate){
+        *pPlayoutSampleRate = m_nPlayoutSampleRate;
+        return 0;
+    }
+    AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+    return -1;
+}
+
+// Prime the player queue with one 10 ms buffer pulled from the callback and
+// switch the player to SL_PLAYSTATE_PLAYING. Requires InitPlayout() first.
+// Returns 0 on success (or if already playing), -1 on failure.
+int SLAudioDevice::StartPlayout()
+{
+    AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StartPlayout()");
+
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+    CHECK_PLAYOUT_INITIALIZED();
+
+    if (m_bPlaying) {
+        return 0;
+    }
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+    if (m_slPlayerPlay == NULL) {
+        AUDIO_OPENSLES_DEBUG_ERROR("PlayItf is NULL");
+        return -1;
+    }
+    if (m_slPlayerSimpleBufferQueue == NULL) {
+        AUDIO_OPENSLES_DEBUG_ERROR("PlayerSimpleBufferQueue is NULL");
+        return -1;
+    }
+
+    // FIX: reset the *playout* queue sequence (was wrongly resetting
+    // _recQueueSeq, which belongs to the recording path).
+    _playQueueSeq = 0;
+
+    SLresult slResult;
+    /* Enqueue a set of zero buffers to get the ball rolling */
+    uint32_t nSample10ms = m_nPlayoutSampleRate / 100;
+    uint8_t playBuffer[nSample10ms << BYTES_PER_SAMPLE_LOG2];
+    uint32_t noSamplesOut(0);
+    {
+        // get data from jitter buffer
+        noSamplesOut = SLAudioDevice::PullPlayoutData(playBuffer, nSample10ms);
+        if(noSamplesOut != nSample10ms){
+            AUDIO_OPENSLES_DEBUG_WARN("%d not expected as samples output count value", noSamplesOut);
+            noSamplesOut = nSample10ms;
+            memset(_playQueueBuffer[_playQueueSeq], 0, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+        }
+        else{
+            memcpy(_playQueueBuffer[_playQueueSeq], playBuffer, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+        }
+
+        // write the buffer data into the device
+        // FIX: use BYTES_PER_SAMPLE_LOG2 instead of the magic '<< 1' for the byte count
+        if ((slResult = (*m_slPlayerSimpleBufferQueue)->Enqueue(m_slPlayerSimpleBufferQueue, (void*) _playQueueBuffer[_playQueueSeq], (noSamplesOut << BYTES_PER_SAMPLE_LOG2))) != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_ERROR("Player simpler buffer queue Enqueue failed with error code = %d and noSamplesOut = %d", slResult, noSamplesOut);
+        }
+        _playQueueSeq = (_playQueueSeq + 1) % N_PLAY_QUEUE_BUFFERS;
+    }
+
+    // Play the PCM samples using a buffer queue
+    m_bPlaying = true; // set before SetPlayState so the first callback sees it
+    if ((slResult = (*m_slPlayerPlay)->SetPlayState(m_slPlayerPlay, SL_PLAYSTATE_PLAYING)) != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to start playout with error code = %d", slResult);
+        m_bPlaying = false;
+        return -1;
+    }
+#else
+    m_bPlaying = true;
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
+
+    AUDIO_OPENSLES_DEBUG_INFO("Playout started - rate=%d", m_nPlayoutSampleRate); // FIX: "Payout" typo
+
+    return 0;
+}
+
+// Returns true while playout is active (set in StartPlayout, cleared in StopPlayout).
+bool SLAudioDevice::Playing()
+{
+    return m_bPlaying;
+}
+
+// Stop playout, clear the player queue, and destroy the player and output-mix
+// objects so that InitPlayout() can rebuild them. No-op when not playing.
+// Returns 0 on success, -1 on failure.
+int SLAudioDevice::StopPlayout()
+{
+    AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StopPlayout()");
+
+    if(!m_bPlaying){
+        return 0;
+    }
+#if AUDIO_OPENSLES_UNDER_ANDROID
+    if ((m_slPlayerPlay != NULL) && (m_slOutputMixObject != NULL) && (m_slPlayer != NULL)) {
+        SLresult slResult;
+
+        if ((slResult = (*m_slPlayerPlay)->SetPlayState(m_slPlayerPlay, SL_PLAYSTATE_STOPPED)) != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_ERROR("Failed to stop playout with error code = %d", slResult);
+            return -1;
+        }
+
+        // FIX: guard against a NULL buffer-queue interface; it was dereferenced
+        // without being covered by the check above.
+        if (m_slPlayerSimpleBufferQueue != NULL) {
+            if ((slResult = (*m_slPlayerSimpleBufferQueue)->Clear(m_slPlayerSimpleBufferQueue)) != SL_RESULT_SUCCESS) {
+                AUDIO_OPENSLES_DEBUG_ERROR("Failed to clear player buffer queue"); // FIX: message wrongly said "recorder"
+                return -1;
+            }
+        }
+
+        // Destroy the player
+        (*m_slPlayer)->Destroy(m_slPlayer);
+        // Destroy Output Mix object
+        (*m_slOutputMixObject)->Destroy(m_slOutputMixObject);
+        m_slPlayer = NULL;
+        m_slPlayerPlay = NULL;
+        m_slPlayerSimpleBufferQueue = NULL;
+        m_slOutputMixObject = NULL;
+    }
+#endif
+
+    AUDIO_OPENSLES_DEBUG_INFO("Playout stopped");
+    m_bPlayoutInitialized = false;
+    m_bPlaying = false;
+    return 0;
+}
+
+// Capture is always reported as available on this platform.
+// Writes true to *pAvailable; returns -1 only when the pointer is NULL.
+int SLAudioDevice::RecordingIsAvailable(bool *pAvailable)
+{
+    CHECK_TRUE(m_bInitialized, "Device not initialized");
+    CHECK_RECORDING_NOT_INITIALIZED();
+
+    if(pAvailable){
+        *pAvailable = true;
+        return 0;
+    }
+    AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+    return -1;
+}
+
+// A microphone is always reported as available on this platform.
+// Writes true to *pAvailable; returns -1 only when the pointer is NULL.
+int SLAudioDevice::MicrophoneIsAvailable(bool *pAvailable)
+{
+    CHECK_TRUE(m_bInitialized, "Device not initialized");
+    CHECK_RECORDING_NOT_INITIALIZED();
+
+    if(pAvailable){
+        *pAvailable = true;
+        return 0;
+    }
+    AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+    return -1;
+}
+
+// Mark the microphone as initialized. No device-specific setup is required,
+// so this just flips the flag. Idempotent; always returns 0 once the device
+// itself is initialized.
+int SLAudioDevice::InitMicrophone()
+{
+    AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitMicrophone()");
+    CHECK_TRUE(m_bInitialized, "Device not initialized");
+
+    if(!m_bMicrophoneInitialized){
+        m_bMicrophoneInitialized = true;
+    }
+    return 0;
+}
+
+// Set the capture volume. nMicrophoneVolume is expected in [0, 255] and is
+// rescaled to the device range before being applied through the (lazily
+// fetched) SL_IID_DEVICEVOLUME interface. Returns 0 on success, -1 on failure.
+int SLAudioDevice::SetMicrophoneVolume(int nMicrophoneVolume)
+{
+    CHECK_MICROPHONE_INITIALIZED();
+
+    AUDIO_OPENSLES_DEBUG_INFO("SetMicrophoneVolume(%d)", nMicrophoneVolume);
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+    // Lazily acquire the device-volume interface from the engine.
+    if (m_slMicVolume == NULL) {
+        SLresult slResult;
+        if ((slResult = (*m_slEngineObject)->GetInterface(m_slEngineObject, SL_IID_DEVICEVOLUME, (void*) &m_slMicVolume)) != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_ERROR("Failed to get 'SL_IID_DEVICEVOLUME' interface with error code = %d", slResult);
+            return -1;
+        }
+    }
+
+    if (m_slMicVolume != NULL) {
+        SLresult slResult;
+        int vol(0);
+        // NOTE(review): the mic volume is scaled with the *speaker* min/max
+        // bounds (m_nMaxSpeakerVolume/m_nMinSpeakerVolume); no microphone
+        // bounds exist in this class -- confirm this is intentional.
+        vol = ((nMicrophoneVolume * (m_nMaxSpeakerVolume - m_nMinSpeakerVolume) + (int) (255 / 2)) / (255)) + m_nMinSpeakerVolume;
+        if ((slResult = (*m_slMicVolume)->SetVolume(m_slMicVolume, m_nMicDeviceId, vol)) != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_ERROR("SetVolume() failed with error code = %d", slResult);
+            return -1;
+        }
+    }
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
+
+    return 0;
+}
+
+// Select mono/stereo capture; only legal before InitRecording().
+// Note: InitRecording() currently hard-codes 1 channel regardless of this flag.
+int SLAudioDevice::SetStereoRecording(bool bEnabled)
+{
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+    CHECK_RECORDING_NOT_INITIALIZED();
+    AUDIO_OPENSLES_DEBUG_INFO("SetStereoRecording(%s)", bEnabled ? "True" : "False");
+    m_bStereoRecording = bEnabled;
+    return 0;
+}
+
+// Select the recording (capture) sampling rate in Hz.
+// Only legal before InitRecording(); the rate takes effect when the recorder
+// is created. Returns 0 on success, -1 for an unsupported rate.
+int SLAudioDevice::SetRecordingSampleRate(int nRecordingSampleRate)
+{
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+    CHECK_RECORDING_NOT_INITIALIZED();
+
+    AUDIO_OPENSLES_DEBUG_INFO("SetRecordingSampleRate(%d)", nRecordingSampleRate);
+
+    // Whitelist of sampling rates accepted by this device wrapper.
+    static const int kValidRates[] = { 8000, 11025, 16000, 22050, 24000, 32000, 44100, 64000, 88200, 96000, 192000 };
+    for (unsigned i = 0; i < sizeof(kValidRates) / sizeof(kValidRates[0]); ++i) {
+        if (kValidRates[i] == nRecordingSampleRate) {
+            m_nRecordingSampleRate = nRecordingSampleRate;
+            return 0;
+        }
+    }
+    AUDIO_OPENSLES_DEBUG_ERROR("%d not valid sampling rate", nRecordingSampleRate);
+    return -1;
+}
+
+// Create and realize the OpenSL ES capture path (audio recorder fed by the
+// default audio input) using the configured recording sample rate.
+// Idempotent when already initialized; fails if recording is running.
+// Returns 0 on success, negative on failure.
+int SLAudioDevice::InitRecording()
+{
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+
+    AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitRecording()");
+
+    if (m_bRecording) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Recording already started");
+        return -1;
+    }
+
+    if (m_bRecordingInitialized) {
+        return 0;
+    }
+
+    // Initialize the microphone (best-effort: failure is logged but not fatal)
+    if (InitMicrophone() == -1) {
+        AUDIO_OPENSLES_DEBUG_ERROR("InitMicrophone() failed");
+    }
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+
+    if (m_slEngineObject == NULL || m_slEngine == NULL) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Recording object is NULL");
+        return -1;
+    }
+
+    SLresult slResult;
+    SLDataSource audioSource;
+    SLDataLocator_IODevice micLocator;
+    SLDataSink audioSink;
+    SLDataFormat_PCM pcm;
+    SLDataLocator_AndroidSimpleBufferQueue simpleBufferQueue;
+
+    // Setup the data source structure
+    micLocator.locatorType = SL_DATALOCATOR_IODEVICE;
+    micLocator.deviceType = SL_IODEVICE_AUDIOINPUT;
+    micLocator.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT; //micDeviceID;
+    micLocator.device = NULL;
+    audioSource.pLocator = (void *) &micLocator;
+    audioSource.pFormat = NULL;
+
+    /* Setup the data source structure for the buffer queue */
+    simpleBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
+    simpleBufferQueue.numBuffers = N_REC_QUEUE_BUFFERS;
+    /* Setup the format of the content in the buffer queue */
+    pcm.formatType = SL_DATAFORMAT_PCM;
+    // NOTE(review): capture is hard-coded to mono; m_bStereoRecording is
+    // ignored here -- confirm whether stereo capture should be honored.
+    pcm.numChannels = 1;
+    // _samplingRateIn is initialized in initSampleRate()
+    pcm.samplesPerSec = SL_SAMPLING_RATE(m_nRecordingSampleRate);
+    pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.containerSize = 16;
+    pcm.channelMask = SL_SPEAKER_FRONT_CENTER;
+    pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+    audioSink.pFormat = (void *) &pcm;
+    audioSink.pLocator = (void *) &simpleBufferQueue;
+
+    const SLInterfaceID id[2] = { SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
+    const SLboolean req[2] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
+    slResult = (*m_slEngine)->CreateAudioRecorder(m_slEngine, &m_slRecorder, &audioSource, &audioSink, 2, id, req);
+    if (slResult != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to create Recorder with error code = %d", slResult);
+        return -1;
+    }
+
+    // Set stream type
+    // (despite the variable name, this is an Android *recording preset*,
+    // applied before Realize() as required)
+    SLAndroidConfigurationItf slRecorderConfig;
+    SLint32 slStreamType = SL_ANDROID_RECORDING_PRESET_GENERIC;
+    slResult = (*m_slRecorder)->GetInterface(m_slRecorder, SL_IID_ANDROIDCONFIGURATION, &slRecorderConfig);
+    if(slResult != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("GetInterface(SL_IID_ANDROIDCONFIGURATION) failed with error code = %d", slResult);
+        return -1;
+    }
+    AUDIO_OPENSLES_DEBUG_INFO("Recording stream type = %d", slStreamType);
+    slResult = (*slRecorderConfig)->SetConfiguration(slRecorderConfig, SL_ANDROID_KEY_RECORDING_PRESET, &slStreamType, sizeof(SLint32));
+    if(slResult != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("SetConfiguration(SL_ANDROID_KEY_RECORDING_PRESET) failed with error code = %d", slResult);
+        return -1;
+    }
+
+
+    // Realizing the recorder in synchronous mode.
+    slResult = (*m_slRecorder)->Realize(m_slRecorder, SL_BOOLEAN_FALSE);
+    if (slResult != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to realize Recorder with error code = %d", slResult);
+        return -1;
+    }
+
+    // Get the RECORD interface - it is an implicit interface
+    slResult = (*m_slRecorder)->GetInterface(m_slRecorder, SL_IID_RECORD, (void*) &m_slRecorderRecord);
+    if (slResult != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to get Recorder interface with error code = %d", slResult);
+        return -1;
+    }
+
+    // Get the simpleBufferQueue interface
+    slResult = (*m_slRecorder)->GetInterface(m_slRecorder, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, (void*) &m_slRecorderSimpleBufferQueue);
+    if (slResult != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to get Recorder Simple Buffer Queue with error code = %d", slResult);
+        return -1;
+    }
+
+    // Setup to receive buffer queue event callbacks
+    slResult = (*m_slRecorderSimpleBufferQueue)->RegisterCallback(m_slRecorderSimpleBufferQueue, RecorderSimpleBufferQueueCallback, this);
+    if (slResult != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to register Recorder Callback with error code = %d", slResult);
+        return -1;
+    }
+
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
+
+    AUDIO_OPENSLES_DEBUG_INFO("Recording initialized");
+
+    m_bRecordingInitialized = true;
+
+    return 0;
+}
+
+// Report whether capture is configured for stereo input.
+// Returns 0 and writes the flag to *pEnabled; -1 when pEnabled is NULL.
+int SLAudioDevice::StereoRecording(bool *pEnabled)
+{
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+    CHECK_RECORDING_INITIALIZED();
+
+    if(pEnabled){
+        *pEnabled = m_bStereoRecording;
+        return 0;
+    }
+    AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+    return -1;
+}
+
+// Return the configured recording sampling rate through *pRecordingSampleRate.
+// Returns 0 on success, -1 when the output pointer is NULL.
+int SLAudioDevice::RecordingSampleRate(int *pRecordingSampleRate)
+{
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+    CHECK_RECORDING_INITIALIZED();
+
+    if(pRecordingSampleRate){
+        *pRecordingSampleRate = m_nRecordingSampleRate;
+        return 0;
+    }
+    AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+    return -1;
+}
+
+// Start capture: clear all recording buffers, pre-enqueue N_REC_QUEUE_BUFFERS-1
+// zeroed 10 ms buffers, then switch the recorder to SL_RECORDSTATE_RECORDING.
+// Requires InitRecording() first. Returns 0 on success, -1 on failure.
+int SLAudioDevice::StartRecording()
+{
+    AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StartRecording()");
+
+    CHECK_TRUE(m_bInitialized, "Not initialized");
+    CHECK_RECORDING_INITIALIZED();
+
+    if (m_bRecording) {
+        return 0;
+    }
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+
+    if (m_slRecorderRecord == NULL) {
+        AUDIO_OPENSLES_DEBUG_ERROR("RecordITF is NULL");
+        return -1;
+    }
+
+    if (m_slRecorderSimpleBufferQueue == NULL) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Recorder Simple Buffer Queue is NULL");
+        return -1;
+    }
+
+    // Reset recording buffer
+    memset(_recQueueBuffer, 0, sizeof(_recQueueBuffer)); // empty the queue
+    _recQueueSeq = 0;
+
+    memset(_recBuffer, 0, sizeof(_recBuffer));
+    memset(_recLength, 0, sizeof(_recLength));
+    memset(_recSeqNumber, 0, sizeof(_recSeqNumber));
+
+    // Enqueue N_REC_QUEUE_BUFFERS -1 zero buffers to get the ball rolling
+    // find out how it behaves when the sample rate is 44100
+    SLresult slResult;
+    int nSample10ms = m_nRecordingSampleRate / 100; // 10 ms worth of samples
+    for (int i = 0; i < (N_REC_QUEUE_BUFFERS - 1); i++) {
+        // We assign 10ms buffer to each queue, size given in bytes.
+        slResult = (*m_slRecorderSimpleBufferQueue)->Enqueue(m_slRecorderSimpleBufferQueue, (void*) _recQueueBuffer[_recQueueSeq], (nSample10ms << BYTES_PER_SAMPLE_LOG2));
+        if (slResult != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_ERROR("Failed to Enqueue Empty Buffer to recorder with error code = %d", slResult);
+            return -1;
+        }
+        _recQueueSeq++; // ends at N_REC_QUEUE_BUFFERS-1, still within bounds
+    }
+    // Record the audio
+    m_bRecording = true; // set before SetRecordState so the first callback sees it
+    slResult = (*m_slRecorderRecord)->SetRecordState(m_slRecorderRecord, SL_RECORDSTATE_RECORDING);
+    if (slResult != SL_RESULT_SUCCESS) {
+        AUDIO_OPENSLES_DEBUG_ERROR("Failed to start recording with error code = %d", slResult);
+        m_bRecording = false;
+        return -1;
+    }
+#else
+    m_bRecording = true;
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
+
+    AUDIO_OPENSLES_DEBUG_INFO("Recording started - rate = %d", m_nRecordingSampleRate);
+
+    return 0;
+}
+
+// Returns true while capture is active (set in StartRecording, cleared in StopRecording).
+bool SLAudioDevice::Recording()
+{
+    return m_bRecording;
+}
+
+// Stop capture, clear the recorder queue, and destroy the recorder object so
+// that InitRecording() can rebuild it. No-op when not recording.
+// Returns 0 on success, -1 on failure.
+int SLAudioDevice::StopRecording()
+{
+    AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StopRecording()");
+    if (!m_bRecording) {
+        return 0;
+    }
+#if AUDIO_OPENSLES_UNDER_ANDROID
+    if ((m_slRecorderRecord != NULL) && (m_slRecorder != NULL)) {
+        SLresult slResult = (*m_slRecorderRecord)->SetRecordState(m_slRecorderRecord, SL_RECORDSTATE_STOPPED);
+        if (slResult != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_ERROR("Failed to stop recording with error code = %d", slResult);
+            return -1;
+        }
+        // FIX: guard the queue interface before clearing it (it is not covered
+        // by the check above).
+        if (m_slRecorderSimpleBufferQueue != NULL) {
+            slResult = (*m_slRecorderSimpleBufferQueue)->Clear(m_slRecorderSimpleBufferQueue);
+            if (slResult != SL_RESULT_SUCCESS) {
+                AUDIO_OPENSLES_DEBUG_ERROR("Failed to clear recorder buffer queue with error code = %d", slResult);
+                return -1;
+            }
+        }
+
+        // Destroy the recorder object
+        (*m_slRecorder)->Destroy(m_slRecorder);
+        m_slRecorder = NULL;
+        m_slRecorderRecord = NULL;
+        // FIX: was a duplicated 'm_slRecorderRecord = NULL;' which left the
+        // buffer-queue interface dangling after Destroy().
+        m_slRecorderSimpleBufferQueue = NULL;
+    }
+#endif
+
+    AUDIO_OPENSLES_DEBUG_INFO("Recording stopped");
+    m_bRecording = false;
+    m_bRecordingInitialized = false;
+    return 0;
+}
+
+// Tear down every OpenSL ES object (player, recorder, output mix, engine) and
+// reset all state flags. Stops active recording/playout first. Safe to call
+// repeatedly; always returns 0.
+int SLAudioDevice::Terminate()
+{
+    if (!m_bInitialized) {
+        return 0;
+    }
+
+    if(Recording()){
+        StopRecording();
+    }
+
+    if(Playing()){
+        StopPlayout();
+    }
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+
+    if(m_slPlayer){
+        (*m_slPlayer)->Destroy(m_slPlayer);
+        m_slPlayer = NULL;
+        m_slPlayerPlay = NULL;
+        m_slPlayerSimpleBufferQueue = NULL;
+    }
+
+    if(m_slRecorder){
+        (*m_slRecorder)->Destroy(m_slRecorder);
+        m_slRecorder = NULL;
+        m_slRecorderRecord = NULL;
+        m_slRecorderSimpleBufferQueue = NULL;
+        m_slAudioIODeviceCapabilities = NULL;
+    }
+
+    if(m_slOutputMixObject){
+        (*m_slOutputMixObject)->Destroy(m_slOutputMixObject);
+        m_slOutputMixObject = NULL;
+    }
+
+    if (m_slEngineObject) {
+        (*m_slEngineObject)->Destroy(m_slEngineObject);
+        m_slEngineObject = NULL;
+        m_slEngine = NULL;
+    }
+#endif
+
+    m_bSpeakerInitialized = false;
+    m_bPlayoutInitialized = false;
+    m_bRecordingInitialized = false;
+    m_bMicrophoneInitialized = false; // FIX: was never reset, unlike the sibling flags
+    m_bInitialized = false;
+
+    return 0;
+}
+
+// Re-create the playout path so that new settings (rate/channels) take effect.
+// If the player exists and playout is running, it is stopped, re-initialized
+// and restarted; otherwise nothing is done. Returns 0 on success.
+int SLAudioDevice::PlayoutApplyNewConfig()
+{
+    AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::PlayoutApplyNewConfig()");
+#if AUDIO_OPENSLES_UNDER_ANDROID
+    if(m_slPlayer && Playing()){
+        int ret;
+        if((ret = StopPlayout())){
+            AUDIO_OPENSLES_DEBUG_ERROR("Failed to stop playout for reconf");
+            return ret;
+        }
+        if((ret = InitPlayout())){
+            AUDIO_OPENSLES_DEBUG_ERROR("Failed to init() playout after reconf");
+            return ret;
+        }
+        if((ret = StartPlayout())){
+            AUDIO_OPENSLES_DEBUG_ERROR("Failed to start() playout after reconf");
+            return ret;
+        }
+    }
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
+    return 0;
+}
+
+
+
+// Pull up to nSamples 16-bit samples of render audio from the registered
+// callback into pAudioSamples. Returns the number of samples produced.
+// With no callback attached the buffer is zero-filled (silence) and nSamples
+// is returned; invalid arguments return 0.
+uint32_t SLAudioDevice::PullPlayoutData(void* pAudioSamples, const uint32_t nSamples)
+{
+    if(!pAudioSamples || !nSamples){
+        AUDIO_OPENSLES_DEBUG_ERROR("PullPlayoutData() - Invalid parameter");
+        return 0;
+    }
+
+    if(!m_pCallback){
+        memset(pAudioSamples, 0, (nSamples << BYTES_PER_SAMPLE_LOG2));
+        return nSamples;
+    }
+
+    uint32_t nSamplesOut = 0;
+    // const_cast: the callback is stored const but NeedMorePlayData() is non-const
+    const_cast<SLAudioDeviceCallback*>(m_pCallback)->NeedMorePlayData(nSamples,
+                                        BYTES_PER_SAMPLE,
+                                        m_bStereoPlayout ? 2 : 1,
+                                        m_nPlayoutSampleRate,
+                                        pAudioSamples,
+                                        nSamplesOut);
+    return nSamplesOut;
+}
+
+// Deliver nSamples of captured 16-bit PCM to the registered callback.
+// Silently drops the data when no callback is attached.
+void SLAudioDevice::PushRecordingData(void* pAudioSamples, const uint32_t nSamples)
+{
+    if (!pAudioSamples || !nSamples) {
+        AUDIO_OPENSLES_DEBUG_ERROR("PushRecordingData() - Invalid parameter");
+        return;
+    }
+    if (!m_pCallback) {
+        return;
+    }
+    // const_cast: the callback is stored const but RecordedDataIsAvailable() is non-const
+    SLAudioDeviceCallback* pCb = const_cast<SLAudioDeviceCallback*>(m_pCallback);
+    pCb->RecordedDataIsAvailable(pAudioSamples,
+                                 nSamples,
+                                 BYTES_PER_SAMPLE,
+                                 m_bStereoRecording ? 2 : 1,
+                                 m_nRecordingSampleRate);
+}
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+
+// OpenSL ES buffer-queue callback, invoked on a system audio thread each time
+// the player consumes a buffer. Pulls the next 10 ms from the jitter buffer
+// (silence when it cannot deliver) and re-enqueues it.
+void SLAudioDevice::PlayerSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf queueItf, void *pContext)
+{
+    SLAudioDevice* This = static_cast<SLAudioDevice*> (pContext);
+
+    // AUDIO_OPENSLES_DEBUG_INFO("PlayerSimpleBufferQueueCallback(playing=%s, _playQueueSeq=%d)", (This->m_bPlaying ? "true" : "false"), This->_playQueueSeq);
+
+    if (This->m_bPlaying && (This->_playQueueSeq < N_PLAY_QUEUE_BUFFERS)) {
+        unsigned int noSamp10ms = This->m_nPlayoutSampleRate / 100;
+        // VLA scratch buffer: 10 ms of 16-bit samples at the playout rate
+        uint8_t playBuffer[noSamp10ms << BYTES_PER_SAMPLE_LOG2];
+        uint32_t noSamplesOut = This->PullPlayoutData(playBuffer, noSamp10ms);
+
+        if (noSamp10ms != noSamplesOut) {
+            if(noSamplesOut){ // (noSamplesOut==0) -> jitter buffer cannot provide data
+                AUDIO_OPENSLES_DEBUG_ERROR("noSamp10ms (%u) != noSamplesOut (%d)", noSamp10ms, noSamplesOut);
+            }
+            // substitute silence so playback cadence is preserved
+            noSamplesOut = noSamp10ms;
+            memset(This->_playQueueBuffer[This->_playQueueSeq], 0, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+        }
+        else{
+            memcpy(This->_playQueueBuffer[This->_playQueueSeq], playBuffer, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+        }
+
+        SLresult slResult = (*This->m_slPlayerSimpleBufferQueue)->Enqueue(This->m_slPlayerSimpleBufferQueue, This->_playQueueBuffer[This->_playQueueSeq], (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+        if (slResult != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_ERROR("Player simpler buffer queue Enqueue failed, noSamplesOut=%d, ret=%d", noSamplesOut, slResult);
+            return;
+        }
+        // update the play buffer sequency
+        This->_playQueueSeq = (This->_playQueueSeq + 1) % N_PLAY_QUEUE_BUFFERS;
+    }
+}
+
+// OpenSL ES buffer-queue callback, invoked on a system audio thread each time
+// the recorder fills a buffer. In the active (#if 1) branch the data is pushed
+// synchronously to the registered callback and the same buffer re-enqueued.
+// NOTE(review): the active branch always uses _recQueueBuffer[0] although
+// StartRecording() enqueued buffers 0..N_REC_QUEUE_BUFFERS-2 -- confirm the
+// indexing is intentional. The disabled (#else) branch references _recLength,
+// _recSeqNumber and _recCurrentSeq without 'This->' and would not compile if
+// re-enabled.
+void SLAudioDevice::RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf queueItf, void *pContext)
+{
+    // AUDIO_OPENSLES_DEBUG_INFO("RecorderSimpleBufferQueueCallback()");
+
+    SLAudioDevice* This = static_cast<SLAudioDevice*> (pContext);
+
+    if (This->m_bRecording) {
+        const unsigned int noSamp10ms = This->m_nRecordingSampleRate / 100;
+
+#if 1 // not using async thread
+        // push data
+        This->PushRecordingData(This->_recQueueBuffer[0], noSamp10ms);
+        // enqueue new buffer
+        SLresult slResult = (*This->m_slRecorderSimpleBufferQueue)->Enqueue(
+            This->m_slRecorderSimpleBufferQueue,
+            (void*) This->_recQueueBuffer[0],
+            (noSamp10ms << BYTES_PER_SAMPLE_LOG2));
+        if (slResult != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_WARN("Failed to enqueue recording buffer with error code = %d", slResult);
+            return;
+        }
+#else
+        unsigned int dataPos = 0;
+        uint16_t bufPos = 0;
+        int16_t insertPos = -1;
+        unsigned int nCopy = 0; // Number of samples to copy
+
+        while (dataPos < noSamp10ms)
+        {
+            // Loop over all recording buffers or until we find the partially
+            // full buffer
+            // First choice is to insert into partially full buffer,
+            // second choice is to insert into empty buffer
+            bufPos = 0;
+            insertPos = -1;
+            nCopy = 0;
+            while (bufPos < N_REC_BUFFERS)
+            {
+                if ((This->_recLength[bufPos] > 0) && (This->_recLength[bufPos] < noSamp10ms))
+                {
+                    // Found the partially full buffer
+                    insertPos = static_cast<int16_t> (bufPos);
+                    bufPos = N_REC_BUFFERS; // Don't need to search more
+                }
+                else if ((-1 == insertPos) && (0 == This->_recLength[bufPos]))
+                {
+                    // Found an empty buffer
+                    insertPos = static_cast<int16_t> (bufPos);
+                }
+                ++bufPos;
+            }
+
+            if (insertPos > -1)
+            {
+                // We found a non-full buffer, copy data from the buffer queue
+                // o recBuffer
+                unsigned int dataToCopy = noSamp10ms - dataPos;
+                unsigned int currentRecLen = _recLength[insertPos];
+                unsigned int roomInBuffer = noSamp10ms - currentRecLen;
+                nCopy = (dataToCopy < roomInBuffer ? dataToCopy : roomInBuffer);
+                memcpy(&This->_recBuffer[insertPos][currentRecLen], &This->_recQueueBuffer[This->_recQueueSeq][dataPos], nCopy * sizeof(short));
+                if (0 == currentRecLen)
+                {
+                    _recSeqNumber[insertPos] = This->_recCurrentSeq;
+                    ++_recCurrentSeq;
+                }
+                This->_recBufferTotalSize += nCopy;
+                // Has to be done last to avoid interrupt problems
+                // between threads
+                This->_recLength[insertPos] += nCopy;
+                dataPos += nCopy;
+            }
+            else
+            {
+                // Didn't find a non-full buffer
+                AUDIO_OPENSLES_DEBUG_WARN("Could not insert into recording buffer");
+                dataPos = noSamp10ms; // Don't try to insert more
+            }
+        }
+
+        // clean the queue buffer
+        // Start with empty buffer
+        memset(This->_recQueueBuffer[This->_recQueueSeq], 0, (REC_BUF_SIZE_IN_SAMPLES << BYTES_PER_SAMPLE_LOG2));
+        // write the empty buffer to the queue
+        SLresult slResult = (*This->m_slRecorderSimpleBufferQueue)->Enqueue(
+            This->m_slRecorderSimpleBufferQueue,
+            (void*) This->_recQueueBuffer[This->_recQueueSeq],
+            (noSamp10ms << BYTES_PER_SAMPLE_LOG2));
+        if (slResult != SL_RESULT_SUCCESS) {
+            AUDIO_OPENSLES_DEBUG_WARN("Failed to enqueue recording buffer with error code = %d", slResult);
+            return;
+        }
+
+        // update the rec queue seq
+        This->_recQueueSeq = (This->_recQueueSeq + 1) % N_REC_QUEUE_BUFFERS;
+
+        // alert thread
+        // TODO
+#endif
+    }
+}
+
+#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
diff --git a/plugins/audio_opensles/audio_opensles_device.h b/plugins/audio_opensles/audio_opensles_device.h
new file mode 100644
index 0000000..7f24dac
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_device.h
@@ -0,0 +1,171 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_opensles_config.h"
+
+#ifndef _DOUBANGO_AUDIO_OPENSLES_SLDEVICE_H
+#define _DOUBANGO_AUDIO_OPENSLES_SLDEVICE_H
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+# include <SLES/OpenSLES.h>
+# include <SLES/OpenSLES_Android.h>
+# include <SLES/OpenSLES_AndroidConfiguration.h>
+#endif
+
+// 16-bit PCM samples: 2 bytes each; LOG2 form is used for shift arithmetic.
+#define BYTES_PER_SAMPLE 2
+#define BYTES_PER_SAMPLE_LOG2 1
+
+#define PLAYOUT_BUFFER_SIZE 10 // millis
+#define PLAYOUT_SAMPLE_RATE 16000
+#define RECORDING_BUFFER_SIZE 10 // millis
+#define RECORDING_SAMPLE_RATE 16000
+
+// max buffer size = 10 ms @ 48 kHz
+// NOTE(review): Set*SampleRate() accepts rates up to 192000, but these 10 ms
+// buffers only hold 480 samples (48 kHz max). Rates above 48 kHz would
+// overflow _playQueueBuffer/_recQueueBuffer -- confirm.
+#define REC_BUF_SIZE_IN_SAMPLES 480
+#define PLAY_BUF_SIZE_IN_SAMPLES 480
+
+#define N_MAX_INTERFACES 4
+#define N_PLAY_QUEUE_BUFFERS 5
+#define N_REC_QUEUE_BUFFERS 5
+#define N_REC_BUFFERS 20
+
+// Abstract sink/source interface connecting SLAudioDevice to the media layer:
+// the device pushes captured audio in and pulls render audio out through it.
+class SLAudioDeviceCallback
+{
+public:
+    // Called from the recorder callback with 10 ms of captured PCM.
+    virtual int32_t RecordedDataIsAvailable(const void* audioSamples,
+                                            const uint32_t nSamples,
+                                            const uint8_t nBytesPerSample,
+                                            const uint8_t nChannels,
+                                            const uint32_t samplesPerSec) = 0;
+
+    // Called from the player callback to fetch up to nSamples of render PCM;
+    // the implementation reports how many it produced via nSamplesOut.
+    virtual int32_t NeedMorePlayData(const uint32_t nSamples,
+                                     const uint8_t nBytesPerSample,
+                                     const uint8_t nChannels,
+                                     const uint32_t samplesPerSec,
+                                     void* audioSamples,
+                                     uint32_t& nSamplesOut) = 0;
+
+protected:
+    virtual ~SLAudioDeviceCallback() {}
+};
+
+// Full-duplex audio device built on OpenSL ES (Android). Wraps engine,
+// player (render) and recorder (capture) objects behind a WebRTC-like API:
+// Init* / Start* / Stop* / Terminate. Audio is exchanged with the media layer
+// through the SLAudioDeviceCallback in 10 ms chunks of 16-bit PCM.
+class SLAudioDevice
+{
+public:
+    SLAudioDevice(const SLAudioDeviceCallback* pCallback = NULL);
+    virtual ~SLAudioDevice();
+
+public:
+    int SetCallback(const SLAudioDeviceCallback* pCallback);
+    int Init();
+    bool Initialized();
+    int SpeakerIsAvailable(bool *pAvailable);
+    int InitSpeaker();
+    int SetMaxSpeakerVolume(int nMaxSpeakerVolume);
+    int SetMinSpeakerVolume(int nMinSpeakerVolume);
+    int SetSpeakerVolume(int nSpeakerVolume);
+    int SetSpeakerOn(bool bSpeakerOn);
+    int PlayoutIsAvailable(bool *pAvailable);
+    int SetStereoPlayout(bool bEnabled);
+    int SetPlayoutBuffer(int nPlayoutBufferSize);
+    int SetPlayoutSampleRate(int nPlayoutSampleRate);
+    int InitPlayout();
+    int StereoPlayout(bool *pEnabled);
+    int PlayoutSampleRate(int *pPlayoutSampleRate);
+    int StartPlayout();
+    bool Playing();
+    int StopPlayout();
+    int RecordingIsAvailable(bool *pAvailable);
+    int MicrophoneIsAvailable(bool *pAvailable);
+    int InitMicrophone();
+    int SetMicrophoneVolume(int nMicrophoneVolume);
+    int SetStereoRecording(bool bEnabled);
+    int SetRecordingSampleRate(int nRecordingSampleRate);
+    int InitRecording();
+    int StereoRecording(bool *pEnabled);
+    int RecordingSampleRate(int *pRecordingSampleRate);
+    int StartRecording();
+    bool Recording();
+    int StopRecording();
+    int Terminate();
+
+private:
+    int PlayoutApplyNewConfig();
+    // Pulls render audio from m_pCallback; zero-fills (silence) when absent.
+    uint32_t PullPlayoutData(void* pAudioSamples, const uint32_t nSamples);
+    // Pushes captured audio to m_pCallback; drops it when absent.
+    void PushRecordingData(void* pAudioSamples, const uint32_t nSamples);
+#if AUDIO_OPENSLES_UNDER_ANDROID
+    // Buffer-queue callbacks, invoked on OpenSL ES system audio threads.
+    static void PlayerSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf queueItf, void *pContext);
+    static void RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf queueItf, void *pContext);
+#endif
+
+private:
+    const SLAudioDeviceCallback* m_pCallback;
+    int m_nMicDeviceId;
+    bool m_bInitialized;
+    bool m_bSpeakerInitialized;
+    bool m_bPlayoutInitialized;
+    bool m_bRecordingInitialized;
+    bool m_bMicrophoneInitialized;
+    bool m_bSpeakerOn;
+    bool m_bStereoPlayout;
+    bool m_bStereoRecording;
+    int m_nPlayoutBufferSize;    // millis
+    int m_nRecordingBufferSize;  // millis
+    int m_nPlayoutSampleRate;    // Hz
+    int m_nRecordingSampleRate;  // Hz
+    bool m_bPlaying;
+    bool m_bRecording;
+    int m_nSpeakerVolume;
+    int m_nMaxSpeakerVolume;
+    int m_nMinSpeakerVolume;
+
+#if AUDIO_OPENSLES_UNDER_ANDROID
+    // audio unit
+    SLObjectItf m_slEngineObject;
+
+    // playout device
+    SLObjectItf m_slPlayer;
+    SLEngineItf m_slEngine;
+    SLPlayItf m_slPlayerPlay;
+    SLAndroidSimpleBufferQueueItf m_slPlayerSimpleBufferQueue;
+    SLObjectItf m_slOutputMixObject;
+    SLVolumeItf m_slSpeakerVolume;
+
+    // recording device
+    SLObjectItf m_slRecorder;
+    SLRecordItf m_slRecorderRecord;
+    SLAudioIODeviceCapabilitiesItf m_slAudioIODeviceCapabilities;
+    SLAndroidSimpleBufferQueueItf m_slRecorderSimpleBufferQueue;
+    SLDeviceVolumeItf m_slMicVolume;
+
+    int _recQueueSeq;
+
+    // Playout buffer
+    uint8_t _playQueueBuffer[N_PLAY_QUEUE_BUFFERS][PLAY_BUF_SIZE_IN_SAMPLES << BYTES_PER_SAMPLE_LOG2];
+    int _playQueueSeq;
+    // Recording buffer
+    uint8_t _recQueueBuffer[N_REC_QUEUE_BUFFERS][REC_BUF_SIZE_IN_SAMPLES << BYTES_PER_SAMPLE_LOG2];
+    uint8_t _recBuffer[N_REC_BUFFERS][REC_BUF_SIZE_IN_SAMPLES << BYTES_PER_SAMPLE_LOG2];
+    int _recLength[N_REC_BUFFERS];
+    int _recSeqNumber[N_REC_BUFFERS];
+    int _recCurrentSeq;
+    // Current total size all data in buffers, used for delay estimate
+    int _recBufferTotalSize;
+#endif
+};
+
+#endif /* _DOUBANGO_AUDIO_OPENSLES_SLDEVICE_H */
diff --git a/plugins/audio_opensles/audio_opensles_device_impl.cxx b/plugins/audio_opensles/audio_opensles_device_impl.cxx
new file mode 100644
index 0000000..8e32d91
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_device_impl.cxx
@@ -0,0 +1,58 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_opensles_device_impl.h"
+
+#include "audio_opensles_producer.h"
+#include "audio_opensles_consumer.h"
+
+// Default constructor.
+// BUGFIX: explicitly zero the wrapped consumer/producer pointers. The original
+// left them uninitialized, so the NULL checks in RecordedDataIsAvailable() and
+// NeedMorePlayData() read indeterminate values (undefined behavior) whenever a
+// device callback fired before SetConsumer()/SetProducer() was called.
+SLAudioDeviceCallbackImpl::SLAudioDeviceCallbackImpl():
+SLAudioDeviceCallback()
+, m_pConsumer(NULL)
+, m_pProducer(NULL)
+{
+}
+
+// Destructor: nothing to release here — the consumer/producer pointers are
+// weak, unowned references (see audio_opensles_device_impl.h).
+SLAudioDeviceCallbackImpl::~SLAudioDeviceCallbackImpl()
+{
+}
+
+// Invoked by the audio device each time a 10ms chunk of microphone samples is
+// available. Forwards the chunk to the wrapped producer; when no producer has
+// been attached yet the data is silently dropped and 0 is returned.
+int32_t SLAudioDeviceCallbackImpl::RecordedDataIsAvailable(const void* audioSamples,
+                                                       const uint32_t nSamples,
+                                                       const uint8_t nBytesPerSample,
+                                                       const uint8_t nChannels,
+                                                       const uint32_t samplesPerSec)
+{
+	if(!m_pProducer){
+		AUDIO_OPENSLES_DEBUG_WARN("No wrapped producer");
+		return 0;
+	}
+	// delegate validation and buffering to the producer module
+	return audio_producer_opensles_handle_data_10ms(m_pProducer, audioSamples, nSamples, nBytesPerSample, samplesPerSec, nChannels);
+}
+
+
+// Invoked by the audio device when the playout path needs the next 10ms of
+// audio. Pulls decoded samples from the wrapped consumer into 'audioSamples';
+// 'nSamplesOut' receives the number of samples actually written. When no
+// consumer has been attached yet, nothing is written and 0 is returned.
+int32_t SLAudioDeviceCallbackImpl::NeedMorePlayData(const uint32_t nSamples,
+                                                const uint8_t nBytesPerSample,
+                                                const uint8_t nChannels,
+                                                const uint32_t samplesPerSec,
+                                                void* audioSamples,
+                                                uint32_t& nSamplesOut)
+{
+	if(!m_pConsumer){
+		AUDIO_OPENSLES_DEBUG_WARN("No wrapped consumer");
+		return 0;
+	}
+	return audio_consumer_opensles_get_data_10ms(m_pConsumer, audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, nSamplesOut);
+} \ No newline at end of file
diff --git a/plugins/audio_opensles/audio_opensles_device_impl.h b/plugins/audio_opensles/audio_opensles_device_impl.h
new file mode 100644
index 0000000..8168fe9
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_device_impl.h
@@ -0,0 +1,51 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef _DOUBANGO_AUDIO_OPENSLES_SLDEVICE_IMPL_H
+#define _DOUBANGO_AUDIO_OPENSLES_SLDEVICE_IMPL_H
+
+#include "audio_opensles_config.h"
+#include "audio_opensles_device.h"
+
+// Concrete SLAudioDeviceCallback that bridges OpenSL ES device callbacks to
+// the doubango consumer (playout) and producer (capture) plugin objects.
+class SLAudioDeviceCallbackImpl : public SLAudioDeviceCallback
+{
+public:
+	SLAudioDeviceCallbackImpl();
+	virtual ~SLAudioDeviceCallbackImpl();
+
+	// Forwards 10ms of recorded (microphone) samples to the wrapped producer.
+	virtual int32_t RecordedDataIsAvailable(const void* audioSamples,
+                                    const uint32_t nSamples,
+                                    const uint8_t nBytesPerSample,
+                                    const uint8_t nChannels,
+                                    const uint32_t samplesPerSec);
+
+	// Fills 'audioSamples' with 10ms of playout data from the wrapped consumer.
+	virtual int32_t NeedMorePlayData(const uint32_t nSamples,
+                            const uint8_t nBytesPerSample,
+                            const uint8_t nChannels,
+                            const uint32_t samplesPerSec,
+                            void* audioSamples,
+                            uint32_t& nSamplesOut);
+
+	inline void SetConsumer(const struct audio_consumer_opensles_s* pConsumer){ m_pConsumer = pConsumer; }
+	inline void SetProducer(const struct audio_producer_opensles_s* pProducer){ m_pProducer = pProducer; }
+
+private:
+	const struct audio_consumer_opensles_s* m_pConsumer; // must be const and must not take a reference (weak pointer, owned elsewhere)
+	const struct audio_producer_opensles_s* m_pProducer; // must be const and must not take a reference (weak pointer, owned elsewhere)
+};
+
+#endif /* _DOUBANGO_AUDIO_OPENSLES_SLDEVICE_IMPL_H */
diff --git a/plugins/audio_opensles/audio_opensles_producer.cxx b/plugins/audio_opensles/audio_opensles_producer.cxx
new file mode 100644
index 0000000..69c06f6
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_producer.cxx
@@ -0,0 +1,239 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_opensles_producer.h"
+#include "audio_opensles.h"
+
+#include "tinydav/audio/tdav_producer_audio.h"
+
+#include "tsk_string.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+// OpenSL ES audio producer: accumulates 10ms capture chunks until a full
+// ptime-sized frame is ready for the encoder callback.
+typedef struct audio_producer_opensles_s
+{
+	TDAV_DECLARE_PRODUCER_AUDIO;
+
+	bool isMuted;                                    // when true, silence is produced instead of captured audio
+	audio_opensles_instance_handle_t* audioInstHandle; // shared audio device instance (refcounted)
+	struct{
+		void* ptr;   // accumulation buffer (ptime worth of audio)
+		int size;    // total buffer size in bytes
+		int index;   // current write offset in bytes
+	} buffer;
+}
+audio_producer_opensles_t;
+
+// Pushes one 10ms chunk of recorded audio into the producer's accumulation
+// buffer. Once a full ptime-sized frame has been buffered it is handed to the
+// registered encoder callback.
+// Returns 0 on success (or when no callback is registered), negative on
+// invalid/unexpected input.
+int audio_producer_opensles_handle_data_10ms(const audio_producer_opensles_t* _self, const void* audioSamples, int nSamples, int nBytesPerSample, int samplesPerSec, int nChannels)
+{
+	if(!_self || !audioSamples || !nSamples){
+		AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if(!TMEDIA_PRODUCER(_self)->enc_cb.callback){
+		AUDIO_OPENSLES_DEBUG_WARN("No callback function is registered for the producer");
+		return 0;
+	}
+	// exactly 10ms worth of samples is expected per call
+	if((nSamples != (samplesPerSec / 100))){
+		AUDIO_OPENSLES_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
+		return -2;
+	}
+	if((nBytesPerSample != (TMEDIA_PRODUCER(_self)->audio.bits_per_sample >> 3))){
+		AUDIO_OPENSLES_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
+		return -3;
+	}
+	if((nChannels != TMEDIA_PRODUCER(_self)->audio.channels)){
+		AUDIO_OPENSLES_DEBUG_ERROR("Recording - %d not the expected number of channels but should be %d", nChannels, TMEDIA_PRODUCER(_self)->audio.channels);
+		return -4;
+	}
+
+	// size of this chunk in BYTES (renamed: original 'nSamplesInBits' was misleading)
+	int nBytesIn10ms = (nSamples * nBytesPerSample);
+	if(_self->buffer.index + nBytesIn10ms > _self->buffer.size){
+		AUDIO_OPENSLES_DEBUG_ERROR("Buffer overflow");
+		return -5;
+	}
+
+	audio_producer_opensles_t* self = const_cast<audio_producer_opensles_t*>(_self);
+
+	if(self->isMuted){
+		// mute is emulated by feeding silence instead of the captured samples
+		memset((((uint8_t*)self->buffer.ptr) + self->buffer.index), 0, nBytesIn10ms);
+	}
+	else{
+		memcpy((((uint8_t*)self->buffer.ptr) + self->buffer.index), audioSamples, nBytesIn10ms);
+	}
+	self->buffer.index += nBytesIn10ms;
+
+	// deliver once a full ptime-sized frame has been accumulated
+	if(self->buffer.index == self->buffer.size){
+		self->buffer.index = 0;
+		TMEDIA_PRODUCER(self)->enc_cb.callback(TMEDIA_PRODUCER(self)->enc_cb.callback_data, self->buffer.ptr, self->buffer.size);
+	}
+
+	return 0;
+}
+
+
+/* ============ Media Producer Interface ================= */
+/* ============ Media Producer Interface ================= */
+// Generic parameter setter. Handles "mute" (emulated with silence, see
+// audio_producer_opensles_handle_data_10ms) and "volume"; everything else is
+// delegated to the base audio producer.
+// BUGFIX: validate the arguments before dereferencing — every sibling
+// interface function checks, but the original version here did not.
+static int audio_producer_opensles_set(tmedia_producer_t* _self, const tmedia_param_t* param)
+{
+	audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
+	if(!self || !param){
+		AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if(param->plugin_type == tmedia_ppt_producer){
+		if(param->value_type == tmedia_pvt_int32){
+			if(tsk_striequals(param->key, "mute")){
+				self->isMuted = (*((int32_t*)param->value) != 0);
+				// Mute not supported on android -> send silence when needed
+				return 0;
+			}
+			else if(tsk_striequals(param->key, "volume")){
+				return audio_opensles_instance_set_microphone_volume(self->audioInstHandle, *((int32_t*)param->value));
+			}
+		}
+	}
+	return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
+}
+// Prepares the producer for the negotiated codec: creates (or refs) the shared
+// audio instance, copies the codec's encoding parameters, lets the device
+// adjust them, then sizes the internal accumulation buffer accordingly.
+static int audio_producer_opensles_prepare(tmedia_producer_t* _self, const tmedia_codec_t* codec)
+{
+	audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
+	if(!self || !codec){
+		AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	// create audio instance
+	if(!(self->audioInstHandle = audio_opensles_instance_create(TMEDIA_PRODUCER(self)->session_id))){
+		AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio instance handle");
+		return -2;
+	}
+
+	// check that ptime is mutiple of 10 (device always delivers 10ms chunks)
+	if((codec->plugin->audio.ptime % 10)){
+		AUDIO_OPENSLES_DEBUG_ERROR("ptime=%d not multiple of 10", codec->plugin->audio.ptime);
+		return -3;
+	}
+
+	// init input parameters from the codec
+	TMEDIA_PRODUCER(self)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
+	TMEDIA_PRODUCER(self)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
+	TMEDIA_PRODUCER(self)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
+
+	AUDIO_OPENSLES_DEBUG_INFO("audio_producer_opensles_prepare(channels=%d, rate=%d, ptime=%d)", codec->plugin->audio.channels, codec->plugin->rate, codec->plugin->audio.ptime);
+
+	// prepare playout device and update output parameters
+	// NOTE(review): prepare may rewrite audio.rate/channels with the device's
+	// actual capabilities, which is why the buffer is sized only afterwards.
+	int ret;
+	ret = audio_opensles_instance_prepare_producer(self->audioInstHandle, &_self);
+
+	// now that the producer is prepared we can initialize internal buffer using device caps
+	if(ret == 0){
+		// allocate buffer: bytes per ptime = samples-per-ptime * bytes-per-sample
+		int xsize = ((TMEDIA_PRODUCER(self)->audio.ptime * TMEDIA_PRODUCER(self)->audio.rate) / 1000) * (TMEDIA_PRODUCER(self)->audio.bits_per_sample >> 3);
+		AUDIO_OPENSLES_DEBUG_INFO("producer buffer xsize = %d", xsize);
+		if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))){
+			AUDIO_OPENSLES_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
+			self->buffer.size = 0;
+			return -1;
+		}
+		self->buffer.size = xsize;
+		self->buffer.index = 0;
+	}
+	return ret;
+}
+
+// Starts microphone capture on the shared audio instance.
+static int audio_producer_opensles_start(tmedia_producer_t* _self)
+{
+	audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
+	if(!self){
+		AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	AUDIO_OPENSLES_DEBUG_INFO("audio_producer_opensles_start");
+
+	return audio_opensles_instance_start_producer(self->audioInstHandle);
+}
+
+// Pause is a no-op for the OpenSL ES capture device; report success so the
+// media session can continue.
+static int audio_producer_opensles_pause(tmedia_producer_t* self)
+{
+	return 0;
+}
+
+// Stops microphone capture; also invoked from the destructor as a safety net.
+static int audio_producer_opensles_stop(tmedia_producer_t* _self)
+{
+	audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
+	if(!self){
+		AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	return audio_opensles_instance_stop_producer(self->audioInstHandle);
+}
+
+
+//
+// SLES audio producer object definition
+//
+/* constructor */
+/* constructor */
+// tsk_object constructor: memory is zero-initialized by the object framework,
+// so only the base audio producer needs explicit initialization.
+static tsk_object_t* audio_producer_opensles_ctor(tsk_object_t *_self, va_list * app)
+{
+	audio_producer_opensles_t *self = (audio_producer_opensles_t *)_self;
+	if(self){
+		/* init base */
+		tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(self));
+		/* init self */
+
+	}
+	return self;
+}
+/* destructor */
+/* destructor */
+// tsk_object destructor: stops capture, releases the shared audio instance
+// and the accumulation buffer, then deinitializes the base producer.
+static tsk_object_t* audio_producer_opensles_dtor(tsk_object_t *_self)
+{
+	audio_producer_opensles_t *self = (audio_producer_opensles_t *)_self;
+	if(self){
+		/* stop */
+		audio_producer_opensles_stop(TMEDIA_PRODUCER(self));
+		/* deinit self */
+		if(self->audioInstHandle){
+			audio_opensles_instance_destroy(&self->audioInstHandle);
+		}
+		TSK_FREE(self->buffer.ptr);
+
+		/* deinit base */
+		tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(self));
+	}
+
+	return self;
+}
+/* object definition */
+static const tsk_object_def_t audio_producer_opensles_def_s =
+{
+ sizeof(audio_producer_opensles_t),
+ audio_producer_opensles_ctor,
+ audio_producer_opensles_dtor,
+ tdav_producer_audio_cmp,
+};
+/* plugin definition*/
+static const tmedia_producer_plugin_def_t audio_producer_opensles_plugin_def_s =
+{
+ &audio_producer_opensles_def_s,
+
+ tmedia_audio,
+ "SLES audio producer",
+
+ audio_producer_opensles_set,
+ audio_producer_opensles_prepare,
+ audio_producer_opensles_start,
+ audio_producer_opensles_pause,
+ audio_producer_opensles_stop
+};
+const tmedia_producer_plugin_def_t *audio_producer_opensles_plugin_def_t = &audio_producer_opensles_plugin_def_s; \ No newline at end of file
diff --git a/plugins/audio_opensles/audio_opensles_producer.h b/plugins/audio_opensles/audio_opensles_producer.h
new file mode 100644
index 0000000..0c4c756
--- /dev/null
+++ b/plugins/audio_opensles/audio_opensles_producer.h
@@ -0,0 +1,32 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef _DOUBANGO_AUDIO_OPENSLES_PRODUCER_H
+#define _DOUBANGO_AUDIO_OPENSLES_PRODUCER_H
+
+#include "audio_opensles_config.h"
+
+AUDIO_OPENSLES_BEGIN_DECLS
+
+extern const struct tmedia_producer_plugin_def_s *audio_producer_opensles_plugin_def_t;
+
+// handle recorded data
+int audio_producer_opensles_handle_data_10ms(const struct audio_producer_opensles_s* self, const void* audioSamples, int nSamples, int nBytesPerSample, int samplesPerSec, int nChannels);
+
+AUDIO_OPENSLES_END_DECLS
+
+#endif /* _DOUBANGO_AUDIO_OPENSLES_PRODUCER_H */
diff --git a/plugins/audio_opensles/dllmain.cxx b/plugins/audio_opensles/dllmain.cxx
new file mode 100644
index 0000000..e6b34f4
--- /dev/null
+++ b/plugins/audio_opensles/dllmain.cxx
@@ -0,0 +1,35 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include <windows.h>
+
+// Standard Win32 DLL entry point; no per-process/per-thread setup is needed.
+BOOL APIENTRY DllMain( HMODULE hModule,
+                       DWORD  ul_reason_for_call,
+                       LPVOID lpReserved
+					 )
+{
+	switch (ul_reason_for_call)
+	{
+	case DLL_PROCESS_ATTACH:
+	case DLL_THREAD_ATTACH:
+	case DLL_THREAD_DETACH:
+	case DLL_PROCESS_DETACH:
+		break;
+	}
+	return TRUE;
+}
+
diff --git a/plugins/audio_opensles/droid-makefile b/plugins/audio_opensles/droid-makefile
new file mode 100644
index 0000000..f92c3c9
--- /dev/null
+++ b/plugins/audio_opensles/droid-makefile
@@ -0,0 +1,45 @@
+APP := lib$(PROJECT).$(EXT)
+
+DOUBANGO_INSTALL_INC := $(DOUBANGO_INSTALL_HOME)/include
+DOUBANGO_INSTALL_LIB := $(DOUBANGO_INSTALL_HOME)/lib
+SLES_INC := $(SLES_INSTALL_HOME)/include
+SLES_LIB := $(SLES_INSTALL_HOME)/lib
+
+################################
+
+CFLAGS := $(CFLAGS_LIB) -DANDROID=1 -I$(SLES_INC) -I$(DOUBANGO_INSTALL_INC)/tinysak -I$(DOUBANGO_INSTALL_INC)/tinymedia -I$(DOUBANGO_INSTALL_INC)/tinysdp -I$(DOUBANGO_INSTALL_INC)/tinyrtp -I$(DOUBANGO_INSTALL_INC)/tinydav
+
+LDFLAGS := $(LDFLAGS_LIB) -L$(SLES_LIB) -L$(DOUBANGO_INSTALL_LIB) -ltinySAK -ltinyMEDIA -ltinySDP
+# -L$(DOUBANGO_SRC_HOME)/thirdparties/andy/$(CPU)/lib/dist -lm -ldl -llog -lgcc -lstdc++ -lOpenSLES -lgnustl
+
+all: $(APP)
+
+OBJS = \
+ audio_opensles.o \
+ audio_opensles_consumer.o \
+ audio_opensles_device_impl.o \
+ audio_opensles_producer.o \
+ audio_opensles_device.o \
+ ../../tinyDAV/src/audio/tdav_consumer_audio.o \
+ ../../tinyDAV/src/audio/tdav_producer_audio.o \
+
+$(APP): $(OBJS)
+ifeq ($(EXT), a)
+ $(AR) rcs $@ $^
+else
+ $(CC) $(LDFLAGS) -o $@ $^
+endif
+
+%.o: %.c
+ $(CC) -c $(INCLUDE) $(CFLAGS) $< -o $@
+
+%.o: %.cxx
+ $(CXX) -c $(INCLUDE) $(CFLAGS) $< -o $@
+
+install: $(APP)
+ $(ANDROID_SDK_ROOT)/tools/adb remount
+ $(ANDROID_SDK_ROOT)/tools/adb push $(APP) $(LIB_DIR)/$(APP)
+ $(ANDROID_SDK_ROOT)/tools/adb shell chmod 777 $(LIB_DIR)/$(APP)
+
+clean:
+ @rm -f $(OBJS) $(APP) \ No newline at end of file
diff --git a/plugins/audio_opensles/plugin_audio_opensles.pc.in b/plugins/audio_opensles/plugin_audio_opensles.pc.in
new file mode 100644
index 0000000..7368cdc
--- /dev/null
+++ b/plugins/audio_opensles/plugin_audio_opensles.pc.in
@@ -0,0 +1,14 @@
+prefix = @prefix@
+exec_prefix = @exec_prefix@
+libdir = @libdir@
+includedir = @includedir@
+
+Name : libplugin_audio_opensles
+Description : Doubango Telecom plugin_audio_opensles (OpenSL-ES) plugin
+Version : @PACKAGE_VERSION@
+Requires:
+Requires.private: tinySAK = @PACKAGE_VERSION@ tinyNET = @PACKAGE_VERSION@ tinySDP = @PACKAGE_VERSION@ tinyMEDIA = @PACKAGE_VERSION@
+Conflicts:
+Cflags : -I${includedir}/plugin_audio_opensles
+Libs : -L${libdir} -lplugin_audio_opensles
+Libs.private: @LIBSSL_LIBADD@ \ No newline at end of file
diff --git a/plugins/audio_webrtc/audio_webrtc.cxx b/plugins/audio_webrtc/audio_webrtc.cxx
new file mode 100644
index 0000000..167166d
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc.cxx
@@ -0,0 +1,562 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_webrtc.h"
+
+#include "audio_webrtc_consumer.h"
+#include "audio_webrtc_producer.h"
+#include "audio_webrtc_transport.h"
+
+#include <webrtc/audio_device_config.h>
+#include <webrtc/audio_device_impl.h>
+
+#include "tinymedia/tmedia_consumer.h"
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_list.h"
+#include "tsk_safeobj.h"
+#include "tsk_debug.h"
+
+using namespace webrtc;
+
+#define kAudioDeviceModuleId 444
+
+#if DOUBANGO_AUDIO_WEBRTC_UNDER_ANDROID
+// https://groups.google.com/group/android-ndk/browse_thread/thread/a1667f28162cf69b/8ef3a171df7f8dfe
+extern "C"
+{
+ void *__dso_handle = NULL;
+}
+#endif
+
+typedef enum PLUGIN_INDEX_E
+{
+ PLUGIN_INDEX_AUDIO_CONSUMER,
+ PLUGIN_INDEX_AUDIO_PRODUCER,
+ PLUGIN_INDEX_COUNT
+}
+PLUGIN_INDEX_T;
+
+
+// Plugin API: number of definitions exported by this module.
+int __plugin_get_def_count()
+{
+	return PLUGIN_INDEX_COUNT;
+}
+
+// Plugin API: kind (consumer/producer) of the definition at 'index'.
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+	switch(index){
+		case PLUGIN_INDEX_AUDIO_CONSUMER: return tsk_plugin_def_type_consumer;
+		case PLUGIN_INDEX_AUDIO_PRODUCER: return tsk_plugin_def_type_producer;
+		default:
+			{
+				DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
+				return tsk_plugin_def_type_none;
+			}
+	}
+}
+
+// Plugin API: media type of the definition at 'index' (always audio here).
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+	switch(index){
+		case PLUGIN_INDEX_AUDIO_CONSUMER:
+		case PLUGIN_INDEX_AUDIO_PRODUCER:
+			{
+				return tsk_plugin_def_media_type_audio;
+			}
+		default:
+			{
+				DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
+				return tsk_plugin_def_media_type_none;
+			}
+	}
+}
+
+// Plugin API: definition pointer for the consumer/producer at 'index'.
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+	switch(index){
+		case PLUGIN_INDEX_AUDIO_CONSUMER:
+			{
+				return audio_consumer_webrtc_plugin_def_t;
+			}
+		case PLUGIN_INDEX_AUDIO_PRODUCER:
+			{
+				return audio_producer_webrtc_plugin_def_t;
+			}
+		default:
+			{
+				DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
+				return tsk_null;
+			}
+	}
+}
+
+//
+// WebRTC AudioInstance
+//
+
+// One shared WebRTC audio-device instance per media session. The consumer and
+// producer of the same session share this object (keyed by sessionId and
+// refcounted through the __audioInstances list).
+typedef struct audio_webrtc_instance_s
+{
+	TSK_DECLARE_OBJECT;
+
+	uint64_t sessionId;   // media session this instance is bound to
+
+	bool isStarted;
+
+	bool isConsumerPrepared;
+	bool isConsumerStarted;
+	bool isProducerPrepared;
+	bool isProducerStarted;
+
+	bool isSpeakerAvailable;
+	bool isPlayoutAvailable;
+	bool isRecordingAvailable;
+
+	AudioDeviceModule* device;      // WebRTC audio device module (AddRef'd)
+	AudioTransportImpl* transport;  // bridges device callbacks to consumer/producer
+
+	TSK_DECLARE_SAFEOBJ;
+}
+audio_webrtc_instance_t;
+
+static audio_webrtc_instances_L_t* __audioInstances = tsk_null;
+
+// tsk_object constructor: only the lock needs explicit setup; all other
+// members are zero-initialized by the object framework.
+static tsk_object_t* audio_webrtc_instance_ctor(tsk_object_t * self, va_list * app)
+{
+	audio_webrtc_instance_t* audioInstance = (audio_webrtc_instance_t*)self;
+	if(audioInstance){
+		tsk_safeobj_init(audioInstance);
+	}
+	return self;
+}
+// tsk_object destructor: detaches the callback, terminates and releases the
+// device, then deletes the transport bridge.
+static tsk_object_t* audio_webrtc_instance_dtor(tsk_object_t * self)
+{
+	DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("Audio Instance destroyed");
+	audio_webrtc_instance_t* audioInstance = (audio_webrtc_instance_t*)self;
+	if(audioInstance){
+		tsk_safeobj_lock(audioInstance);
+		if(audioInstance->device){
+			audioInstance->device->RegisterAudioCallback(tsk_null);
+			audioInstance->device->Terminate();
+			audioInstance->device->Release();//FIXME: must be deleted?
+			audioInstance->device = tsk_null;
+		}
+		if(audioInstance->transport){
+			delete audioInstance->transport;
+			audioInstance->transport = tsk_null;
+		}
+		tsk_safeobj_unlock(audioInstance);
+
+		tsk_safeobj_deinit(audioInstance);
+	}
+	return self;
+}
+// Identity comparator for the instance list.
+// BUGFIX: the original cast both pointers to 'int' and subtracted them, which
+// truncates addresses on 64-bit platforms (two distinct instances could
+// compare equal) and risks signed overflow. Compare the addresses directly.
+static int audio_webrtc_instance_cmp(const tsk_object_t *_ai1, const tsk_object_t *_ai2)
+{
+	return (_ai1 == _ai2) ? 0 : ((_ai1 < _ai2) ? -1 : +1);
+}
+// tsk_object vtable for the shared audio instance.
+static const tsk_object_def_t audio_webrtc_instance_def_s =
+{
+	sizeof(audio_webrtc_instance_t),
+	audio_webrtc_instance_ctor,
+	audio_webrtc_instance_dtor,
+	audio_webrtc_instance_cmp,
+};
+const tsk_object_def_t *audio_webrtc_instance_def_t = &audio_webrtc_instance_def_s;
+
+
+// Returns the shared audio instance for 'sessionId', creating it on first use.
+// When an instance for the session already exists, its refcount is increased
+// (consumer and producer of one session share a single device). The global
+// list lock serializes lookup/insert. Returns tsk_null on failure.
+audio_webrtc_instance_handle_t* audio_webrtc_instance_create(uint64_t sessionId)
+{
+	audio_webrtc_instance_t* audioInstance = tsk_null;
+
+	// create list used to hold instances
+	if(!__audioInstances && !(__audioInstances = tsk_list_create())){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create new list");
+		return tsk_null;
+	}
+
+	//= lock the list
+	tsk_list_lock(__audioInstances);
+
+	// find the instance from the list
+	const tsk_list_item_t* item;
+	tsk_list_foreach(item, __audioInstances){
+		if(((audio_webrtc_instance_t*)item->data)->sessionId == sessionId){
+			audioInstance = (audio_webrtc_instance_t*)tsk_object_ref(item->data);
+			break;
+		}
+	}
+
+	if(!audioInstance){
+		audio_webrtc_instance_t* _audioInstance;
+		if(!(_audioInstance = (audio_webrtc_instance_t*)tsk_object_new(&audio_webrtc_instance_def_s))){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create new audio instance");
+			goto done;
+		}
+
+		if(!(_audioInstance->device = AudioDeviceModuleImpl::Create(kAudioDeviceModuleId))){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio device");
+			TSK_OBJECT_SAFE_FREE(_audioInstance);
+			goto done;
+		}
+		_audioInstance->device->AddRef();
+
+		if(!(_audioInstance->transport = new AudioTransportImpl(_audioInstance->device))){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio transport");
+			TSK_OBJECT_SAFE_FREE(_audioInstance);
+			goto done;
+		}
+		if((_audioInstance->device->RegisterAudioCallback(_audioInstance->transport))){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::RegisterAudioCallback() failed");
+			TSK_OBJECT_SAFE_FREE(_audioInstance);
+			goto done;
+		}
+
+		if((_audioInstance->device->Init())){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::Init() failed");
+			TSK_OBJECT_SAFE_FREE(_audioInstance);
+			goto done;
+		}
+
+		// the list takes ownership of the initial reference
+		_audioInstance->sessionId = sessionId;
+		audioInstance = _audioInstance;
+		tsk_list_push_back_data(__audioInstances, (void**)&_audioInstance);
+	}
+
+done:
+	//= unlock the list
+	tsk_list_unlock(__audioInstances);
+
+	return audioInstance;
+}
+
+// Prepares the playout side: binds the consumer to the transport, configures
+// the default playout device (stereo flag, 10ms buffer, sample rate), then
+// initializes playout and writes the device's actual channel count and rate
+// back into the consumer's output parameters.
+// NOTE(review): most setup failures are logged but treated as non-fatal; only
+// InitPlayout() aborts — 'ret' at 'done:' reflects the last call made.
+int audio_webrtc_instance_prepare_consumer(audio_webrtc_instance_handle_t* _self, tmedia_consumer_t** _consumer)
+{
+	audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+	if(!self || !self->device || !self->transport || !_consumer || !*_consumer){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+		return -1;
+	}
+
+	if(self->isConsumerPrepared){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Consumer already prepared");
+		return 0;
+	}
+
+	int ret;
+	bool _bool;
+
+	tsk_safeobj_lock(self);
+
+	self->transport->SetConsumer((const struct audio_consumer_webrtc_s*)*_consumer);
+
+	if((ret = self->device->SetPlayoutDevice(DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule->SetPlayoutDevice(%d) failed", DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT);
+	}
+
+	if((ret = self->device->SpeakerIsAvailable(&_bool))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SpeakerIsAvailable() failed with error code=%d", ret);
+	}
+	else{
+		if(!_bool){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SpeakerIsAvailable() returned false");
+		}
+		self->isSpeakerAvailable = _bool;
+	}
+
+	if((ret = self->device->InitSpeaker())){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("InitSpeaker() failed with error code=%d", ret);
+	}
+
+	if((ret = self->device->PlayoutIsAvailable(&_bool))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutIsAvailable() failed with error code =%d", ret);
+	}
+	else{
+		if(!_bool){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutIsAvailable() returned false");
+		}
+		self->isPlayoutAvailable = _bool;
+	}
+
+	if((ret = self->device->SetStereoPlayout(((*_consumer)->audio.in.channels == 2)))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetStereoPlayout(%d==2) failed with error code=%d", (*_consumer)->audio.in.channels, ret);
+	}
+
+	//if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, (*_consumer)->audio.ptime))){
+	//	DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", (*_consumer)->audio.ptime, ret);
+	//}
+	// always request 10ms buffers. In all cases WebRTC don't support anything else
+	if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, 10))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", 10, ret);
+	}
+
+	uint32_t playoutSampleRate = (*_consumer)->audio.out.rate ? (*_consumer)->audio.out.rate : (*_consumer)->audio.in.rate;
+	if((ret = self->device->SetPlayoutSampleRate(playoutSampleRate))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutSampleRate(%d) failed with error code=%d", playoutSampleRate, ret);
+	}
+
+	if((ret = self->device->InitPlayout())){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::InitPlayout() failed with error code = %d", ret);
+		goto done;
+	}
+
+	// init output parameters (report what the device actually delivers)
+	if((ret = self->device->StereoPlayout(&_bool))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StereoPlayout() failed with error code=%d", ret);
+	}
+	else{
+		(*_consumer)->audio.out.channels = (_bool ? 2 : 1);
+	}
+	if((ret = self->device->PlayoutSampleRate(&playoutSampleRate))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutSampleRate() failed with error code=%d", ret);
+	}
+	else{
+		(*_consumer)->audio.out.rate = playoutSampleRate;
+	}
+
+done:
+	tsk_safeobj_unlock(self);
+
+	self->isConsumerPrepared = (ret == 0);
+
+	return ret;
+}
+
+// Prepares the recording side: binds the producer to the transport, configures
+// the default recording device (microphone, stereo flag, sample rate), then
+// initializes recording and writes the device's actual channel count and rate
+// back into the producer's parameters.
+// NOTE(review): mirrors prepare_consumer — setup failures before
+// InitRecording() are logged but non-fatal.
+int audio_webrtc_instance_prepare_producer(audio_webrtc_instance_handle_t* _self, tmedia_producer_t** _producer)
+{
+	audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+	if(!self || !self->device || !self->transport || !_producer || !*_producer){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+		return -1;
+	}
+
+	if(self->isProducerPrepared){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Producer already prepared");
+		return 0;
+	}
+
+	int ret;
+	bool _bool;
+
+	tsk_safeobj_lock(self);
+
+	self->transport->SetProducer((const struct audio_producer_webrtc_s*)*_producer);
+
+	if((ret = self->device->SetRecordingDevice(DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule->SetRecordingDevice(%d) failed", DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT);
+	}
+
+	if((ret = self->device->RecordingIsAvailable(&_bool))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingIsAvailable() failed with error code =%d", ret);
+	}
+	else{
+		if(!_bool){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingIsAvailable() returned false");
+		}
+		self->isRecordingAvailable = _bool;
+	}
+
+	if((ret = self->device->MicrophoneIsAvailable(&_bool))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("MicrophoneIsAvailable() failed with error code =%d", ret);
+	}
+	else{
+		if(!_bool){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("MicrophoneIsAvailable() returned false");
+		}
+		else{
+			if((ret = self->device->InitMicrophone())){
+				DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("InitMicrophone() failed with error code =%d", ret);
+			}
+		}
+	}
+
+	if((ret = self->device->SetStereoRecording(((*_producer)->audio.channels == 2)))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetStereoRecording(%d==2) failed with error code=%d", (*_producer)->audio.channels, ret);
+	}
+
+	uint32_t recordingSampleRate = (*_producer)->audio.rate;
+	if((ret = self->device->SetRecordingSampleRate(recordingSampleRate))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetRecordingSampleRate(%d) failed with error code=%d", recordingSampleRate, ret);
+	}
+
+	if((ret = self->device->InitRecording())){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::InitRecording() failed with error code = %d", ret);
+		goto done;
+	}
+
+	// init output parameters (report what the device actually captures)
+	if((ret = self->device->StereoRecording(&_bool))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StereoRecording() failed with error code=%d", ret);
+	}
+	else{
+		(*_producer)->audio.channels = (_bool ? 2 : 1);
+	}
+	if((ret = self->device->RecordingSampleRate(&recordingSampleRate))){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingSampleRate() failed with error code=%d", ret);
+	}
+	else{
+		(*_producer)->audio.rate = recordingSampleRate;
+	}
+
+done:
+	tsk_safeobj_unlock(self);
+
+	self->isProducerPrepared = (ret == 0);
+
+	return ret;
+}
+
+// Starts playout if the consumer was prepared and playout is available.
+// Returns 0 only when the device reports it is actually playing.
+int audio_webrtc_instance_start_consumer(audio_webrtc_instance_handle_t* _self)
+{
+	audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+	if(!self || !self->device || !self->transport){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+		return -1;
+	}
+
+	tsk_safeobj_lock(self);
+	if(!self->isConsumerPrepared){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Consumer not prepared");
+		goto done;
+	}
+
+	if(self->isConsumerStarted){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Consumer already started");
+		goto done;
+	}
+
+	if(self->isPlayoutAvailable){
+		int ret;
+		if((ret = self->device->StartPlayout())){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StartPlayout() failed with error code = %d", ret);
+		}
+
+		// query the device for the actual state rather than trusting StartPlayout()
+		self->isConsumerStarted = self->device->Playing();
+		// BUGFIX: the original logged 'isConsumerPrepared' here instead of the
+		// started state that was just computed.
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("isPlaying=%s", (self->isConsumerStarted ? "true" : "false"));
+	}
+
+done:
+	tsk_safeobj_unlock(self);
+	return (self->isConsumerStarted ? 0 : -1);
+}
+
+// Starts recording if the producer was prepared and recording is available.
+// Returns 0 only when the device reports it is actually recording.
+int audio_webrtc_instance_start_producer(audio_webrtc_instance_handle_t* _self)
+{
+	audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+	if(!self || !self->device || !self->transport){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+		return -1;
+	}
+
+	tsk_safeobj_lock(self);
+	if(!self->isProducerPrepared){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Producer not prepared");
+		goto done;
+	}
+
+	if(self->isProducerStarted){
+		// BUGFIX: message said "Consumer already started" (copy/paste from the
+		// consumer path) — this is the producer path.
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Producer already started");
+		goto done;
+	}
+
+	if(self->isRecordingAvailable){
+		int ret;
+		if((ret = self->device->StartRecording())){
+			DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StartRecording() failed with error code = %d", ret);
+		}
+
+		// query the device for the actual state rather than trusting StartRecording()
+		self->isProducerStarted = self->device->Recording();
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("isRecording=%s", (self->isProducerStarted ? "true" : "false"));
+	}
+
+done:
+	tsk_safeobj_unlock(self);
+	// (removed an unreachable duplicate 'return 0;' that followed this return)
+	return (self->isProducerStarted ? 0 : -1);
+}
+
+// Stops playout. Idempotent: returns 0 immediately when not started.
+// Returns 0 when the device is no longer playing, -1 otherwise.
+int audio_webrtc_instance_stop_consumer(audio_webrtc_instance_handle_t* _self)
+{
+	audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+	if(!self || !self->device || !self->transport){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+		return -1;
+	}
+
+	tsk_safeobj_lock(self);
+
+	if(!self->isConsumerStarted){
+		goto done;
+	}
+
+	int ret;
+	if((ret = self->device->StopPlayout())){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StopPlayout() failed with error code = %d", ret);
+	}
+	else{
+		// refresh the flag from the device's actual state
+		self->isConsumerStarted = self->device->Playing();
+	}
+
+done:
+	tsk_safeobj_unlock(self);
+	return (self->isConsumerStarted ? -1 : 0);
+}
+
+// Stops recording. Idempotent: returns 0 immediately when not started.
+// Returns 0 when the device is no longer recording, -1 otherwise.
+int audio_webrtc_instance_stop_producer(audio_webrtc_instance_handle_t* _self)
+{
+	audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+	if(!self || !self->device || !self->transport){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+		return -1;
+	}
+
+	tsk_safeobj_lock(self);
+
+	if(!self->isProducerStarted){
+		goto done;
+	}
+
+	int ret;
+	if((ret = self->device->StopRecording())){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StopRecording() failed with error code = %d", ret);
+	}
+	else{
+		// refresh the flag from the device's actual state
+		self->isProducerStarted = self->device->Recording();
+	}
+
+done:
+	tsk_safeobj_unlock(self);
+	return (self->isProducerStarted ? -1 : 0);
+}
+
+// Releases one reference on the shared instance. When this is the last user
+// (refcount == 1, the reference held by the global list), the instance is
+// removed from the list, which drops that final reference and destroys it.
+int audio_webrtc_instance_destroy(audio_webrtc_instance_handle_t** _self){
+	if(!_self || !*_self){
+		DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	tsk_list_lock(__audioInstances);
+	if(tsk_object_get_refcount(*_self)==1){
+		tsk_list_remove_item_by_data(__audioInstances, *_self);
+	}
+	else {
+		tsk_object_unref(*_self);
+	}
+	tsk_list_unlock(__audioInstances);
+	*_self = tsk_null;   // caller's handle is always invalidated
+	return 0;
+}
diff --git a/plugins/audio_webrtc/audio_webrtc.h b/plugins/audio_webrtc/audio_webrtc.h
new file mode 100644
index 0000000..115a243
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc.h
@@ -0,0 +1,47 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef DOUBANGO_AUDIO_WEBRTC_H
+#define DOUBANGO_AUDIO_WEBRTC_H
+
+#include "audio_webrtc_config.h"
+
+#include "tsk_plugin.h"
+
+DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS
+
+typedef void audio_webrtc_instance_handle_t;
+
+DOUBANGO_AUDIO_WEBRTC_API int __plugin_get_def_count();
+DOUBANGO_AUDIO_WEBRTC_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+DOUBANGO_AUDIO_WEBRTC_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+DOUBANGO_AUDIO_WEBRTC_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+
+audio_webrtc_instance_handle_t* audio_webrtc_instance_create(uint64_t session_id);
+int audio_webrtc_instance_prepare_consumer(audio_webrtc_instance_handle_t* self, struct tmedia_consumer_s** consumer);
+int audio_webrtc_instance_prepare_producer(audio_webrtc_instance_handle_t* _self, struct tmedia_producer_s** producer);
+int audio_webrtc_instance_start_consumer(audio_webrtc_instance_handle_t* self);
+int audio_webrtc_instance_start_producer(audio_webrtc_instance_handle_t* self);
+int audio_webrtc_instance_stop_consumer(audio_webrtc_instance_handle_t* self);
+int audio_webrtc_instance_stop_producer(audio_webrtc_instance_handle_t* self);
+int audio_webrtc_instance_destroy(audio_webrtc_instance_handle_t** self);
+
+DOUBANGO_AUDIO_WEBRTC_END_DECLS
+
+#endif /* DOUBANGO_AUDIO_WEBRTC_H */
+
+
diff --git a/plugins/audio_webrtc/audio_webrtc.vcproj b/plugins/audio_webrtc/audio_webrtc.vcproj
new file mode 100644
index 0000000..950e2b6
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc.vcproj
@@ -0,0 +1,269 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9.00"
+ Name="audio_webrtc"
+ ProjectGUID="{8E01E57E-36AD-4830-BEEB-FBC2ED6F0CF1}"
+ RootNamespace="audio_webrtc"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="131072"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories="..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyDAV\include;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;DOUBANGO_AUDIO_WEBRTC_EXPORTS;TINYDAV_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="false"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="Winmm.lib msdmo.lib dmoguids.lib wmcodecdspuuid.lib strmiids.lib uuid.lib $(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib &quot;..\..\thirdparties\win32\lib\webrtc\audio_device.lib&quot; &quot;..\..\thirdparties\win32\lib\webrtc\system_wrappers.lib&quot; &quot;..\..\thirdparties\win32\lib\webrtc\spl.lib&quot;"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="2"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories="..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyDAV\include;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;DOUBANGO_AUDIO_WEBRTC_EXPORTS;TINYDAV_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="false"
+ DebugInformationFormat="0"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="Winmm.lib msdmo.lib dmoguids.lib wmcodecdspuuid.lib strmiids.lib uuid.lib $(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib &quot;..\..\thirdparties\win32\lib\webrtc\audio_device.lib&quot; &quot;..\..\thirdparties\win32\lib\webrtc\system_wrappers.lib&quot; &quot;..\..\thirdparties\win32\lib\webrtc\spl.lib&quot;"
+ LinkIncremental="1"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\audio_webrtc.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_webrtc_consumer.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_webrtc_producer.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_webrtc_transport.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\dllmain.cxx"
+ >
+ <FileConfiguration
+ Name="Debug|Win32"
+ >
+ <Tool
+ Name="VCCLCompilerTool"
+ UsePrecompiledHeader="0"
+ CompileAsManaged="0"
+ />
+ </FileConfiguration>
+ <FileConfiguration
+ Name="Release|Win32"
+ >
+ <Tool
+ Name="VCCLCompilerTool"
+ UsePrecompiledHeader="0"
+ CompileAsManaged="0"
+ />
+ </FileConfiguration>
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\audio\tdav_consumer_audio.c"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\audio\tdav_producer_audio.c"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\audio_webrtc.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_webrtc_config.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_webrtc_consumer.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_webrtc_producer.h"
+ >
+ </File>
+ <File
+ RelativePath=".\audio_webrtc_transport.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\include\tinydav\audio\tdav_consumer_audio.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\include\tinydav\audio\tdav_producer_audio.h"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/audio_webrtc/audio_webrtc_config.h b/plugins/audio_webrtc/audio_webrtc_config.h
new file mode 100644
index 0000000..dc7c1fd
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc_config.h
@@ -0,0 +1,144 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+
+#ifndef DOUBANGO_AUDIO_WEBRTC_CONFIG_H
+#define DOUBANGO_AUDIO_WEBRTC_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+# define DOUBANGO_AUDIO_WEBRTC_UNDER_WINDOWS 1
+#endif
+
+// OS X or iOS
+#if defined(__APPLE__)
+# define DOUBANGO_AUDIO_WEBRTC_UNDER_APPLE 1
+#endif
+#if TARGET_OS_MAC
+# define DOUBANGO_AUDIO_WEBRTC_UNDER_MAC 1
+#endif
+#if TARGET_OS_IPHONE
+# define DOUBANGO_AUDIO_WEBRTC_UNDER_IPHONE 1
+#endif
+#if TARGET_IPHONE_SIMULATOR
+# define DOUBANGO_AUDIO_WEBRTC_UNDER_IPHONE_SIMULATOR 1
+#endif
+
+#if defined(ANDROID)
+# define DOUBANGO_AUDIO_WEBRTC_UNDER_ANDROID 1
+#endif
+
+// x86
+#if DOUBANGO_AUDIO_WEBRTC_UNDER_WINDOWS || defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+# define DOUBANGO_AUDIO_WEBRTC_UNDER_X86 1
+#endif
+
+// Mobile
+#if defined(_WIN32_WCE) || defined(ANDROID) // iOS (not true)=> || defined(IOS)
+# define DOUBANGO_AUDIO_WEBRTC_UNDER_MOBILE 1
+#endif
+
+#if (DOUBANGO_AUDIO_WEBRTC_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(DOUBANGO_AUDIO_WEBRTC_EXPORTS)
+# define DOUBANGO_AUDIO_WEBRTC_API __declspec(dllexport)
+# define DOUBANGO_AUDIO_WEBRTC_GEXTERN __declspec(dllexport)
+#elif (DOUBANGO_AUDIO_WEBRTC_UNDER_WINDOWS || defined(__SYMBIAN32__))
+# define DOUBANGO_AUDIO_WEBRTC_API __declspec(dllimport)
+# define DOUBANGO_AUDIO_WEBRTC_GEXTERN __declspec(dllimport)
+#else
+# define DOUBANGO_AUDIO_WEBRTC_API
+# define DOUBANGO_AUDIO_WEBRTC_GEXTERN extern
+#endif
+
+/* Guards against C++ name mangling */
+#ifdef __cplusplus
+# define DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS extern "C" {
+# define DOUBANGO_AUDIO_WEBRTC_END_DECLS }
+#else
+# define DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS
+# define DOUBANGO_AUDIO_WEBRTC_END_DECLS
+#endif
+
+#ifdef _MSC_VER
+#if HAVE_FFMPEG // FFMPeg warnings (treated as errors)
+# pragma warning (disable:4244)
+#endif
+# define inline __inline
+# define _CRT_SECURE_NO_WARNINGS
+#endif
+
+/* Detecting C99 compilers
+ */
+#if (__STDC_VERSION__ == 199901L) && !defined(__C99__)
+# define __C99__
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+ #include "../config.h"
+#endif
+
+#if DOUBANGO_AUDIO_WEBRTC_UNDER_WINDOWS
+# define DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT AudioDeviceModule::kDefaultCommunicationDevice
+#else
+# define DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT 0
+#endif
+
+#if DOUBANGO_AUDIO_WEBRTC_UNDER_ANDROID
+#include <android/log.h>
+#include "tsk_string.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+#define ANDROID_DEBUG_TAG "plugin_audio_webrtc" // DDMS log tag when using eclise
+// Android-only log sink: formats the message with tsk_sprintf_2() (which takes
+// a pointer to the va_list) and routes it to the Android logcat at the level
+// mapped from the doubango DEBUG_LEVEL_* constant.
+static void DOUBANGO_AUDIO_WEBRTC_DEBUG_ANY(int level, const char* fmt, ...)
+{
+ char* message = tsk_null;
+ va_list ap;
+ va_start(ap, fmt);
+ tsk_sprintf_2(&message, fmt, &ap);
+
+ if(message){
+ switch(level){
+ case DEBUG_LEVEL_INFO: __android_log_write(ANDROID_LOG_INFO, ANDROID_DEBUG_TAG, message); break;
+ case DEBUG_LEVEL_WARN: __android_log_write(ANDROID_LOG_WARN, ANDROID_DEBUG_TAG, message); break;
+ case DEBUG_LEVEL_ERROR: __android_log_write(ANDROID_LOG_ERROR, ANDROID_DEBUG_TAG, message); break;
+ case DEBUG_LEVEL_FATAL: __android_log_write(ANDROID_LOG_FATAL, ANDROID_DEBUG_TAG, message); break;
+ }
+ TSK_FREE(message);
+ }
+
+ va_end(ap);
+}
+#define DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO(FMT, ...) DOUBANGO_AUDIO_WEBRTC_DEBUG_ANY(DEBUG_LEVEL_INFO, FMT, ##__VA_ARGS__)
+#define DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN(FMT, ...) DOUBANGO_AUDIO_WEBRTC_DEBUG_ANY(DEBUG_LEVEL_WARN, FMT, ##__VA_ARGS__)
+#define DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR(FMT, ...) DOUBANGO_AUDIO_WEBRTC_DEBUG_ANY(DEBUG_LEVEL_ERROR, FMT, ##__VA_ARGS__)
+#define DOUBANGO_AUDIO_WEBRTC_DEBUG_FATAL(FMT, ...) DOUBANGO_AUDIO_WEBRTC_DEBUG_ANY(DEBUG_LEVEL_FATAL, FMT, ##__VA_ARGS__)
+#else
+#define DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO(FMT, ...) TSK_DEBUG_INFO(FMT, ##__VA_ARGS__)
+#define DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN(FMT, ...) TSK_DEBUG_WARN(FMT, ##__VA_ARGS__)
+#define DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR(FMT, ...) TSK_DEBUG_ERROR(FMT, ##__VA_ARGS__)
+#define DOUBANGO_AUDIO_WEBRTC_DEBUG_FATAL(FMT, ...) TSK_DEBUG_FATAL(FMT, ##__VA_ARGS__)
+#endif /* DOUBANGO_AUDIO_WEBRTC_UNDER_ANDROID */
+
+#endif // DOUBANGO_AUDIO_WEBRTC_CONFIG_H
diff --git a/plugins/audio_webrtc/audio_webrtc_consumer.cxx b/plugins/audio_webrtc/audio_webrtc_consumer.cxx
new file mode 100644
index 0000000..e55097b
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc_consumer.cxx
@@ -0,0 +1,233 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_webrtc_consumer.h"
+#include "audio_webrtc.h"
+
+#include "tinydav/audio/tdav_consumer_audio.h"
+
+#include "tsk_string.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+// Consumer context: extends the tinyDAV audio consumer with the shared WebRTC
+// device instance and a local PCM staging buffer drained in 10ms slices.
+typedef struct audio_consumer_webrtc_s
+{
+ TDAV_DECLARE_CONSUMER_AUDIO;
+ audio_webrtc_instance_handle_t* audioInstHandle; // shared per-session device wrapper
+ struct{
+ void* ptr; // PCM staging buffer (one ptime worth of data)
+ bool isFull;
+ int size; // buffer capacity in bytes
+ int index; // current read offset in bytes
+ } buffer;
+}
+audio_consumer_webrtc_t;
+
+// Playout callback: fills 'audioSamples' with exactly 10ms of PCM pulled from
+// the jitter buffer via the local staging buffer. Returns 0 on success (with
+// nSamplesOut set), negative on parameter mismatch.
+int audio_consumer_webrtc_get_data_10ms(const audio_consumer_webrtc_t* _self, void* audioSamples, int nSamples, int nBytesPerSample, int nChannels, int samplesPerSec, uint32_t &nSamplesOut)
+{
+ nSamplesOut = 0;
+ if(!_self || !audioSamples || !nSamples){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ // The device must request exactly 10ms worth of samples.
+ if((nSamples != (samplesPerSec / 100))){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
+ return -2;
+ }
+ if((nBytesPerSample != (TMEDIA_CONSUMER(_self)->audio.bits_per_sample >> 3))){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
+ return -3;
+ }
+ if((nChannels != TMEDIA_CONSUMER(_self)->audio.out.channels)){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not the expected number of channels", nChannels);
+ return -4;
+ }
+
+ // Callback signature is const; the staging buffer is mutable by design.
+ audio_consumer_webrtc_t* self = const_cast<audio_consumer_webrtc_t*>(_self);
+
+ if(self->buffer.index == self->buffer.size){
+ // Staging buffer drained: tick the jitter buffer and refill it.
+ tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(self));
+ self->buffer.index = 0;
+ if((tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(self), self->buffer.ptr, self->buffer.size)) != self->buffer.size){
+ // Underrun: report silence (0 samples) for this 10ms period.
+ nSamplesOut = 0;
+ return 0;
+ }
+ }
+
+ // NOTE(review): despite the name, this is a byte count (samples * bytes/sample).
+ int nSamplesInBits = (nSamples * nBytesPerSample);
+ if(_self->buffer.index + nSamplesInBits <= _self->buffer.size){
+ memcpy(audioSamples, (((uint8_t*)self->buffer.ptr) + self->buffer.index), nSamplesInBits);
+ }
+ // NOTE(review): when the remaining tail is shorter than the request, the copy
+ // is skipped but nSamplesOut is still set to nSamples — the device then plays
+ // whatever was previously in audioSamples. Confirm this is intentional.
+ self->buffer.index += nSamplesInBits;
+ TSK_CLAMP(0, self->buffer.index, self->buffer.size);
+ nSamplesOut = nSamples;
+
+ return 0;
+}
+
+
+/* ============ Media Consumer Interface ================= */
+// Handles runtime parameter updates. Generic audio parameters are delegated to
+// the tinyDAV base consumer; "volume" is recognized but not yet applied to the
+// playout device. Returns the base-class result.
+// (Fix: removed the unused 'webrtc' local cast.)
+static int audio_consumer_webrtc_set(tmedia_consumer_t* self, const tmedia_param_t* param)
+{
+ int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+
+ if(ret == 0){
+ if(tsk_striequals(param->key, "volume")){
+ // TODO: forward the volume to the WebRTC playout device.
+ }
+ }
+
+ return ret;
+}
+
+// Prepares the playout path: creates/acquires the shared audio instance,
+// copies codec parameters into the consumer, lets the device negotiate the
+// output format, then sizes the staging buffer from the negotiated caps.
+static int audio_consumer_webrtc_prepare(tmedia_consumer_t* _self, const tmedia_codec_t* codec)
+{
+ audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
+ if(!self){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ // create audio instance
+ if(!(self->audioInstHandle = audio_webrtc_instance_create(TMEDIA_CONSUMER(self)->session_id))){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio instance handle");
+ return -1;
+ }
+
+ // initialize input parameters from the codec information
+ TMEDIA_CONSUMER(self)->audio.ptime = codec->plugin->audio.ptime;
+ TMEDIA_CONSUMER(self)->audio.in.channels = codec->plugin->audio.channels;
+ TMEDIA_CONSUMER(self)->audio.in.rate = codec->plugin->rate;
+
+ // prepare playout device and update output parameters
+ // (passes the address of the consumer pointer so the device can update it)
+ int ret = audio_webrtc_instance_prepare_consumer(self->audioInstHandle, &_self);
+
+ // now that the producer is prepared we can initialize internal buffer using device caps
+ if(ret == 0){
+ // allocate one ptime worth of PCM: (ms * rate / 1000) * bytes-per-sample
+ int xsize = ((TMEDIA_CONSUMER(self)->audio.ptime * TMEDIA_CONSUMER(self)->audio.out.rate) / 1000) * (TMEDIA_CONSUMER(self)->audio.bits_per_sample >> 3);
+ if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
+ self->buffer.size = 0;
+ return -1;
+ }
+ memset(self->buffer.ptr, 0, xsize);
+ self->buffer.size = xsize;
+ self->buffer.index = 0;
+ self->buffer.isFull = false;
+ }
+ return ret;
+}
+
+// Starts playout on the shared audio instance.
+static int audio_consumer_webrtc_start(tmedia_consumer_t* _self)
+{
+ audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
+ if(!self){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return audio_webrtc_instance_start_consumer(self->audioInstHandle);
+}
+
+
+// Queues already-decoded PCM into the jitter buffer; the playout callback
+// (audio_consumer_webrtc_get_data_10ms) pulls it back out in 10ms slices.
+// (Fix: corrected the garbled "1Invalid parameter" log message.)
+static int audio_consumer_webrtc_consume(tmedia_consumer_t* _self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+ audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
+ if(!self || !buffer || !size){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ /* buffer is already decoded */
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
+}
+
+// Pause is a no-op for this consumer.
+static int audio_consumer_webrtc_pause(tmedia_consumer_t* self)
+{
+ return 0;
+}
+
+// Stops playout on the shared audio instance.
+static int audio_consumer_webrtc_stop(tmedia_consumer_t* _self)
+{
+ audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
+ if(!self){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return audio_webrtc_instance_stop_consumer(self->audioInstHandle);
+}
+
+
+//
+// WebRTC audio consumer object definition
+//
+/* constructor */
+// tsk_object constructor: initializes the tinyDAV base; all self fields start
+// zeroed by the allocator.
+static tsk_object_t* audio_consumer_webrtc_ctor(tsk_object_t *_self, va_list * app)
+{
+ audio_consumer_webrtc_t *self = (audio_consumer_webrtc_t *)_self;
+ if(self){
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(self));
+ /* init self */
+
+ }
+ return self;
+}
+/* destructor */
+// tsk_object destructor: stops playout, releases the shared audio instance and
+// the staging buffer, then deinitializes the tinyDAV base.
+static tsk_object_t* audio_consumer_webrtc_dtor(tsk_object_t *_self)
+{
+ audio_consumer_webrtc_t *self = (audio_consumer_webrtc_t *)_self;
+ if(self){
+ /* stop */
+ audio_consumer_webrtc_stop(TMEDIA_CONSUMER(self));
+ /* deinit self */
+ if(self->audioInstHandle){
+ audio_webrtc_instance_destroy(&self->audioInstHandle);
+ }
+ TSK_FREE(self->buffer.ptr);
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(self));
+ }
+
+ return self;
+}
+/* object definition */
+/* tsk_object definition (size + lifecycle callbacks) */
+static const tsk_object_def_t audio_consumer_webrtc_def_s =
+{
+ sizeof(audio_consumer_webrtc_t),
+ audio_consumer_webrtc_ctor,
+ audio_consumer_webrtc_dtor,
+ tdav_consumer_audio_cmp,
+};
+/* tmedia consumer plugin definition registered with the media layer */
+static const tmedia_consumer_plugin_def_t audio_consumer_webrtc_plugin_def_s =
+{
+ &audio_consumer_webrtc_def_s,
+
+ tmedia_audio,
+ "WebRTC audio consumer",
+
+ audio_consumer_webrtc_set,
+ audio_consumer_webrtc_prepare,
+ audio_consumer_webrtc_start,
+ audio_consumer_webrtc_consume,
+ audio_consumer_webrtc_pause,
+ audio_consumer_webrtc_stop
+};
+const tmedia_consumer_plugin_def_t *audio_consumer_webrtc_plugin_def_t = &audio_consumer_webrtc_plugin_def_s;
diff --git a/plugins/audio_webrtc/audio_webrtc_consumer.h b/plugins/audio_webrtc/audio_webrtc_consumer.h
new file mode 100644
index 0000000..9dc7dd1
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc_consumer.h
@@ -0,0 +1,33 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef DOUBANGO_AUDIO_WEBRTC_CONSUMER_H
+#define DOUBANGO_AUDIO_WEBRTC_CONSUMER_H
+
+#include "audio_webrtc_config.h"
+
+DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS
+
+extern const struct tmedia_consumer_plugin_def_s *audio_consumer_webrtc_plugin_def_t;
+
+int audio_consumer_webrtc_get_data_10ms(const struct audio_consumer_webrtc_s* self, void* audioSamples, int nSamples, int nBytesPerSample, int nChannels, int samplesPerSec, uint32_t &nSamplesOut);
+
+DOUBANGO_AUDIO_WEBRTC_END_DECLS
+
+#endif /* DOUBANGO_AUDIO_WEBRTC_CONSUMER_H */
+
+
diff --git a/plugins/audio_webrtc/audio_webrtc_producer.cxx b/plugins/audio_webrtc/audio_webrtc_producer.cxx
new file mode 100644
index 0000000..02c5aeb
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc_producer.cxx
@@ -0,0 +1,227 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_webrtc_producer.h"
+#include "audio_webrtc.h"
+
+#include "tinydav/audio/tdav_producer_audio.h"
+
+#include "tsk_string.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+// Producer context: extends the tinyDAV audio producer with the shared WebRTC
+// device instance, a software mute flag, and a PCM accumulation buffer that
+// collects 10ms device callbacks until one full ptime is available.
+typedef struct audio_producer_webrtc_s
+{
+ TDAV_DECLARE_PRODUCER_AUDIO;
+
+ bool isMuted; // software mute: zero the buffer before delivery
+ audio_webrtc_instance_handle_t* audioInstHandle; // shared per-session device wrapper
+ struct{
+ void* ptr; // PCM accumulation buffer (one ptime worth of data)
+ int size; // buffer capacity in bytes
+ int index; // current write offset in bytes
+ } buffer;
+}
+audio_producer_webrtc_t;
+
+// Recording callback: appends 10ms of captured PCM to the accumulation buffer
+// and, once a full ptime is collected, delivers it to the encoder callback
+// (zeroed first when muted). Returns 0 on success, negative on mismatch/overflow.
+int audio_producer_webrtc_handle_data_10ms(const audio_producer_webrtc_t* _self, const void* audioSamples, int nSamples, int nBytesPerSample, int samplesPerSec, int nChannels)
+{
+ if(!_self || !audioSamples || !nSamples){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ // The device must deliver exactly 10ms worth of samples.
+ if((nSamples != (samplesPerSec / 100))){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
+ return -2;
+ }
+ if((nBytesPerSample != (TMEDIA_PRODUCER(_self)->audio.bits_per_sample >> 3))){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
+ return -3;
+ }
+ if((nChannels != TMEDIA_PRODUCER(_self)->audio.channels)){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not the expected number of channels", nChannels);
+ return -4;
+ }
+
+ // NOTE(review): despite the name, this is a byte count (samples * bytes/sample).
+ int nSamplesInBits = (nSamples * nBytesPerSample);
+ if(_self->buffer.index + nSamplesInBits > _self->buffer.size){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Buffer overflow");
+ return -5;
+ }
+
+ // Callback signature is const; the accumulation buffer is mutable by design.
+ audio_producer_webrtc_t* self = const_cast<audio_producer_webrtc_t*>(_self);
+
+ memcpy((((uint8_t*)self->buffer.ptr) + self->buffer.index), audioSamples, nSamplesInBits);
+ self->buffer.index += nSamplesInBits;
+
+ if(self->buffer.index == self->buffer.size){
+ // One full ptime collected: hand it to the encoder and rewind.
+ self->buffer.index = 0;
+ if(TMEDIA_PRODUCER(self)->enc_cb.callback){
+ if(self->isMuted){
+ // Software mute: send silence instead of stopping capture.
+ memset(self->buffer.ptr, 0, self->buffer.size);
+ }
+ TMEDIA_PRODUCER(self)->enc_cb.callback(TMEDIA_PRODUCER(self)->enc_cb.callback_data, self->buffer.ptr, self->buffer.size);
+ }
+ }
+
+ return 0;
+}
+
+
+/* ============ Media Producer Interface ================= */
+// Handles runtime parameter updates: "mute" (int32) is handled locally as a
+// software mute (see audio_producer_webrtc_handle_data_10ms); everything else
+// is delegated to the tinyDAV base producer.
+// (Fix: guard against NULL self/param before dereferencing — every sibling
+// interface function performs the same check.)
+static int audio_producer_webrtc_set(tmedia_producer_t* _self, const tmedia_param_t* param)
+{
+ audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
+ if(!self || !param){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(param->plugin_type == tmedia_ppt_producer){
+ if(param->value_type == tmedia_pvt_int32){
+ if(tsk_striequals(param->key, "mute")){
+ self->isMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+ return 0;
+ }
+ }
+ }
+ return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
+}
+// Prepares the capture path: creates/acquires the shared audio instance,
+// validates the codec ptime (must be a multiple of the device's 10ms callback
+// period), copies codec parameters, lets the device negotiate the format, then
+// sizes the accumulation buffer from the negotiated caps.
+static int audio_producer_webrtc_prepare(tmedia_producer_t* _self, const tmedia_codec_t* codec)
+{
+ audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
+ if(!self || !codec){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ // create audio instance
+ if(!(self->audioInstHandle = audio_webrtc_instance_create(TMEDIA_PRODUCER(self)->session_id))){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio instance handle");
+ return -2;
+ }
+
+ // check that ptime is mutiple of 10
+ if((codec->plugin->audio.ptime % 10)){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("ptime=%d not multiple of 10", codec->plugin->audio.ptime);
+ return -3;
+ }
+
+ // init input parameters from the codec
+ TMEDIA_PRODUCER(self)->audio.channels = codec->plugin->audio.channels;
+ TMEDIA_PRODUCER(self)->audio.rate = codec->plugin->rate;
+ TMEDIA_PRODUCER(self)->audio.ptime = codec->plugin->audio.ptime;
+
+ // prepare playout device and update output parameters
+ // (passes the address of the producer pointer so the device can update it)
+ int ret;
+ ret = audio_webrtc_instance_prepare_producer(self->audioInstHandle, &_self);
+
+ // now that the producer is prepared we can initialize internal buffer using device caps
+ if(ret == 0){
+ // allocate one ptime worth of PCM: (ms * rate / 1000) * bytes-per-sample
+ int xsize = ((TMEDIA_PRODUCER(self)->audio.ptime * TMEDIA_PRODUCER(self)->audio.rate) / 1000) * (TMEDIA_PRODUCER(self)->audio.bits_per_sample >> 3);
+ if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
+ self->buffer.size = 0;
+ return -1;
+ }
+ self->buffer.size = xsize;
+ self->buffer.index = 0;
+ }
+ return ret;
+}
+
+// Starts recording on the shared audio instance.
+static int audio_producer_webrtc_start(tmedia_producer_t* _self)
+{
+ audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
+ if(!self){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return audio_webrtc_instance_start_producer(self->audioInstHandle);
+}
+
+// Pause is a no-op for this producer.
+static int audio_producer_webrtc_pause(tmedia_producer_t* self)
+{
+ return 0;
+}
+
+// Stops recording on the shared audio instance.
+static int audio_producer_webrtc_stop(tmedia_producer_t* _self)
+{
+ audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
+ if(!self){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return audio_webrtc_instance_stop_producer(self->audioInstHandle);
+}
+
+
+//
+// WebRTC audio producer object definition
+//
+/* constructor */
+// tsk_object constructor: initializes the tinyDAV base; all self fields start
+// zeroed by the allocator.
+static tsk_object_t* audio_producer_webrtc_ctor(tsk_object_t *_self, va_list * app)
+{
+ audio_producer_webrtc_t *self = (audio_producer_webrtc_t *)_self;
+ if(self){
+ /* init base */
+ tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(self));
+ /* init self */
+
+ }
+ return self;
+}
+/* destructor */
+// tsk_object destructor: stops recording, releases the shared audio instance
+// and the accumulation buffer, then deinitializes the tinyDAV base.
+static tsk_object_t* audio_producer_webrtc_dtor(tsk_object_t *_self)
+{
+ audio_producer_webrtc_t *self = (audio_producer_webrtc_t *)_self;
+ if(self){
+ /* stop */
+ audio_producer_webrtc_stop(TMEDIA_PRODUCER(self));
+ /* deinit self */
+ if(self->audioInstHandle){
+ audio_webrtc_instance_destroy(&self->audioInstHandle);
+ }
+ TSK_FREE(self->buffer.ptr);
+
+ /* deinit base */
+ tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(self));
+ }
+
+ return self;
+}
+/* object definition */
+/* tsk_object definition (size + lifecycle callbacks) */
+static const tsk_object_def_t audio_producer_webrtc_def_s =
+{
+ sizeof(audio_producer_webrtc_t),
+ audio_producer_webrtc_ctor,
+ audio_producer_webrtc_dtor,
+ tdav_producer_audio_cmp,
+};
+/* tmedia producer plugin definition registered with the media layer */
+static const tmedia_producer_plugin_def_t audio_producer_webrtc_plugin_def_s =
+{
+ &audio_producer_webrtc_def_s,
+
+ tmedia_audio,
+ "WebRTC audio producer",
+
+ audio_producer_webrtc_set,
+ audio_producer_webrtc_prepare,
+ audio_producer_webrtc_start,
+ audio_producer_webrtc_pause,
+ audio_producer_webrtc_stop
+};
+const tmedia_producer_plugin_def_t *audio_producer_webrtc_plugin_def_t = &audio_producer_webrtc_plugin_def_s; \ No newline at end of file
diff --git a/plugins/audio_webrtc/audio_webrtc_producer.h b/plugins/audio_webrtc/audio_webrtc_producer.h
new file mode 100644
index 0000000..49adf0d
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc_producer.h
@@ -0,0 +1,32 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef DOUBANGO_AUDIO_WEBRTC_PRODUCER_H
+#define DOUBANGO_AUDIO_WEBRTC_PRODUCER_H
+
+#include "audio_webrtc_config.h"
+
+DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS
+
+extern const struct tmedia_producer_plugin_def_s *audio_producer_webrtc_plugin_def_t;
+
+// handle recorded data
+int audio_producer_webrtc_handle_data_10ms(const struct audio_producer_webrtc_s* self, const void* audioSamples, int nSamples, int nBytesPerSample, int samplesPerSec, int nChannels);
+
+DOUBANGO_AUDIO_WEBRTC_END_DECLS
+
+#endif /* DOUBANGO_AUDIO_WEBRTC_PRODUCER_H */
diff --git a/plugins/audio_webrtc/audio_webrtc_transport.cxx b/plugins/audio_webrtc/audio_webrtc_transport.cxx
new file mode 100644
index 0000000..470e4e7
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc_transport.cxx
@@ -0,0 +1,84 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_webrtc_transport.h"
+#include "audio_webrtc_producer.h"
+#include "audio_webrtc_consumer.h"
+
+#include "tsk_debug.h"
+
+using namespace webrtc;
+
+AudioTransportImpl::AudioTransportImpl(AudioDeviceModule* audioDevice) :
+ _audioDevice(audioDevice),
+ _fullDuplex(false),
+ _speakerVolume(false),
+ _speakerMute(false),
+ _microphoneVolume(false),
+ _microphoneMute(false),
+ _microphoneBoost(false),
+ _microphoneAGC(false),
+ _loopBackMeasurements(false),
+ _consumer(tsk_null),
+ _producer(tsk_null)
+{
+
+}
+
+AudioTransportImpl::~AudioTransportImpl()
+{
+
+}
+
+void AudioTransportImpl::SetFullDuplex(bool enable)
+{
+ _fullDuplex = enable;
+}
+
+WebRtc_Word32 AudioTransportImpl::RecordedDataIsAvailable(
+ const void* audioSamples,
+ const WebRtc_UWord32 nSamples,
+ const WebRtc_UWord8 nBytesPerSample,
+ const WebRtc_UWord8 nChannels,
+ const WebRtc_UWord32 samplesPerSec,
+ const WebRtc_UWord32 totalDelayMS,
+ const WebRtc_Word32 clockDrift,
+ const WebRtc_UWord32 currentMicLevel,
+ WebRtc_UWord32& newMicLevel)
+{
+ if(!_producer){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No wrapped producer");
+ return 0;
+ }
+ return audio_producer_webrtc_handle_data_10ms(_producer, audioSamples, nSamples, nBytesPerSample, samplesPerSec, nChannels);
+}
+
+
+WebRtc_Word32 AudioTransportImpl::NeedMorePlayData(
+ const WebRtc_UWord32 nSamples,
+ const WebRtc_UWord8 nBytesPerSample,
+ const WebRtc_UWord8 nChannels,
+ const WebRtc_UWord32 samplesPerSec,
+ void* audioSamples,
+ WebRtc_UWord32& nSamplesOut)
+{
+ if(!_consumer){
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No wrapped consumer");
+ return 0;
+ }
+ return audio_consumer_webrtc_get_data_10ms(_consumer, audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, nSamplesOut);
+} \ No newline at end of file
diff --git a/plugins/audio_webrtc/audio_webrtc_transport.h b/plugins/audio_webrtc/audio_webrtc_transport.h
new file mode 100644
index 0000000..6d98ab5
--- /dev/null
+++ b/plugins/audio_webrtc/audio_webrtc_transport.h
@@ -0,0 +1,115 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef DOUBANGO_AUDIO_WEBRTC_TRANSPORT_H
+#define DOUBANGO_AUDIO_WEBRTC_TRANSPORT_H
+
+#include "audio_webrtc_config.h"
+
+#include <webrtc/audio_device.h>
+
+class AudioTransportImpl: public webrtc::AudioTransport
+{
+public:
+ virtual WebRtc_Word32
+ RecordedDataIsAvailable(const void* audioSamples,
+ const WebRtc_UWord32 nSamples,
+ const WebRtc_UWord8 nBytesPerSample,
+ const WebRtc_UWord8 nChannels,
+ const WebRtc_UWord32 samplesPerSec,
+ const WebRtc_UWord32 totalDelayMS,
+ const WebRtc_Word32 clockDrift,
+ const WebRtc_UWord32 currentMicLevel,
+ WebRtc_UWord32& newMicLevel);
+
+ virtual WebRtc_Word32 NeedMorePlayData(const WebRtc_UWord32 nSamples,
+ const WebRtc_UWord8 nBytesPerSample,
+ const WebRtc_UWord8 nChannels,
+ const WebRtc_UWord32 samplesPerSec,
+ void* audioSamples,
+ WebRtc_UWord32& nSamplesOut);
+
+ AudioTransportImpl(webrtc::AudioDeviceModule* audioDevice);
+ ~AudioTransportImpl();
+
+public:
+ void SetFullDuplex(bool enable);
+ void SetSpeakerVolume(bool enable)
+ {
+ _speakerVolume = enable;
+ }
+ ;
+ void SetSpeakerMute(bool enable)
+ {
+ _speakerMute = enable;
+ }
+ ;
+ void SetMicrophoneMute(bool enable)
+ {
+ _microphoneMute = enable;
+ }
+ ;
+ void SetMicrophoneVolume(bool enable)
+ {
+ _microphoneVolume = enable;
+ }
+ ;
+ void SetMicrophoneBoost(bool enable)
+ {
+ _microphoneBoost = enable;
+ }
+ ;
+ void SetLoopbackMeasurements(bool enable)
+ {
+ _loopBackMeasurements = enable;
+ }
+ ;
+ void SetMicrophoneAGC(bool enable)
+ {
+ _microphoneAGC = enable;
+ }
+ ;
+
+ void SetConsumer(const struct audio_consumer_webrtc_s* consumer)
+ {
+ _consumer = consumer;
+ }
+ ;
+
+ void SetProducer(const struct audio_producer_webrtc_s* producer)
+ {
+ _producer = producer;
+ }
+ ;
+
+private:
+ webrtc::AudioDeviceModule* _audioDevice;
+	const struct audio_consumer_webrtc_s* _consumer; // must be const and must not take reference
+	const struct audio_producer_webrtc_s* _producer; // must be const and must not take reference
+
+ bool _fullDuplex;
+ bool _speakerVolume;
+ bool _speakerMute;
+ bool _microphoneVolume;
+ bool _microphoneMute;
+ bool _microphoneBoost;
+ bool _microphoneAGC;
+ bool _loopBackMeasurements;
+};
+
+
+#endif /* DOUBANGO_AUDIO_WEBRTC_TRANSPORT_H */
diff --git a/plugins/audio_webrtc/dllmain.cxx b/plugins/audio_webrtc/dllmain.cxx
new file mode 100644
index 0000000..8a319bc
--- /dev/null
+++ b/plugins/audio_webrtc/dllmain.cxx
@@ -0,0 +1,39 @@
+/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "audio_webrtc_config.h"
+
+#if DOUBANGO_AUDIO_WEBRTC_UNDER_WINDOWS
+# include <windows.h>
+#endif /* DOUBANGO_AUDIO_WEBRTC_UNDER_WINDOWS */
+
+BOOL APIENTRY DllMain( HMODULE hModule,
+ DWORD ul_reason_for_call,
+ LPVOID lpReserved
+ )
+{
+ switch (ul_reason_for_call)
+ {
+ case DLL_PROCESS_ATTACH:
+ case DLL_THREAD_ATTACH:
+ case DLL_THREAD_DETACH:
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
+}
+
diff --git a/plugins/audio_webrtc/droid-makefile b/plugins/audio_webrtc/droid-makefile
new file mode 100644
index 0000000..ba5e9f5
--- /dev/null
+++ b/plugins/audio_webrtc/droid-makefile
@@ -0,0 +1,49 @@
+APP := lib$(PROJECT)_$(MARCH).$(EXT)
+
+THIRDPARTIES_INC := ../../thirdparties/android/include
+THIRDPARTIES_INC_COMMON := ../../thirdparties/common/include
+THIRDPARTIES_LIB := ../../thirdparties/android/lib
+THIRDPARTIES_MARCH_LIB := ../../thirdparties/android/lib/$(MARCH)
+DOUBANGO_LIB := ../../android-projects/output
+
+WEBRTC_CFLAGS := -DHAVE_WEBRTC=1
+WEBRTC_LDFLAGS := -lwebrtc_spl_$(MARCH) -lwebrtc_audio_device_$(MARCH) -lwebrtc_system_wrappers_$(MARCH)
+
+################################
+
+CFLAGS := $(CFLAGS_LIB) -I$(THIRDPARTIES_INC) -I$(THIRDPARTIES_INC_COMMON) \
+ -I../../tinySAK/src -I../../tinyMEDIA/include -I../../tinySDP/include -I../../tinyRTP/include -I../../tinyDAV/include
+
+LDFLAGS := $(LDFLAGS_LIB) -L$(THIRDPARTIES_LIB) -L$(THIRDPARTIES_LIB)/android-9 -L$(THIRDPARTIES_MARCH_LIB) -L$(DOUBANGO_LIB) \
+ -ltinySAK_$(MARCH) -ltinyMEDIA_$(MARCH) -ltinySDP_$(MARCH) $(WEBRTC_LDFLAGS) -lm -ldl -llog -lgcc -lstdc++ -lOpenSLES -lgnustl_static
+
+all: $(APP)
+
+OBJS = \
+ audio_webrtc.o \
+ audio_webrtc_consumer.o \
+ audio_webrtc_producer.o \
+ audio_webrtc_transport.o \
+ ../../tinyDAV/src/audio/tdav_consumer_audio.o \
+ ../../tinyDAV/src/audio/tdav_producer_audio.o \
+
+$(APP): $(OBJS)
+ifeq ($(EXT), a)
+ $(AR) rcs $@ $^
+else
+ $(CC) $(LDFLAGS) -o $@ $^
+endif
+
+%.o: %.c
+ $(CC) -c $(INCLUDE) $(CFLAGS) $< -o $@
+
+%.o: %.cxx
+ $(CPP) -c $(INCLUDE) $(CFLAGS) $< -o $@
+
+install: $(APP)
+ $(ANDROID_SDK_ROOT)/tools/adb remount
+ $(ANDROID_SDK_ROOT)/tools/adb push $(APP) $(LIB_DIR)/$(APP)
+ $(ANDROID_SDK_ROOT)/tools/adb shell chmod 777 $(LIB_DIR)/$(APP)
+
+clean:
+ @rm -f $(OBJS) $(APP) \ No newline at end of file
diff --git a/plugins/audio_webrtc/makefile b/plugins/audio_webrtc/makefile
new file mode 100644
index 0000000..3e0d5ba
--- /dev/null
+++ b/plugins/audio_webrtc/makefile
@@ -0,0 +1,21 @@
+include ../../android-projects/root.mk
+PLATFORM:=android-9 # required because of OpenSL ES
+
+ifeq ($(BT), shared)
+all:
+ ($(MAKE) -f droid-makefile all; \
+ $(STRIP) --strip-all --remove-section=.comment --remove-section=.note ./lib$(PROJECT)_$(MARCH).$(EXT); \
+ cp -f ./lib$(PROJECT)_$(MARCH).$(EXT) $(OUTPUT_DIR); \
+ )
+else
+all:
+ ($(MAKE) -f droid-makefile all; cp -f lib$(PROJECT)_$(MARCH).$(EXT) $(OUTPUT_DIR))
+endif
+
+clean:
+ ($(MAKE) -f droid-makefile clean)
+
+
+gdbserver:
+ $(ANDROID_SDK_ROOT)/tools/adb forward tcp:1234: tcp:1234
+ $(ANDROID_SDK_ROOT)/tools/adb shell $(EXEC_DIR)/gdbserver :1234 $(EXEC_DIR)/test \ No newline at end of file
diff --git a/plugins/buildAll.sh b/plugins/buildAll.sh
new file mode 100644
index 0000000..c9551f2
--- /dev/null
+++ b/plugins/buildAll.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# Build webRTC for Google Android Systems
+# Last version known to work: 2425 (4:56:50 PM, Tuesday, June 19, 2012)
+
+for IS_NEON in no yes
+do
+
+for project in audio_opensles
+do
+ if [ $project = "audio_opensles" ]; then \
+ export PLATFORM=android-9; \
+ else \
+ export PLATFORM=android-3; \
+ fi \
+
+ echo -e building "plugin_$project ($PLATFORM)....\n"
+ cd $project
+ make PROJECT=plugin_$project clean
+ make PROJECT=plugin_$project ANDROID_PLATFORM=$PLATFORM BT=shared NEON=$IS_NEON all
+ cd ..
+done
+
+done
diff --git a/plugins/pluginCUDA/dllmain_cuda.cxx b/plugins/pluginCUDA/dllmain_cuda.cxx
new file mode 100644
index 0000000..57c3ffd
--- /dev/null
+++ b/plugins/pluginCUDA/dllmain_cuda.cxx
@@ -0,0 +1,137 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_cuda_config.h"
+#include "plugin_cuda_utils.h"
+
+#include "tinymedia/tmedia_codec.h"
+
+#include "tsk_plugin.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+
+#if defined(_MSC_VER)
+# pragma comment(lib, "nvcuvenc")
+# pragma comment(lib, "nvcuvid")
+# pragma comment(lib, "cuda")
+# pragma comment(lib, "cudart")
+
+# pragma comment(lib, "d3d9")
+# pragma comment(lib, "d3dx9")
+#endif
+
+
+#if !defined(PLUGIN_CUDA_H264_ENABLE)
+# define PLUGIN_CUDA_H264_ENABLE 1
+#endif
+
+extern const tmedia_codec_plugin_def_t *cuda_codec_h264_main_plugin_def_t;
+extern const tmedia_codec_plugin_def_t *cuda_codec_h264_base_plugin_def_t;
+
+PLUGIN_CUDA_BEGIN_DECLS /* BEGIN */
+PLUGIN_CUDA_API int __plugin_get_def_count();
+PLUGIN_CUDA_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+PLUGIN_CUDA_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+PLUGIN_CUDA_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+PLUGIN_CUDA_END_DECLS /* END */
+
+BOOL APIENTRY DllMain( HMODULE hModule,
+ DWORD ul_reason_for_call,
+ LPVOID lpReserved
+ )
+{
+ switch (ul_reason_for_call)
+ {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
+}
+
+
+typedef enum PLUGIN_INDEX_E
+{
+#if PLUGIN_CUDA_H264_ENABLE
+ PLUGIN_INDEX_CODEC_H264_MAIN,
+ PLUGIN_INDEX_CODEC_H264_BASE,
+#endif
+
+ PLUGIN_INDEX_COUNT
+}
+PLUGIN_INDEX_T;
+
+
+int __plugin_get_def_count()
+{
+ return CudaUtils::IsH264Supported() ? PLUGIN_INDEX_COUNT : 0;
+}
+
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+#if PLUGIN_CUDA_H264_ENABLE
+ switch(index){
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ case PLUGIN_INDEX_CODEC_H264_BASE:
+ {
+ return CudaUtils::IsH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
+ }
+ }
+#endif
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+}
+
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+#if PLUGIN_CUDA_H264_ENABLE
+ switch(index){
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ case PLUGIN_INDEX_CODEC_H264_BASE:
+ {
+ return CudaUtils::IsH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+ }
+ }
+#endif
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+}
+
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+#if PLUGIN_CUDA_H264_ENABLE
+ switch(index){
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ {
+ return CudaUtils::IsH264Supported() ? cuda_codec_h264_main_plugin_def_t : tsk_null;
+ }
+ case PLUGIN_INDEX_CODEC_H264_BASE:
+ {
+ return CudaUtils::IsH264Supported() ? cuda_codec_h264_base_plugin_def_t : tsk_null;
+ }
+ }
+#endif
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+}
diff --git a/plugins/pluginCUDA/pluginCUDA.vcproj b/plugins/pluginCUDA/pluginCUDA.vcproj
new file mode 100644
index 0000000..1f4e8f1
--- /dev/null
+++ b/plugins/pluginCUDA/pluginCUDA.vcproj
@@ -0,0 +1,225 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9.00"
+ Name="pluginCUDA"
+ ProjectGUID="{97008E5F-C6FC-4748-BE0D-50400E6764CB}"
+ RootNamespace="pluginCUDA"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="196613"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories="&quot;$(CUDA_PATH)include&quot;;.;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyDAV\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;PLUGIN_CUDA_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="2"
+ AdditionalLibraryDirectories="$(DXSDK_DIR)/lib/x86;$(CUDA_PATH)/lib/$(PlatformName);"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="2"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories="&quot;$(CUDA_PATH)include&quot;;.;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyDAV\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;PLUGIN_CUDA_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="0"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="1"
+ AdditionalLibraryDirectories="&quot;$(DXSDK_DIR)/lib/x86&quot;;&quot;$(CUDA_PATH)/lib/$(PlatformName)&quot;"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\dllmain_cuda.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_cuda_codec_h264.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_cuda_tdav.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_cuda_utils.cxx"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\plugin_cuda_config.h"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_cuda_utils.h"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ <File
+ RelativePath=".\version.rc"
+ >
+ </File>
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/pluginCUDA/plugin_cuda_codec_h264.cxx b/plugins/pluginCUDA/plugin_cuda_codec_h264.cxx
new file mode 100644
index 0000000..b2c8e2e
--- /dev/null
+++ b/plugins/pluginCUDA/plugin_cuda_codec_h264.cxx
@@ -0,0 +1,1346 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_cuda_config.h"
+#include "plugin_cuda_utils.h"
+
+#include "tinydav/codecs/h264/tdav_codec_h264_common.h"
+
+#include "tinyrtp/rtp/trtp_rtp_packet.h"
+
+#include "tinymedia/tmedia_codec.h"
+#include "tinymedia/tmedia_params.h"
+#include "tinymedia/tmedia_defaults.h"
+
+#include "tsk_mutex.h"
+#include "tsk_params.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+#include <unknwn.h>
+#include <nvcuvid.h>
+#include <cuviddec.h>
+#include <NVEncoderAPI.h>
+#include <NVEncodeDataTypes.h>
+#include <d3d9.h>
+#include <cudad3d9.h>
+#include <cuda/types.h>
+#include <cuda.h>
+#include <Windows.h>
+
+typedef struct cuda_codec_h264_s
+{
+ TDAV_DECLARE_CODEC_H264_COMMON;
+
+ // Encoder
+ struct{
+ NVEncoder pInst;
+ NVEncoderParams ctxParams;
+ NVVE_CallbackParams clbParams;
+ void* pBufferPtr;
+ tsk_size_t nBufferSize;
+ int64_t frame_count;
+ tsk_bool_t force_idr;
+ int32_t quality; // [1-31]
+ int rotation;
+ int neg_width;
+ int neg_height;
+ int neg_fps;
+ int max_bitrate_bps;
+ int32_t max_bw_kpbs;
+ tsk_bool_t passthrough; // whether to bypass encoding
+ } encoder;
+
+ // decoder
+ struct{
+ CUvideodecoder pInst;
+ CUVIDDECODECREATEINFO cuInfo;
+ CUvideoparser cuParser;
+ CUVIDPARSERPARAMS cuPaserParams;
+ CUdevice cuDevice;
+ IDirect3D9 *pD3D9;
+ IDirect3DDevice9 *pD3D9Device;
+ CUcontext cuContext;
+ struct {
+			void *pcuPtr; // MUST be freed using cuMemFreeHost()
+ tsk_size_t nSize;
+ tsk_size_t nPitch;
+ tsk_bool_t bAvail;
+ } cuBuffer;
+ void* accumulator;
+ tsk_size_t accumulator_pos;
+ tsk_size_t accumulator_size;
+ uint16_t last_seq;
+ tsk_bool_t passthrough; // whether to bypass decoding
+ tsk_mutex_handle_t *phMutex;
+ } decoder;
+}
+cuda_codec_h264_t;
+
+#if !defined(PLUGIN_CUDA_H264_GOP_SIZE_IN_SECONDS)
+# define PLUGIN_CUDA_H264_GOP_SIZE_IN_SECONDS 25
+#endif
+#if !defined(PLUGIN_CUDA_H264_MAX_FRM_CNT)
+# define PLUGIN_CUDA_H264_MAX_FRM_CNT 2
+#endif
+
+static int cuda_codec_h264_init(cuda_codec_h264_t* self, profile_idc_t profile);
+static int cuda_codec_h264_deinit(cuda_codec_h264_t* self);
+static int cuda_codec_h264_open_encoder(cuda_codec_h264_t* self);
+static int cuda_codec_h264_close_encoder(cuda_codec_h264_t* self);
+static int cuda_codec_h264_open_decoder(cuda_codec_h264_t* self);
+static int cuda_codec_h264_close_decoder(cuda_codec_h264_t* self);
+
+static inline tsk_size_t _cuda_codec_h264_pict_layout(cuda_codec_h264_t* self, void**output, tsk_size_t *output_size);
+
+static int CUDAAPI _NVCallback_HandleVideoSequence(void *pvUserData, CUVIDEOFORMAT *pFormat);
+static int CUDAAPI _NVCallback_HandlePictureDecode(void *pvUserData, CUVIDPICPARAMS *pPicParams);
+static int CUDAAPI _NVCallback_HandlePictureDisplay(void *pvUserData, CUVIDPARSERDISPINFO *pPicParams);
+static unsigned char* CUDAAPI _NVCallback_HandleAcquireBitStream(int *pBufferSize, void *pUserdata);
+static void CUDAAPI _NVCallback_HandleReleaseBitStream(int nBytesInBuffer, unsigned char *cb,void *pUserdata);
+static void CUDAAPI _NVCallback_HandleOnBeginFrame(const NVVE_BeginFrameInfo *pbfi, void *pUserdata);
+static void CUDAAPI _NVCallback_HandleOnEndFrame(const NVVE_EndFrameInfo *pefi, void *pUserdata);
+
+/* ============ H.264 Base/Main Profile X.X Plugin interface functions ================= */
+
+static int cuda_codec_h264_set(tmedia_codec_t* self, const tmedia_param_t* param)
+{
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+ if(!self->opened){
+ TSK_DEBUG_ERROR("Codec not opened");
+ return -1;
+ }
+ if(param->value_type == tmedia_pvt_int32){
+ if(tsk_striequals(param->key, "action")){
+ tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+ switch(action){
+ case tmedia_codec_action_encode_idr:
+ {
+ h264->encoder.force_idr = tsk_true;
+ break;
+ }
+ case tmedia_codec_action_bw_down:
+ {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
+ break;
+ }
+ case tmedia_codec_action_bw_up:
+ {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
+ break;
+ }
+ }
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-encoding")){
+ h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-decoding")){
+ h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "rotation")){
+ int rotation = *((int32_t*)param->value);
+ if(h264->encoder.rotation != rotation){
+ if(self->opened){
+ int ret;
+ h264->encoder.rotation = rotation;
+ if((ret = cuda_codec_h264_close_encoder(h264))){
+ return ret;
+ }
+ if((ret = cuda_codec_h264_open_encoder(h264))){
+ return ret;
+ }
+ }
+ }
+ return 0;
+ }
+ }
+ return -1;
+}
+
+
+static int cuda_codec_h264_open(tmedia_codec_t* self)
+{
+ int ret;
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+
+ if(!h264){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ /* the caller (base class) already checked that the codec is not opened */
+
+ // Encoder
+ if((ret = cuda_codec_h264_open_encoder(h264))){
+ return ret;
+ }
+
+ // Decoder
+ if((ret = cuda_codec_h264_open_decoder(h264))){
+ return ret;
+ }
+
+ return 0;
+}
+
+static int cuda_codec_h264_close(tmedia_codec_t* self)
+{
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+
+ if(!h264){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+	/* the caller (base class) already checked that the codec is opened */
+
+ // Encoder
+ cuda_codec_h264_close_encoder(h264);
+
+ // Decoder
+ cuda_codec_h264_close_decoder(h264);
+
+ return 0;
+}
+
+static tsk_size_t cuda_codec_h264_encode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size)
+{
+ int ret = 0;
+ NVVE_EncodeFrameParams efparams;
+ tsk_bool_t send_idr, send_hdr;
+ unsigned long flags = 0;
+
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self || !in_data || !in_size)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(h264->encoder.passthrough) {
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
+ return 0;
+ }
+
+ if((h264->encoder.ctxParams.iOutputSize[1] * h264->encoder.ctxParams.iOutputSize[0] * 3) >> 1 != in_size)
+ {
+ /* guard */
+ TSK_DEBUG_ERROR("Invalid size");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst /*|| !h264->encoder.pInst->IsReady()*/)
+ {
+ TSK_DEBUG_ERROR("Encoder not opened or not ready");
+ return 0;
+ }
+
+ if(h264->encoder.passthrough)
+ {
+ tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), (const uint8_t*)in_data, in_size);
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+
+ efparams.Width = h264->encoder.ctxParams.iOutputSize[0];
+ efparams.Height = h264->encoder.ctxParams.iOutputSize[1];
+ efparams.Pitch = (h264->encoder.ctxParams.nDeviceMemPitch ? h264->encoder.ctxParams.nDeviceMemPitch : h264->encoder.ctxParams.iOutputSize[0]);
+ efparams.PictureStruc = (NVVE_PicStruct)h264->encoder.ctxParams.iPictureType;
+ efparams.SurfFmt = (NVVE_SurfaceFormat)h264->encoder.ctxParams.iSurfaceFormat;
+ efparams.progressiveFrame = (h264->encoder.ctxParams.iSurfaceFormat == 3) ? 1 : 0;
+ efparams.repeatFirstField = 0;
+ efparams.topfieldfirst = (h264->encoder.ctxParams.iSurfaceFormat == 1) ? 1 : 0;
+ efparams.picBuf = (unsigned char *)in_data;
+ efparams.bLast = 0;
+
+ // send IDR for:
+ // - the first frame
+ // - remote peer requested an IDR
+	//	- every second within the first 4 seconds
+ send_idr = (
+ h264->encoder.frame_count++ == 0
+ || h264 ->encoder.force_idr
+ || ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
+ );
+
+ if(send_idr)
+ {
+ flags |= 0x04; // FORCE IDR
+ }
+
+ // send SPS and PPS headers for:
+ // - IDR frames (not required but it's the easiest way to deal with pkt loss)
+	//	- every 5 seconds after the first 4 seconds
+ send_hdr = (
+ send_idr
+ || ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
+ );
+ if(send_hdr)
+ {
+ if(h264->encoder.ctxParams.iDisableSPSPPS)
+ {
+ unsigned char SPSPPSBuff[1024];
+ int SPSPPSBuffSize = sizeof(SPSPPSBuff);
+ hr = NVGetSPSPPS(h264->encoder.pInst, SPSPPSBuff, SPSPPSBuffSize, &SPSPPSBuffSize);
+ if(SUCCEEDED(hr))
+ {
+ int size = 0;
+ while(size < SPSPPSBuffSize - 2)
+ {
+ int16_t next_size = ((int16_t)SPSPPSBuff[size])<<1 | ((int16_t)SPSPPSBuff[size + 1]);
+ tdav_codec_h264_rtp_encap(common, &SPSPPSBuff[size + 2], next_size);
+ size += next_size + 2;
+ }
+ }
+ else
+ {
+ TSK_DEBUG_ERROR("NVGetSPSPPS failed with error code = %08x", hr)
+ }
+ }
+ }
+
+ // Encode data
+ CHECK_HR(hr = NVEncodeFrame(h264->encoder.pInst, &efparams, flags, NULL));
+
+ // reset
+ h264->encoder.force_idr = tsk_false;
+
+bail:
+ return 0;
+}
+
+static tsk_size_t cuda_codec_h264_decode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size, const tsk_object_t* proto_hdr)
+{
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+ const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
+
+ const uint8_t* pay_ptr = tsk_null;
+ tsk_size_t pay_size = 0;
+ int ret;
+ tsk_bool_t append_scp;
+ tsk_bool_t sps_or_pps;
+ tsk_size_t retsize = 0, size_to_copy = 0;
+ static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
+ static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
+
+ if(!h264 || !in_data || !in_size || !out_data)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst)
+ {
+ TSK_DEBUG_ERROR("Decoder not opened or not ready");
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+
+ /* Packet lost? */
+ if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq)
+ {
+ TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
+ }
+ h264->decoder.last_seq = rtp_hdr->seq_num;
+
+
+ /* 5.3. NAL Unit Octet Usage
+ +---------------+
+ |0|1|2|3|4|5|6|7|
+ +-+-+-+-+-+-+-+-+
+ |F|NRI| Type |
+ +---------------+
+ */
+ if(*((uint8_t*)in_data) & 0x80)
+ {
+ TSK_DEBUG_WARN("F=1");
+ /* reset accumulator */
+ h264->decoder.accumulator = 0;
+ return 0;
+ }
+
+ /* get payload */
+ if((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp)) || !pay_ptr || !pay_size)
+ {
+ TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
+ return 0;
+ }
+ //append_scp = tsk_true;
+ size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
+	// whether it's SPS or PPS (append_scp is false for subsequent FUA chunks)
+ sps_or_pps = append_scp && pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
+
+ // start-accumulator
+ if(!h264->decoder.accumulator)
+ {
+ if(size_to_copy > xmax_size)
+ {
+ TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", size_to_copy, xmax_size);
+ return 0;
+ }
+ if(!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t))))
+ {
+ TSK_DEBUG_ERROR("Failed to allocated new buffer");
+ return 0;
+ }
+ h264->decoder.accumulator_size = size_to_copy;
+ }
+ if((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size)
+ {
+ TSK_DEBUG_ERROR("BufferOverflow");
+ h264->decoder.accumulator_pos = 0;
+ return 0;
+ }
+ if((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size)
+ {
+ if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy))))
+ {
+ TSK_DEBUG_ERROR("Failed to reallocated new buffer");
+ h264->decoder.accumulator_pos = 0;
+ h264->decoder.accumulator_size = 0;
+ return 0;
+ }
+ h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
+ }
+
+ if(append_scp)
+ {
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
+ h264->decoder.accumulator_pos += start_code_prefix_size;
+ }
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
+ h264->decoder.accumulator_pos += pay_size;
+ // end-accumulator
+
+
+ if(sps_or_pps)
+ {
+ // http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
+ // SPS and PPS should be bundled with IDR
+ TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
+ }
+ else if(rtp_hdr->marker)
+ {
+ if(h264->decoder.passthrough)
+ {
+ if(*out_max_size < h264->decoder.accumulator_pos)
+ {
+ if((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos)))
+ {
+ *out_max_size = h264->decoder.accumulator_pos;
+ }
+ else
+ {
+ *out_max_size = 0;
+ return 0;
+ }
+ }
+ memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
+ retsize = h264->decoder.accumulator_pos;
+ }
+ else
+ {
+ // !h264->decoder.passthrough
+ CUVIDSOURCEDATAPACKET pkt;
+ CUresult cuResult;
+ pkt.flags = 0;
+ pkt.payload_size = (unsigned long) h264->decoder.accumulator_pos;
+ pkt.payload = (unsigned char *)h264->decoder.accumulator;
+ pkt.timestamp = 0;
+
+ // reset accumulator
+ h264->decoder.accumulator_pos = 0;
+ cuResult = cuvidParseVideoData(h264->decoder.cuParser, &pkt);
+ if(cuResult != CUDA_SUCCESS)
+ {
+ TSK_DEBUG_ERROR("cuvidParseVideoData() failed with error code = %d", (int)cuResult);
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(h264->decoder.cuBuffer.bAvail)
+ {
+ h264->decoder.cuBuffer.bAvail = tsk_false;
+ if((retsize = _cuda_codec_h264_pict_layout(h264, out_data, out_max_size)) == 0)
+ {
+ TSK_DEBUG_ERROR("_cuda_codec_h264_pict_layout failed");
+ CHECK_HR(hr = E_FAIL);
+ }
+ }
+ }// else(!h264->decoder.passthrough)
+ } // else if(rtp_hdr->marker)
+
+bail:
+ if(FAILED(hr))
+ {
+ TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
+ if(TMEDIA_CODEC_VIDEO(self)->in.callback)
+ {
+ TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
+ TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+ TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+ }
+ }
+ return retsize;
+}
+
+// Delegates SDP attribute matching to the shared H.264 helper.
+static tsk_bool_t cuda_codec_h264_sdp_att_match(const tmedia_codec_t* self, const char* att_name, const char* att_value)
+{
+	tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+	return tdav_codec_h264_common_sdp_att_match(common, att_name, att_value);
+}
+
+// Returns the SDP attribute value from the shared helper and tags the
+// "fmtp" attribute with the CUDA implementation marker. Caller frees.
+static char* cuda_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char* att_name)
+{
+	char* att_value = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
+	if(tsk_striequals(att_name, "fmtp") && att_value) {
+		tsk_strcat(&att_value, "; impl=CUDA");
+	}
+	return att_value;
+}
+
+
+
+
+/* ============ H.264 Base Profile Plugin interface ================= */
+
+/* constructor */
+/* constructor (Base Profile) */
+static tsk_object_t* cuda_codec_h264_base_ctor(tsk_object_t * self, va_list * app)
+{
+	cuda_codec_h264_t *codec = (cuda_codec_h264_t*)self;
+	if(!codec) {
+		return self;
+	}
+	/* base init is performed by tmedia_codec_create() */
+	if(cuda_codec_h264_init(codec, profile_idc_baseline) != 0) {
+		return tsk_null; // init failed
+	}
+	return self;
+}
+/* destructor */
+/* destructor (Base Profile) */
+static tsk_object_t* cuda_codec_h264_base_dtor(tsk_object_t * self)
+{
+	cuda_codec_h264_t *codec = (cuda_codec_h264_t*)self;
+	if(codec) {
+		/* release base resources first, then codec-specific ones */
+		tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+		cuda_codec_h264_deinit(codec);
+	}
+	return self;
+}
+/* object definition */
+static const tsk_object_def_t cuda_codec_h264_base_def_s =
+{
+	sizeof(cuda_codec_h264_t),	// object size
+	cuda_codec_h264_base_ctor,	// constructor
+	cuda_codec_h264_base_dtor,	// destructor
+	tmedia_codec_cmp,		// comparator
+};
+/* plugin definition*/
+static const tmedia_codec_plugin_def_t cuda_codec_h264_base_plugin_def_s =
+{
+	&cuda_codec_h264_base_def_s,	// object definition (ctor/dtor)
+
+	tmedia_video,
+	tmedia_codec_id_h264_bp,
+	"H264",		// encoding name
+	"H264 Base Profile (NVIDIA CUDA)",	// description
+	TMEDIA_CODEC_FORMAT_H264_BP,
+	tsk_true,
+	90000, // rate
+
+	/* audio */
+	{ 0 },
+
+	/* video (width, height, fps) */
+	{176, 144, 0}, // fps is @deprecated
+
+	/* codec operations */
+	cuda_codec_h264_set,
+	cuda_codec_h264_open,
+	cuda_codec_h264_close,
+	cuda_codec_h264_encode,
+	cuda_codec_h264_decode,
+	cuda_codec_h264_sdp_att_match,
+	cuda_codec_h264_sdp_att_get
+};
+const tmedia_codec_plugin_def_t *cuda_codec_h264_base_plugin_def_t = &cuda_codec_h264_base_plugin_def_s;
+
+/* ============ H.264 Main Profile Plugin interface ================= */
+
+/* constructor */
+/* constructor (Main Profile) */
+static tsk_object_t* cuda_codec_h264_main_ctor(tsk_object_t * self, va_list * app)
+{
+	cuda_codec_h264_t *codec = (cuda_codec_h264_t*)self;
+	if(!codec) {
+		return self;
+	}
+	/* base init is performed by tmedia_codec_create() */
+	if(cuda_codec_h264_init(codec, profile_idc_main) != 0) {
+		return tsk_null; // init failed
+	}
+	return self;
+}
+/* destructor */
+/* destructor (Main Profile) */
+static tsk_object_t* cuda_codec_h264_main_dtor(tsk_object_t * self)
+{
+	cuda_codec_h264_t *codec = (cuda_codec_h264_t*)self;
+	if(codec) {
+		/* release base resources first, then codec-specific ones */
+		tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+		cuda_codec_h264_deinit(codec);
+	}
+	return self;
+}
+/* object definition */
+static const tsk_object_def_t cuda_codec_h264_main_def_s =
+{
+	sizeof(cuda_codec_h264_t),	// object size
+	cuda_codec_h264_main_ctor,	// constructor
+	cuda_codec_h264_main_dtor,	// destructor
+	tmedia_codec_cmp,		// comparator
+};
+/* plugin definition*/
+static const tmedia_codec_plugin_def_t cuda_codec_h264_main_plugin_def_s =
+{
+	&cuda_codec_h264_main_def_s,	// object definition (ctor/dtor)
+
+	tmedia_video,
+	tmedia_codec_id_h264_mp,
+	"H264",		// encoding name
+	"H264 Main Profile (NVIDIA CUDA)",	// description
+	TMEDIA_CODEC_FORMAT_H264_MP,
+	tsk_true,
+	90000, // rate
+
+	/* audio */
+	{ 0 },
+
+	/* video (width, height, fps)*/
+	{176, 144, 0},// fps is @deprecated
+
+	/* codec operations */
+	cuda_codec_h264_set,
+	cuda_codec_h264_open,
+	cuda_codec_h264_close,
+	cuda_codec_h264_encode,
+	cuda_codec_h264_decode,
+	cuda_codec_h264_sdp_att_match,
+	cuda_codec_h264_sdp_att_get
+};
+const tmedia_codec_plugin_def_t *cuda_codec_h264_main_plugin_def_t = &cuda_codec_h264_main_plugin_def_s;
+
+
+
+
+
+/* ============ Common To all H264 codecs ================= */
+
+/* Creates and configures the NVIDIA hardware H.264 encoder (NVCUVENC API).
+ * - Selects the GPU with the best (clock rate * SM count * cores-per-SM) score.
+ * - Derives the negotiated size from the codec "out" params, swapping
+ *   width/height when rotation is 90 or 270 degrees.
+ * - Caps the bitrate with tmedia_get_video_bandwidth_kbps_2() and the
+ *   configured maximum, then pushes each parameter via NVSetParamValue().
+ * @param self the CUDA H.264 codec context (encoder must not be open yet)
+ * @retval 0 on success, -1 on failure */
+int cuda_codec_h264_open_encoder(cuda_codec_h264_t* self)
+{
+	HRESULT hr = S_OK;
+	int32_t max_bw_kpbs;
+	int bestGPU = 0, gpuPerf = 0;
+	static int low_latency = 1;	// passed by address to NVVE_LOW_LATENCY below
+	tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+	if(self->encoder.pInst)
+	{
+		TSK_DEBUG_ERROR("Encoder already initialized");
+#if defined(E_ILLEGAL_METHOD_CALL)
+		CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+#else
+		// same numeric value as E_ILLEGAL_METHOD_CALL when the SDK doesn't define it
+		CHECK_HR(hr = 0x8000000EL);
+#endif
+	}
+
+	memset(&self->encoder.clbParams, 0, sizeof(self->encoder.clbParams));
+	memset(&self->encoder.ctxParams, 0, sizeof(self->encoder.ctxParams));
+
+	// create encoder
+	CHECK_HR(hr = NVCreateEncoder(&self->encoder.pInst));
+	CHECK_HR(hr = NVSetCodec(self->encoder.pInst, NV_CODEC_TYPE_H264));
+	CHECK_HR(hr = NVSetDefaultParam(self->encoder.pInst));
+
+	// pick the best GPU using a clock-rate * multiprocessor-count * cores-per-SM heuristic
+	CHECK_HR(hr = NVGetParamValue(self->encoder.pInst, NVVE_GET_GPU_COUNT, &self->encoder.ctxParams.GPU_count));
+	{
+		int temp = 0, deviceCount;
+		for (deviceCount=0; deviceCount < self->encoder.ctxParams.GPU_count; deviceCount++)
+		{
+			NVVE_GPUAttributes GPUAttributes = {0};
+
+			GPUAttributes.iGpuOrdinal = deviceCount;
+			hr = NVGetParamValue(self->encoder.pInst, NVVE_GET_GPU_ATTRIBUTES, &GPUAttributes);
+			if(FAILED(hr))
+			{
+				// non-fatal: skip this GPU and keep probing the others
+				TSK_DEBUG_ERROR("NVGetParamValue(NVVE_GET_GPU_ATTRIBUTES) failed with error code = %08x", hr);
+				continue;
+			}
+
+			temp = GPUAttributes.iClockRate * GPUAttributes.iMultiProcessorCount;
+			temp = temp * CudaUtils::ConvertSMVer2Cores(GPUAttributes.iMajor, GPUAttributes.iMinor);
+
+			if(temp > gpuPerf)
+			{
+				gpuPerf = temp;
+				bestGPU = deviceCount;
+			}
+		}
+	}
+
+	// swap dimensions when the frame will be rotated by 90/270 degrees
+	self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
+	self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
+	self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
+	// clamp the estimated bandwidth to the configured maximum
+	max_bw_kpbs = TSK_CLAMP(
+		0,
+		tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
+		self->encoder.max_bw_kpbs
+	);
+	self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
+
+	TSK_DEBUG_INFO("[H.264 CUDA Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
+		self->encoder.neg_width,
+		self->encoder.neg_height,
+		self->encoder.neg_fps,
+		self->encoder.max_bitrate_bps
+		);
+
+	// fill the encoding context before pushing it field by field below
+	self->encoder.ctxParams.iForcedGPU = bestGPU;
+	self->encoder.ctxParams.iInputSize[0] = self->encoder.neg_width;
+	self->encoder.ctxParams.iInputSize[1] = self->encoder.neg_height;
+	self->encoder.ctxParams.iOutputSize[0] = self->encoder.neg_width;
+	self->encoder.ctxParams.iOutputSize[1] = self->encoder.neg_height;
+	self->encoder.ctxParams.GPUOffloadLevel= NVVE_GPU_OFFLOAD_ALL;
+	self->encoder.ctxParams.iSurfaceFormat = (int)IYUV;
+	self->encoder.ctxParams.iPictureType = (int)FRAME_PICTURE;
+	self->encoder.ctxParams.Fieldmode = MODE_FRAME;
+	self->encoder.ctxParams.Presets = (NVVE_PRESETS_TARGET)-1;//Should be iPod, Zune ...
+	// self->encoder.ctxParams.iP_Interval = 1;
+	self->encoder.ctxParams.iAspectRatio[0] = 1;
+	self->encoder.ctxParams.iAspectRatio[1] = 1;
+	self->encoder.ctxParams.iAspectRatio[2] = 0;
+	self->encoder.ctxParams.iIDR_Period = (self->encoder.neg_fps * PLUGIN_CUDA_H264_GOP_SIZE_IN_SECONDS);
+	self->encoder.ctxParams.iUseDeviceMem = 0;
+	self->encoder.ctxParams.iDynamicGOP = 0;
+	self->encoder.ctxParams.RCType = RC_CBR;
+	self->encoder.ctxParams.iAvgBitrate = self->encoder.max_bitrate_bps;
+	self->encoder.ctxParams.iPeakBitrate = self->encoder.max_bitrate_bps;
+	self->encoder.ctxParams.iQP_Level_Intra = 25;
+	self->encoder.ctxParams.iQP_Level_InterP = 28;
+	self->encoder.ctxParams.iQP_Level_InterB = 31;
+	self->encoder.ctxParams.iFrameRate[0] = self->encoder.neg_fps;
+	self->encoder.ctxParams.iFrameRate[1] = 1;
+	self->encoder.ctxParams.iDeblockMode = 1;
+	self->encoder.ctxParams.iForceIntra = 0;
+	self->encoder.ctxParams.iForceIDR = 0;
+	self->encoder.ctxParams.iClearStat = 0;
+	self->encoder.ctxParams.DIMode = DI_MEDIAN;
+	self->encoder.ctxParams.iDisableSPSPPS = 1; // Do not include SPS/PPS frames
+	self->encoder.ctxParams.iNaluFramingType = 0; // StartCodes
+	self->encoder.ctxParams.iMultiGPU = 1;
+	// CABAC is only allowed for Main profile and above; Baseline uses CAVLC
+	switch(common->profile)
+	{
+		case profile_idc_baseline:
+			{
+				self->encoder.ctxParams.iDisableCabac = 1;
+				self->encoder.ctxParams.iProfileLevel = 0xff42; // 0xff -> autoselect level
+				break;
+			}
+		case profile_idc_main:
+			{
+				self->encoder.ctxParams.iDisableCabac = 0;
+				self->encoder.ctxParams.iProfileLevel = 0xff4d; // 0xff -> autoselect level
+				break;
+			}
+		default:
+			{
+				CHECK_HR(hr = E_NOTIMPL);
+				break;
+			}
+	}
+
+	//
+	// Allocate memory (bitstream buffer handed to the encoder via the acquire callback)
+	//
+	self->encoder.nBufferSize = (self->encoder.ctxParams.iOutputSize[1] * self->encoder.ctxParams.iOutputSize[0] * 3) >> 4;
+	if(!self->encoder.pBufferPtr && !(self->encoder.pBufferPtr = tsk_realloc(self->encoder.pBufferPtr, self->encoder.nBufferSize)))
+	{
+		self->encoder.nBufferSize = 0;
+		CHECK_HR(hr = E_OUTOFMEMORY);
+	}
+
+	//
+	// Set parameters (failures are logged but non-fatal except where CHECK_HR is used)
+	//
+	hr = NVSetParamValue(self->encoder.pInst, NVVE_FORCE_GPU_SELECTION, &self->encoder.ctxParams.iForcedGPU);
+	if(FAILED(hr))
+	{
+		TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_GPU_SELECTION) failed with error code = %08x", hr);
+	}
+	CHECK_HR(hr = NVSetParamValue(self->encoder.pInst, NVVE_DEVICE_MEMORY_INPUT, &(self->encoder.ctxParams.iUseDeviceMem)));
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_OUT_SIZE, &(self->encoder.ctxParams.iOutputSize)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_OUT_SIZE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_IN_SIZE, &(self->encoder.ctxParams.iInputSize)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_IN_SIZE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_MULTI_GPU, &(self->encoder.ctxParams.iMultiGPU)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_MULTI_GPU) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_ASPECT_RATIO, &(self->encoder.ctxParams.iAspectRatio));if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_ASPECT_RATIO) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_FIELD_ENC_MODE, &(self->encoder.ctxParams.Fieldmode)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_FIELD_ENC_MODE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_P_INTERVAL, &(self->encoder.ctxParams.iP_Interval)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_P_INTERVAL) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_IDR_PERIOD, &(self->encoder.ctxParams.iIDR_Period)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_IDR_PERIOD) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_DYNAMIC_GOP, &(self->encoder.ctxParams.iDynamicGOP)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_DYNAMIC_GOP) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_RC_TYPE, &(self->encoder.ctxParams.RCType)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_RC_TYPE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_AVG_BITRATE, &(self->encoder.ctxParams.iAvgBitrate)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_AVG_BITRATE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_PEAK_BITRATE, &(self->encoder.ctxParams.iPeakBitrate)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_PEAK_BITRATE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTRA, &(self->encoder.ctxParams.iQP_Level_Intra)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_OUT_SIZE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTER_P,&(self->encoder.ctxParams.iQP_Level_InterP)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_QP_LEVEL_INTER_P) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTER_B,&(self->encoder.ctxParams.iQP_Level_InterB)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_QP_LEVEL_INTER_B) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_FRAME_RATE, &(self->encoder.ctxParams.iFrameRate)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_FRAME_RATE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_DEBLOCK_MODE, &(self->encoder.ctxParams.iDeblockMode)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_DEBLOCK_MODE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_PROFILE_LEVEL, &(self->encoder.ctxParams.iProfileLevel)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_PROFILE_LEVEL) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_FORCE_INTRA, &(self->encoder.ctxParams.iForceIntra)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_INTRA) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_FORCE_IDR, &(self->encoder.ctxParams.iForceIDR)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_IDR) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_CLEAR_STAT, &(self->encoder.ctxParams.iClearStat)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_CLEAR_STAT) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_SET_DEINTERLACE,&(self->encoder.ctxParams.DIMode)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_SET_DEINTERLACE) failed with error code = %08x", hr); }
+	if (self->encoder.ctxParams.Presets != -1)
+	{
+		hr = NVSetParamValue(self->encoder.pInst,NVVE_PRESETS, &(self->encoder.ctxParams.Presets)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_PRESETS) failed with error code = %08x", hr); }
+	}
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_DISABLE_CABAC, &(self->encoder.ctxParams.iDisableCabac)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_DISABLE_CABAC) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_CONFIGURE_NALU_FRAMING_TYPE, &(self->encoder.ctxParams.iNaluFramingType)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_CONFIGURE_NALU_FRAMING_TYPE) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_DISABLE_SPS_PPS,&(self->encoder.ctxParams.iDisableSPSPPS)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_DISABLE_SPS_PPS) failed with error code = %08x", hr); }
+	hr = NVSetParamValue(self->encoder.pInst,NVVE_LOW_LATENCY,&low_latency); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_LOW_LATENCY) failed with error code = %08x", hr); }
+
+	// register the bitstream/frame callbacks; 'self' is passed back as user data
+	self->encoder.clbParams.pfnacquirebitstream = _NVCallback_HandleAcquireBitStream;
+	self->encoder.clbParams.pfnonbeginframe = _NVCallback_HandleOnBeginFrame;
+	self->encoder.clbParams.pfnonendframe = _NVCallback_HandleOnEndFrame;
+	self->encoder.clbParams.pfnreleasebitstream = _NVCallback_HandleReleaseBitStream;
+	NVRegisterCB(self->encoder.pInst, self->encoder.clbParams, self);
+
+
+	CHECK_HR(hr = NVCreateHWEncoder(self->encoder.pInst));
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Releases the NVCUVENC encoder instance and its bitstream buffer, and
+// resets the frame counter. Safe to call when nothing is open. Returns 0.
+int cuda_codec_h264_close_encoder(cuda_codec_h264_t* self)
+{
+	if(!self) {
+		return 0;
+	}
+	if(self->encoder.pInst) {
+		NVDestroyEncoder(self->encoder.pInst);
+		self->encoder.pInst = NULL;
+	}
+	if(self->encoder.pBufferPtr) {
+		TSK_FREE(self->encoder.pBufferPtr);
+		self->encoder.nBufferSize = 0;
+	}
+	self->encoder.frame_count = 0;
+	return 0;
+}
+
+/* Creates the CUVID H.264 decoder.
+ * A D3D9 device is created on the first adapter that accepts a CUDA/D3D9
+ * interop context (cuD3D9CtxCreate), then the bitstream parser and the
+ * decoder instance are created, plus the mutex guarding decoder access.
+ * Fixes: log tag said "[H.264 MF Decoder]" (copy/paste from the Media
+ * Foundation plugin); removed the unused 'common' local.
+ * @param self the CUDA H.264 codec context (decoder must not be open yet)
+ * @retval 0 on success, -1 on failure */
+int cuda_codec_h264_open_decoder(cuda_codec_h264_t* self)
+{
+	HRESULT hr = S_OK;
+	int i, adapterCount;
+	CUresult cuResult;
+	D3DPRESENT_PARAMETERS d3dpp;
+
+	if(self->decoder.pInst || self->decoder.cuDevice || self->decoder.cuContext || self->decoder.pD3D9 || self->decoder.pD3D9Device)
+	{
+		TSK_DEBUG_ERROR("Decoder already initialized");
+#if defined(E_ILLEGAL_METHOD_CALL)
+		CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+#else
+		// same numeric value as E_ILLEGAL_METHOD_CALL when the SDK doesn't define it
+		CHECK_HR(hr = 0x8000000EL);
+#endif
+	}
+
+	TSK_DEBUG_INFO("[H.264 CUDA Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
+		TMEDIA_CODEC_VIDEO(self)->in.width,
+		TMEDIA_CODEC_VIDEO(self)->in.height,
+		TMEDIA_CODEC_VIDEO(self)->in.fps
+		);
+
+	// describe the decoder: H.264, NV12 output, adaptive deinterlacing
+	memset(&self->decoder.cuInfo, 0, sizeof(self->decoder.cuInfo));
+	self->decoder.cuInfo.ulCreationFlags = cudaVideoCreate_PreferCUDA;
+	self->decoder.cuInfo.CodecType = cudaVideoCodec_H264;
+	self->decoder.cuInfo.ulWidth = TMEDIA_CODEC_VIDEO(self)->in.width;
+	self->decoder.cuInfo.ulTargetWidth = TMEDIA_CODEC_VIDEO(self)->in.width;
+	self->decoder.cuInfo.ulHeight = TMEDIA_CODEC_VIDEO(self)->in.height;
+	self->decoder.cuInfo.ulTargetHeight = TMEDIA_CODEC_VIDEO(self)->in.height;
+	self->decoder.cuInfo.ulNumDecodeSurfaces = PLUGIN_CUDA_H264_MAX_FRM_CNT;
+	self->decoder.cuInfo.ulNumOutputSurfaces = 1;
+	self->decoder.cuInfo.ChromaFormat = cudaVideoChromaFormat_420;
+	self->decoder.cuInfo.OutputFormat = cudaVideoSurfaceFormat_NV12;
+	self->decoder.cuInfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Adaptive;
+
+	self->decoder.cuDevice = CudaUtils::GetMaxGflopsDeviceId();
+
+#if _DEBUG || DEBUG
+	{
+		int major, minor;
+		size_t totalGlobalMem;
+		char deviceName[256];
+		cuDeviceComputeCapability(&major, &minor, self->decoder.cuDevice);
+		cuDeviceGetName(deviceName, sizeof(deviceName), self->decoder.cuDevice);
+		TSK_DEBUG_INFO("[CUDA H.264 decoder] Using GPU Device %d: %s has SM %d.%d compute capability", self->decoder.cuDevice, deviceName, major, minor);
+
+		/*cutilDrvSafeCallNoSync(*/cuDeviceTotalMem(&totalGlobalMem, self->decoder.cuDevice)/*)*/;
+		TSK_DEBUG_INFO("[CUDA H.264 decoder] Total amount of global memory in GPU device: %4.4f MB", (float)totalGlobalMem/(1024*1024));
+	}
+#endif
+
+	// create Direct3D instance
+	self->decoder.pD3D9 = Direct3DCreate9(D3D_SDK_VERSION);
+	if(!self->decoder.pD3D9)
+	{
+		CHECK_HR(hr = E_OUTOFMEMORY);
+	}
+	// probe each adapter until one yields a working CUDA/D3D9 interop context
+	adapterCount = self->decoder.pD3D9->GetAdapterCount();
+	for(i = 0; i < adapterCount; ++i)
+	{
+		ZeroMemory(&d3dpp, sizeof(d3dpp));
+		d3dpp.Windowed = TRUE;
+		d3dpp.BackBufferFormat = D3DFMT_X8R8G8B8;
+		d3dpp.BackBufferWidth = self->decoder.cuInfo.ulTargetWidth;
+		d3dpp.BackBufferHeight = self->decoder.cuInfo.ulTargetHeight;
+		d3dpp.BackBufferCount = 1;
+		d3dpp.SwapEffect = D3DSWAPEFFECT_COPY;
+		d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+		d3dpp.Flags = D3DPRESENTFLAG_VIDEO;
+		hr = self->decoder.pD3D9->CreateDevice(i,
+			D3DDEVTYPE_HAL,
+			GetDesktopWindow(),
+			D3DCREATE_FPU_PRESERVE | D3DCREATE_MULTITHREADED | D3DCREATE_HARDWARE_VERTEXPROCESSING,
+			&d3dpp,
+			&self->decoder.pD3D9Device);
+		if(hr == S_OK)
+		{
+			cuResult = cuD3D9CtxCreate(&self->decoder.cuContext, &self->decoder.cuDevice, 0, self->decoder.pD3D9Device);
+			if(cuResult == CUDA_SUCCESS)
+			{
+				break; // found a usable adapter
+			}
+			// interop failed on this adapter: release and try the next one
+			SafeRelease(&self->decoder.pD3D9Device);
+			if(self->decoder.cuContext)
+			{
+				cuCtxDestroy(self->decoder.cuContext);
+				self->decoder.cuContext = NULL;
+			}
+		}
+	}
+
+	if(!self->decoder.pD3D9Device)
+	{
+		TSK_DEBUG_ERROR("Failed to create D3D9 device");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	// create the bitstream parser; callbacks get 'self' back as user data
+	memset(&self->decoder.cuPaserParams, 0, sizeof(self->decoder.cuPaserParams));
+	self->decoder.cuPaserParams.CodecType = cudaVideoCodec_H264;
+	self->decoder.cuPaserParams.ulMaxNumDecodeSurfaces = PLUGIN_CUDA_H264_MAX_FRM_CNT;
+	self->decoder.cuPaserParams.pUserData = self;
+	self->decoder.cuPaserParams.pfnSequenceCallback = _NVCallback_HandleVideoSequence;
+	self->decoder.cuPaserParams.pfnDecodePicture = _NVCallback_HandlePictureDecode;
+	self->decoder.cuPaserParams.pfnDisplayPicture = _NVCallback_HandlePictureDisplay;
+	cuResult = cuvidCreateVideoParser(&self->decoder.cuParser, &self->decoder.cuPaserParams);
+	if(cuResult != CUDA_SUCCESS)
+	{
+		TSK_DEBUG_ERROR("cuvidCreateVideoParser(0) failed with error code = %d", (int)cuResult);
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	cuResult = cuvidCreateDecoder(&self->decoder.pInst, &self->decoder.cuInfo);
+	if(CUDA_SUCCESS != cuResult)
+	{
+		TSK_DEBUG_ERROR("cuvidCreateDecoder failed with error code=%d", (int)cuResult);
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	// mutex serializing decoder access from the parser callbacks
+	if(!self->decoder.phMutex && !(self->decoder.phMutex = tsk_mutex_create()))
+	{
+		TSK_DEBUG_ERROR("Failed to create mutex");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+/* Releases every decoder-side resource: CUVID decoder, CUDA context, D3D9
+ * device/instance, bitstream parser, the page-locked host frame buffer, the
+ * mutex and the RTP reassembly accumulator.
+ * Fix: 'accumulator_size' is now reset alongside 'accumulator_pos' so the
+ * bookkeeping stays consistent with the freed buffer.
+ * Safe to call multiple times. Always returns 0. */
+int cuda_codec_h264_close_decoder(cuda_codec_h264_t* self)
+{
+	if(self)
+	{
+		if(self->decoder.pInst)
+		{
+			cuvidDestroyDecoder(self->decoder.pInst);
+			self->decoder.pInst = NULL;
+		}
+		if(self->decoder.cuContext)
+		{
+			cuCtxDestroy(self->decoder.cuContext);
+			self->decoder.cuContext = NULL;
+		}
+		SafeRelease(&self->decoder.pD3D9Device);
+		SafeRelease(&self->decoder.pD3D9);
+		if(self->decoder.cuParser)
+		{
+			cuvidDestroyVideoParser(self->decoder.cuParser);
+			self->decoder.cuParser = NULL;
+		}
+		{/* cuBuffer.XXX: page-locked host memory allocated with cuMemAllocHost() */
+			if(self->decoder.cuBuffer.pcuPtr)
+			{
+				cuMemFreeHost(self->decoder.cuBuffer.pcuPtr);
+				self->decoder.cuBuffer.pcuPtr = NULL;
+			}
+			self->decoder.cuBuffer.nSize = self->decoder.cuBuffer.nPitch = 0;
+			self->decoder.cuBuffer.bAvail = tsk_false;
+		}
+
+		if(self->decoder.phMutex)
+		{
+			tsk_mutex_destroy(&self->decoder.phMutex);
+		}
+
+		TSK_FREE(self->decoder.accumulator);
+		self->decoder.accumulator_pos = 0;
+		self->decoder.accumulator_size = 0; // keep size consistent with the freed buffer
+	}
+
+	return 0;
+}
+
+/* One-time initialization shared by the Base and Main profile constructors.
+ * Starts CUDA, initializes the common H.264 layer, derives the H.264 level
+ * from the negotiated output size and fills default codec parameters
+ * (decoder outputs NV12, encoder consumes YUV420P).
+ * Fix: the error message said "cuda_codec_h264_common_init() faile" --
+ * wrong function name and a typo.
+ * @param self codec to initialize
+ * @param profile profile_idc_baseline or profile_idc_main
+ * @retval 0 on success, non-zero error code otherwise */
+int cuda_codec_h264_init(cuda_codec_h264_t* self, profile_idc_t profile)
+{
+	int ret = 0;
+	level_idc_t level;
+	tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+	if(!self)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	CudaUtils::Startup();
+
+	if((ret = tdav_codec_h264_common_init(common)))
+	{
+		TSK_DEBUG_ERROR("tdav_codec_h264_common_init() failed with error code=%d", ret);
+		return ret;
+	}
+
+	if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level)))
+	{
+		TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
+		return ret;
+	}
+
+	self->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
+	common->pack_mode = H264_PACKETIZATION_MODE;
+	common->profile = profile;
+	common->level = level;
+	TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
+	TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
+
+	TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12; // decoder
+	TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_yuv420p; // encoder
+
+	self->encoder.quality = 1;
+
+	return ret;
+}
+
+// Tears down the codec by delegating to cuda_codec_h264_close().
+// Returns 0 on success, -1 when 'self' is NULL.
+int cuda_codec_h264_deinit(cuda_codec_h264_t* self)
+{
+	if(self) {
+		cuda_codec_h264_close((tmedia_codec_t*)self);
+		return 0;
+	}
+	TSK_DEBUG_ERROR("Invalid parameter");
+	return -1;
+}
+
+/* Copies the decoded frame out of the pitch-aligned CUDA host buffer into
+ * *output, dropping the pitch padding. The luma plane (h rows of w bytes) is
+ * copied row by row; each chroma row of the source (located at offset
+ * h*pitch) is split into two w/2-byte halves written sequentially, advancing
+ * one source row every two output rows.
+ * NOTE(review): the exact chroma plane layout produced here should be
+ * confirmed against the consumer -- in.chroma is declared tmedia_chroma_nv12
+ * in cuda_codec_h264_init().
+ * Grows *output via tsk_realloc() when *output_size is too small.
+ * @return the number of bytes written ((w*h*3)/2) or 0 on error. */
+static inline tsk_size_t _cuda_codec_h264_pict_layout(cuda_codec_h264_t* self, void**output, tsk_size_t *output_size)
+{
+	if(self && self->decoder.cuBuffer.pcuPtr && self->decoder.cuBuffer.nSize)
+	{
+		const unsigned int w = TMEDIA_CODEC_VIDEO(self)->in.width;
+		const unsigned int w_div_2 = (w >> 1);
+		const unsigned int h = TMEDIA_CODEC_VIDEO(self)->in.height;
+		const unsigned int h_div_2 = (h >> 1);
+		const unsigned int pitch = self->decoder.cuBuffer.nPitch;
+		const unsigned int pitch_div_2 = (pitch >> 1);
+		const tsk_size_t xsize = (w * h * 3) >> 1; // 4:2:0 frame size
+		// resize if too short
+		if(*output_size < xsize)
+		{
+			if((*output = tsk_realloc(*output, xsize)))
+			{
+				*output_size = xsize;
+			}
+			else
+			{
+				*output_size = 0;
+				return 0;
+			}
+		}
+
+
+		register unsigned int y;
+		// p walks the source luma rows, q the source chroma rows (at h*pitch)
+		const unsigned char *p = (const unsigned char *)self->decoder.cuBuffer.pcuPtr, *q = p + (h * pitch);
+		// i writes luma output, j writes chroma output (after h*w luma bytes)
+		register unsigned char *i = (unsigned char *)*output, *j = i + (h * w);
+
+		for (y = 0; y < h; y++)
+		{
+			// luma
+			memcpy(i, p, w);
+			i += w;
+			p += pitch;
+
+			// chroma
+			memcpy(j, &q[(y&1) ? w_div_2 : 0], w_div_2);
+			j += w_div_2;
+			if(y&1)
+			{
+				q += pitch;
+			}
+		}
+
+		return xsize;
+	}
+	return 0;
+}
+
+
+
+/* CUVID parser callback: invoked when a new sequence header is parsed.
+ * Recreates the CUVID decoder when the coded picture size changed.
+ * Returns 1 on success, 0 on error (CUVID callback convention).
+ * Fixes vs. previous revision:
+ *  - cuCtxPopCurrent() is now only called when cuCtxPushCurrent() succeeded;
+ *    it used to run unconditionally at 'bail', unbalancing the CUDA context
+ *    stack whenever the size was unchanged or the push failed;
+ *  - the height test used to compare pFormat->coded_height against newHeight
+ *    (always equal), so a height-only change could never trigger a reinit;
+ *  - removed the inner 'cuResult' declaration that shadowed the outer one. */
+static int CUDAAPI _NVCallback_HandleVideoSequence(void *pvUserData, CUVIDEOFORMAT *pFormat)
+{
+	cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pvUserData;
+	CUresult cuResult;
+	tsk_bool_t ctxPushed = tsk_false;
+	int ret = 1;
+	int newWidth, newHeight;
+
+	if(!h264 || !pFormat)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return 0;//error
+	}
+
+	tsk_mutex_lock(h264->decoder.phMutex);
+
+	// http://corecodec.com/products/coreavc/guide
+	// CROP 1088 to 1080
+	// H.264 encoded video size is always a multiple of 16, and sequences that are 1080 pixels high are encoded as 1088 padded at the bottom.
+	// Also H.264 specifications provides a set of cropping parameters to signal that parts of the encoded picture are not important and should not be displayed.
+	// Some H.264 encoders fail to specify cropping parameters when encoding 1080 video.
+	newWidth = pFormat->coded_width;//pFormat->display_area.right - pFormat->display_area.left;
+	newHeight = pFormat->coded_height;//pFormat->display_area.bottom - pFormat->display_area.top;
+
+	if(newWidth != TMEDIA_CODEC_VIDEO(h264)->in.width || newHeight != TMEDIA_CODEC_VIDEO(h264)->in.height)
+	{
+		TSK_DEBUG_INFO("[H.264 CUDA decoder] display area = left:%d, right:%d, bottom:%d, top:%d",
+			pFormat->display_area.left,
+			pFormat->display_area.right,
+			pFormat->display_area.bottom,
+			pFormat->display_area.top
+			);
+
+		h264->decoder.cuInfo.ulWidth = newWidth;
+		h264->decoder.cuInfo.ulTargetWidth = newWidth;
+		h264->decoder.cuInfo.ulHeight = newHeight;
+		h264->decoder.cuInfo.ulTargetHeight = newHeight;
+
+		cuResult = cuCtxPushCurrent(h264->decoder.cuContext);
+		if(cuResult != CUDA_SUCCESS)
+		{
+			TSK_DEBUG_ERROR("cuCtxPushCurrent failed with error code=%d", (int)cuResult);
+			ret = 0; //error
+			goto bail;
+		}
+		ctxPushed = tsk_true;
+
+		// recreate the decoder with the new dimensions
+		if(h264->decoder.pInst)
+		{
+			cuvidDestroyDecoder(h264->decoder.pInst);
+			h264->decoder.pInst = NULL;
+		}
+		cuResult = cuvidCreateDecoder(&h264->decoder.pInst, &h264->decoder.cuInfo);
+		if(CUDA_SUCCESS != cuResult)
+		{
+			TSK_DEBUG_ERROR("cuvidCreateDecoder failed with error code=%d", (int)cuResult);
+			ret = 0; //error
+			goto bail;
+		}
+		else
+		{
+			TMEDIA_CODEC_VIDEO(h264)->in.width = newWidth;
+			TMEDIA_CODEC_VIDEO(h264)->in.height = newHeight;
+			ret = 1; //success
+		}
+	}
+bail:
+	if(ctxPushed)
+	{
+		cuResult = cuCtxPopCurrent(NULL); // balance the push above
+	}
+	tsk_mutex_unlock(h264->decoder.phMutex);
+	return ret;
+}
+
+// CUVID parser callback: submits one picture to the hardware decoder.
+// Decoder access is serialized with the codec's mutex.
+// Returns 1 on success, 0 on error (CUVID callback convention).
+static int CUDAAPI _NVCallback_HandlePictureDecode(void *pvUserData, CUVIDPICPARAMS *pPicParams)
+{
+	cuda_codec_h264_t* codec = (cuda_codec_h264_t*)pvUserData;
+	CUresult cuResult;
+
+	if(!codec || !pPicParams)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return 0;//error
+	}
+
+	tsk_mutex_lock(codec->decoder.phMutex);
+	cuResult = cuvidDecodePicture(codec->decoder.pInst, pPicParams);
+	tsk_mutex_unlock(codec->decoder.phMutex);
+
+	if(cuResult == CUDA_SUCCESS)
+	{
+		return 1;//success
+	}
+	TSK_DEBUG_ERROR("cuvidDecodePicture failed with error code= %d", cuResult);
+	return 0;//error
+}
+
+/* CUVID parser callback: a decoded picture is ready for display.
+ * Maps the frame, (re)allocates the page-locked host buffer when needed,
+ * copies the frame device->host, then unmaps and marks cuBuffer available
+ * so the decode path can lay it out via _cuda_codec_h264_pict_layout().
+ * Returns 1 on success, 0 on error (CUVID callback convention). */
+static int CUDAAPI _NVCallback_HandlePictureDisplay(void *pvUserData, CUVIDPARSERDISPINFO *pPicParams)
+{
+	cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pvUserData;
+	CUVIDPROCPARAMS vpp = {0};
+	CUdeviceptr devPtr;
+	CUresult cuResult;
+	tsk_size_t nv12_size;
+	tsk_bool_t mapped = tsk_false; // so 'bail' only unmaps what was mapped
+	int ret = 1;//success
+
+	if(!h264 || !pPicParams)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return 0;//error
+	}
+
+	cuResult = cuCtxPushCurrent(h264->decoder.cuContext);
+	if(cuResult != CUDA_SUCCESS)
+	{
+		TSK_DEBUG_ERROR("cuCtxPushCurrent failed with error code = %d", (int)cuResult);
+		ret = 0;//error
+		goto bail;
+	}
+
+	// map the decoded frame into a device pointer (also yields the pitch)
+	vpp.progressive_frame = pPicParams->progressive_frame;
+	vpp.top_field_first = pPicParams->top_field_first;
+	cuResult = cuvidMapVideoFrame(h264->decoder.pInst, pPicParams->picture_index, &devPtr, &h264->decoder.cuBuffer.nPitch, &vpp);
+
+	if(cuResult != CUDA_SUCCESS)
+	{
+		TSK_DEBUG_ERROR("cuvidMapVideoFrame failed with error code = %d", (int)cuResult);
+		ret = 0;//error
+		goto bail;
+	}
+	mapped = tsk_true;
+	// NV12 frame size using the (possibly padded) pitch
+	nv12_size = ((h264->decoder.cuBuffer.nPitch * TMEDIA_CODEC_VIDEO(h264)->in.height) * 3) >> 1;
+	// grow the page-locked host buffer when the frame no longer fits
+	if ((!h264->decoder.cuBuffer.pcuPtr) || (nv12_size > h264->decoder.cuBuffer.nSize))
+	{
+		h264->decoder.cuBuffer.nSize = 0;
+		if (h264->decoder.cuBuffer.pcuPtr)
+		{
+			cuResult = cuMemFreeHost(h264->decoder.cuBuffer.pcuPtr);
+			h264->decoder.cuBuffer.pcuPtr = NULL;
+		}
+		cuResult = cuMemAllocHost((void**)&h264->decoder.cuBuffer.pcuPtr, nv12_size);
+		if (cuResult != CUDA_SUCCESS)
+		{
+			TSK_DEBUG_ERROR("cuMemAllocHost failed to allocate %d bytes (error code=%d)", nv12_size, (int)cuResult);
+			h264->decoder.cuBuffer.pcuPtr = tsk_null;
+			h264->decoder.cuBuffer.nSize = 0;
+			ret = 0;//error
+		}
+		else
+		{
+			h264->decoder.cuBuffer.nSize = nv12_size;
+		}
+	}
+	if(h264->decoder.cuBuffer.pcuPtr)
+	{
+		// copy the frame from device memory to the host buffer
+		cuResult = cuMemcpyDtoH(h264->decoder.cuBuffer.pcuPtr, devPtr, nv12_size);
+	}
+
+bail:
+	if(mapped)
+	{
+		cuResult = cuvidUnmapVideoFrame(h264->decoder.pInst, devPtr);
+	}
+	cuResult = cuCtxPopCurrent(NULL);
+
+	// signal frame availability to the decode path
+	h264->decoder.cuBuffer.bAvail = (ret == 1);
+	return ret;
+}
+
+// NVCUVENC callback: hands the encoder our pre-allocated bitstream buffer
+// (pBufferPtr/nBufferSize, allocated in cuda_codec_h264_open_encoder()).
+static unsigned char* CUDAAPI _NVCallback_HandleAcquireBitStream(int *pBufferSize, void *pUserdata)
+{
+	cuda_codec_h264_t* codec = (cuda_codec_h264_t*)pUserdata;
+	if(codec && pBufferSize) {
+		*pBufferSize = (int)codec->encoder.nBufferSize;
+		return (unsigned char*)codec->encoder.pBufferPtr;
+	}
+	TSK_DEBUG_ERROR("Invalid parameter");
+	return tsk_null;
+}
+
+// NVCUVENC callback: an encoded buffer is ready -> packetize it over RTP
+// through the common H.264 encapsulation helper.
+static void CUDAAPI _NVCallback_HandleReleaseBitStream(int nBytesInBuffer, unsigned char *cb, void *pUserdata)
+{
+	tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)pUserdata;
+	if(common && cb && nBytesInBuffer) {
+		tdav_codec_h264_rtp_encap(common, (const uint8_t*)cb, (tsk_size_t)nBytesInBuffer);
+		return;
+	}
+	TSK_DEBUG_ERROR("Invalid parameter");
+}
+
+// NVCUVENC callback: frame-begin notification. Intentionally a no-op.
+static void CUDAAPI _NVCallback_HandleOnBeginFrame(const NVVE_BeginFrameInfo *pbfi, void *pUserdata)
+{
+	(void)pbfi;
+	(void)pUserdata;
+}
+
+// NVCUVENC callback: frame-end notification. Intentionally a no-op.
+static void CUDAAPI _NVCallback_HandleOnEndFrame(const NVVE_EndFrameInfo *pefi, void *pUserdata)
+{
+	return;
+} \ No newline at end of file
diff --git a/plugins/pluginCUDA/plugin_cuda_config.h b/plugins/pluginCUDA/plugin_cuda_config.h
new file mode 100644
index 0000000..4fceebb
--- /dev/null
+++ b/plugins/pluginCUDA/plugin_cuda_config.h
@@ -0,0 +1,75 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_CUDA_CONFIG_H
+#define PLUGIN_CUDA_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+#	define PLUGIN_CUDA_UNDER_WINDOWS	1
+#	if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP || WINAPI_FAMILY == WINAPI_FAMILY_APP)
+#		define PLUGIN_CUDA_UNDER_WINDOWS_RT		1
+#	endif
+#endif
+
+// DLL decoration: dllexport when building the plugin (PLUGIN_CUDA_EXPORTS),
+// dllimport for consumers, empty otherwise (static / non-Windows builds).
+#if (PLUGIN_CUDA_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(PLUGIN_CUDA_EXPORTS)
+# 	define PLUGIN_CUDA_API		__declspec(dllexport)
+# 	define PLUGIN_CUDA_GEXTERN	extern __declspec(dllexport)
+#elif (PLUGIN_CUDA_UNDER_WINDOWS || defined(__SYMBIAN32__)) && !defined(PLUGIN_CUDA_IMPORTS_IGNORE)
+# 	define PLUGIN_CUDA_API		__declspec(dllimport)
+# 	define PLUGIN_CUDA_GEXTERN	__declspec(dllimport)
+#else
+#	define PLUGIN_CUDA_API
+#	define PLUGIN_CUDA_GEXTERN	extern
+#endif
+
+// x86
+#if defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+#	define PLUGIN_CUDA_UNDER_X86	1
+#endif
+
+// Guards against C++ name mangling
+#ifdef __cplusplus
+#	define PLUGIN_CUDA_BEGIN_DECLS extern "C" {
+#	define PLUGIN_CUDA_END_DECLS }
+#else
+#	define PLUGIN_CUDA_BEGIN_DECLS
+#	define PLUGIN_CUDA_END_DECLS
+#endif
+
+// MSVC tweaks: map C99 'inline', silence CRT deprecation warnings, and allow
+// the 'inline' keyword macro above.
+#ifdef _MSC_VER
+#	define inline	__inline
+#	define _CRT_SECURE_NO_WARNINGS
+#	define _ALLOW_KEYWORD_MACROS
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+	#include <config.h>
+#endif
+
+#endif // PLUGIN_CUDA_CONFIG_H
diff --git a/plugins/pluginCUDA/plugin_cuda_tdav.cxx b/plugins/pluginCUDA/plugin_cuda_tdav.cxx
new file mode 100644
index 0000000..2d16b72
--- /dev/null
+++ b/plugins/pluginCUDA/plugin_cuda_tdav.cxx
@@ -0,0 +1,20 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+// This file is used to avoid duplication for the .obj files
+#include "../../tinyDAV/src/codecs/h264/tdav_codec_h264_rtp.c"
diff --git a/plugins/pluginCUDA/plugin_cuda_utils.cxx b/plugins/pluginCUDA/plugin_cuda_utils.cxx
new file mode 100644
index 0000000..94c7baf
--- /dev/null
+++ b/plugins/pluginCUDA/plugin_cuda_utils.cxx
@@ -0,0 +1,168 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_cuda_utils.h"
+
+#include "tsk_debug.h"
+
+#include <NVEncoderAPI.h>
+#include <cuda.h>
+#include <cuda_runtime_api.h>
+
+// Process-wide cached state for CudaUtils (no locking visible here -
+// NOTE(review): presumably only touched from a single thread; confirm).
+bool CudaUtils::g_bStarted = false;
+bool CudaUtils::g_bH264Checked = false;
+bool CudaUtils::g_bH264Supported = false;
+int CudaUtils::g_nCores = 0;
+
+// Initializes COM (multithreaded) and the CUDA driver API.
+// Returns S_OK on success, E_FAIL (or the COM error) otherwise.
+// The first *successful* initialization is cached; later calls return S_OK.
+HRESULT CudaUtils::Startup()
+{
+	if(!g_bStarted)
+	{
+		CUresult cuResult = CUDA_SUCCESS;
+		// 0x80010106 (RPC_E_CHANGED_MODE) when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
+		HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+		if(SUCCEEDED(hr) || hr == 0x80010106)
+		{
+			if((cuResult = cuInit(0)) != CUDA_SUCCESS)
+			{
+				TSK_DEBUG_ERROR("cuInit() failed with error code = %08x", cuResult);
+				hr = E_FAIL;
+			}
+			else
+			{
+				hr = S_OK;
+			}
+		}
+		// FIX: only cache success. The original set g_bStarted even on failure,
+		// so every subsequent call wrongly reported S_OK without CUDA being up.
+		g_bStarted = SUCCEEDED(hr);
+		return hr;
+	}
+	return S_OK;
+}
+
+// Counterpart of Startup(). Currently a no-op: the CUDA driver API exposes no
+// public deinit (hence the commented-out cuDeinit() below).
+HRESULT CudaUtils::Shutdown()
+{
+	// cuDeinit();
+	return S_OK;
+}
+
+// Probes whether the NVIDIA hardware encoder supports H.264 (both Baseline
+// AND Main profiles are required). The probe runs once; the result is cached
+// in g_bH264Supported / g_bH264Checked.
+bool CudaUtils::IsH264Supported()
+{
+	if(g_bH264Checked)
+	{
+		return g_bH264Supported;
+	}
+
+	HRESULT hr = S_OK;
+	// FIX: declared (and NULL-initialized) before the first CHECK_HR(). In the
+	// original, CHECK_HR(Startup()) could "goto bail" across this declaration,
+	// so the cleanup below read an uninitialized pEncoder.
+	NVEncoder pEncoder = NULL;
+
+	CHECK_HR(hr = Startup());
+
+	g_bH264Checked = true;
+
+	CHECK_HR(hr = NVGetHWEncodeCaps());
+	CHECK_HR(hr = NVCreateEncoder(&pEncoder));
+	// Both Base and Main profiles *must* be supported
+	CHECK_HR(hr = NVIsSupportedCodecProfile(pEncoder, NV_CODEC_TYPE_H264, NVVE_H264_PROFILE_BASELINE));
+	CHECK_HR(hr = NVIsSupportedCodecProfile(pEncoder, NV_CODEC_TYPE_H264, NVVE_H264_PROFILE_MAIN));
+
+	g_bH264Supported = true;
+
+bail:
+	if(pEncoder)
+	{
+		NVDestroyEncoder(pEncoder);
+		pEncoder = NULL;
+	}
+
+	return g_bH264Supported;
+}
+
+// Maps an SM (compute-capability) version to the number of CUDA cores per SM.
+// The first computed value is cached in g_nCores.
+int CudaUtils::ConvertSMVer2Cores(int nMajor, int nMinor)
+{
+	if(g_nCores != 0)
+	{
+		return g_nCores;
+	}
+
+	// Defines for GPU Architecture types (using the SM version to determine the # of cores per SM
+	typedef struct
+	{
+		int SM; // 0xMm (hexidecimal notation), M = SM Major version, and m = SM minor version
+		int Cores;
+	} sSMtoCores;
+
+	static const sSMtoCores nGpuArchCoresPerSM[] =
+	{
+		{ 0x10,  8 }, // Tesla Generation (SM 1.0) G80 class
+		{ 0x11,  8 }, // Tesla Generation (SM 1.1) G8x class
+		{ 0x12,  8 }, // Tesla Generation (SM 1.2) G9x class
+		{ 0x13,  8 }, // Tesla Generation (SM 1.3) GT200 class
+		{ 0x20, 32 }, // Fermi Generation (SM 2.0) GF100 class
+		{ 0x21, 48 }, // Fermi Generation (SM 2.1) GF10x class
+		{ 0x30, 192}, // Kepler Generation (SM 3.0) GK10x class
+		{ 0x35, 192}, // Kepler Generation (SM 3.5) GK11x class
+		{   -1, -1 }  // FIX: sentinel terminator. The original table had none,
+		              // so the "!= -1" scan below read past the array (UB).
+	};
+
+	int index = 0;
+	while (nGpuArchCoresPerSM[index].SM != -1)
+	{
+		if (nGpuArchCoresPerSM[index].SM == ((nMajor << 4) + nMinor))
+		{
+			// FIX: return the match directly. The original only 'break'-ed and
+			// then unconditionally overwrote g_nCores with the default below.
+			g_nCores = nGpuArchCoresPerSM[index].Cores;
+			return g_nCores;
+		}
+
+		index++;
+	}
+
+	// If we don't find the values, we default use the previous one to run properly
+	TSK_DEBUG_INFO("MapSMtoCores for SM %d.%d is undefined. Default to use %d Cores/SM", nMajor, nMinor, nGpuArchCoresPerSM[index - 1].Cores);
+	g_nCores = nGpuArchCoresPerSM[index - 1].Cores;
+
+	return g_nCores;
+}
+
+// Returns the id of the CUDA device with the highest
+// (multiProcessorCount x clockRate) product, or 0 when no device can be queried.
+int CudaUtils::GetMaxGflopsDeviceId()
+{
+	int device_count = 0;
+	// FIX: the original ignored the return code and queried device 0 even when
+	// no CUDA device is present.
+	if( cudaGetDeviceCount( &device_count ) != cudaSuccess || device_count <= 0 )
+	{
+		return 0;
+	}
+
+	int max_gflops_device = 0;
+	int max_gflops = 0;
+
+	for( int current_device = 0; current_device < device_count; ++current_device )
+	{
+		cudaDeviceProp device_properties;
+		if( cudaGetDeviceProperties( &device_properties, current_device ) != cudaSuccess )
+		{
+			continue; // skip devices we cannot query
+		}
+		int gflops = device_properties.multiProcessorCount * device_properties.clockRate;
+		if( gflops > max_gflops )
+		{
+			max_gflops = gflops;
+			max_gflops_device = current_device;
+		}
+	}
+
+	return max_gflops_device;
+} \ No newline at end of file
diff --git a/plugins/pluginCUDA/plugin_cuda_utils.h b/plugins/pluginCUDA/plugin_cuda_utils.h
new file mode 100644
index 0000000..4829275
--- /dev/null
+++ b/plugins/pluginCUDA/plugin_cuda_utils.h
@@ -0,0 +1,56 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_CUDA_UTILS_H
+#define PLUGIN_CUDA_UTILS_H
+
+#include "plugin_cuda_config.h"
+
+#include <Windows.h>
+
+#undef CHECK_HR
+// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
+#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
+
+#undef SafeRelease
+#define SafeRelease(ppT) \
+{ \
+    if (*ppT) \
+    { \
+        (*ppT)->Release(); \
+        *ppT = NULL; \
+    } \
+}
+
+// Static helpers around COM/CUDA initialization and NVIDIA encoder capability
+// probing. Results are cached in the private globals below.
+class CudaUtils
+{
+public:
+	static HRESULT Startup();
+	static HRESULT Shutdown();
+	// True when the NVIDIA HW encoder supports H.264 Baseline AND Main profiles.
+	static bool IsH264Supported();
+	// Maps an SM (compute-capability) version to CUDA cores per SM.
+	static int ConvertSMVer2Cores(int nMajor, int nMinor);
+	// Id of the device with the highest multiProcessorCount*clockRate product.
+	static int GetMaxGflopsDeviceId();
+
+private:
+	static bool g_bStarted;       // Startup() already succeeded
+	static bool g_bH264Checked;   // IsH264Supported() probe already ran
+	static bool g_bH264Supported; // cached probe result
+	static int g_nCores;          // cached ConvertSMVer2Cores() result
+};
+
+#endif/* PLUGIN_CUDA_UTILS_H */
diff --git a/plugins/pluginCUDA/version.rc b/plugins/pluginCUDA/version.rc
new file mode 100644
index 0000000..bd81664
--- /dev/null
+++ b/plugins/pluginCUDA/version.rc
@@ -0,0 +1,102 @@
+// Microsoft Visual C++ generated resource script.
+//
+// #include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""afxres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 2.0.0.1156
+ PRODUCTVERSION 2.0.0.1156
+ FILEFLAGSMASK 0x17L
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904b0"
+ BEGIN
+ VALUE "CompanyName", "Doubango Telecom"
+ VALUE "FileDescription", "Doubango IMS Framework NVIDIA CUDA Plugin"
+ VALUE "FileVersion", "2.0.0.1156"
+ VALUE "InternalName", "pluginCUDA.dll"
+ VALUE "LegalCopyright", "(c) 2010-2013 Doubango Telecom. All rights reserved."
+ VALUE "OriginalFilename", "pluginCUDA.dll"
+ VALUE "ProductName", "Doubango IMS Framework NVIDIA CUDA Plugin"
+ VALUE "ProductVersion", "2.0.0.1156"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
diff --git a/plugins/pluginDirectShow/dllmain_dshow.cxx b/plugins/pluginDirectShow/dllmain_dshow.cxx
new file mode 100644
index 0000000..2b1eda8
--- /dev/null
+++ b/plugins/pluginDirectShow/dllmain_dshow.cxx
@@ -0,0 +1,156 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_dshow_config.h"
+
+#include "tinymedia/tmedia_producer.h"
+#include "tinymedia/tmedia_consumer.h"
+
+#include "tsk_plugin.h"
+#include "tsk_debug.h"
+
+#include "internals/DSUtils.h"
+
+#include <streams.h>
+
+#if !defined(ENABLE_SCREENCAST)
+# define ENABLE_SCREENCAST 0
+#endif /* ENABLE_SCREENCAST */
+
+PLUGIN_DSHOW_BEGIN_DECLS /* BEGIN */
+PLUGIN_DSHOW_API int __plugin_get_def_count();
+PLUGIN_DSHOW_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+PLUGIN_DSHOW_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+PLUGIN_DSHOW_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+PLUGIN_DSHOW_END_DECLS /* END */
+
+extern const tmedia_consumer_plugin_def_t *plugin_video_dshow_consumer_plugin_def_t;
+extern const tmedia_producer_plugin_def_t *plugin_video_dshow_producer_plugin_def_t;
+extern const tmedia_producer_plugin_def_t *plugin_screencast_dshow_producer_plugin_def_t;
+
+// Empty COM class-factory table. NOTE(review): presumably required to link
+// against the DirectShow base-classes library even though this plugin
+// registers no COM classes - confirm.
+CFactoryTemplate g_Templates[]=
+{ { L""
+, NULL
+, NULL
+, NULL
+, NULL
+}
+};
+int g_cTemplates = sizeof(g_Templates)/sizeof(g_Templates[0]);
+
+#if !defined(_WIN32_WCE)
+// Standard DLL entry point. This plugin performs no per-process or per-thread
+// setup/teardown, so every notification is accepted as-is.
+BOOL APIENTRY DllMain( HMODULE hModule,
+                       DWORD  ul_reason_for_call,
+                       LPVOID lpReserved
+					 )
+{
+	(void)hModule;
+	(void)ul_reason_for_call;
+	(void)lpReserved;
+	return TRUE;
+}
+#endif
+
+// Index of each plugin definition exposed by this module; PLUGIN_INDEX_COUNT
+// is what __plugin_get_def_count() reports to the plugin framework.
+typedef enum PLUGIN_INDEX_E
+{
+	PLUGIN_INDEX_VIDEO_CONSUMER,
+	PLUGIN_INDEX_VIDEO_PRODUCER,
+#if 0
+	PLUGIN_INDEX_SCREENCAST_PRODUCER,
+#endif
+	PLUGIN_INDEX_COUNT
+}
+PLUGIN_INDEX_T;
+
+
+// Number of plugin definitions exported by this module (see PLUGIN_INDEX_E).
+int __plugin_get_def_count()
+{
+	return PLUGIN_INDEX_COUNT;
+}
+
+// Plugin type at @index. The video consumer is only advertised when Direct3D9
+// is available (it is a D3D9-based renderer).
+// NOTE(review): PLUGIN_INDEX_SCREENCAST_PRODUCER is compiled out (#if 0) in
+// the enum above; building with ENABLE_SCREENCAST=1 without restoring it
+// would not compile - confirm intended.
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+	switch(index){
+		case PLUGIN_INDEX_VIDEO_CONSUMER:
+			return IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
+		case PLUGIN_INDEX_VIDEO_PRODUCER:
+#if ENABLE_SCREENCAST
+		case PLUGIN_INDEX_SCREENCAST_PRODUCER:
+#endif
+			return tsk_plugin_def_type_producer;
+		default:
+			{
+				TSK_DEBUG_ERROR("No plugin at index %d", index);
+				return tsk_plugin_def_type_none;
+			}
+	}
+}
+
+// Media type handled by the plugin at @index; mirrors the structure of
+// __plugin_get_def_type_at() (consumer requires D3D9 support).
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+	switch(index){
+		case PLUGIN_INDEX_VIDEO_CONSUMER:
+		{
+			return IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+		}
+		case PLUGIN_INDEX_VIDEO_PRODUCER:
+		{
+			return tsk_plugin_def_media_type_video;
+		}
+#if ENABLE_SCREENCAST
+		case PLUGIN_INDEX_SCREENCAST_PRODUCER:
+		{
+			return tsk_plugin_def_media_type_screencast;
+		}
+#endif
+		default:
+		{
+			TSK_DEBUG_ERROR("No plugin at index %d", index);
+			return tsk_plugin_def_media_type_none;
+		}
+	}
+}
+
+// Plugin definition at @index, or tsk_null when the index is out of range
+// (or when the D3D9-based consumer is unavailable).
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+	if (index == PLUGIN_INDEX_VIDEO_CONSUMER) {
+		return IsD3D9Supported() ? plugin_video_dshow_consumer_plugin_def_t : tsk_null;
+	}
+	if (index == PLUGIN_INDEX_VIDEO_PRODUCER) {
+		return plugin_video_dshow_producer_plugin_def_t;
+	}
+#if ENABLE_SCREENCAST
+	if (index == PLUGIN_INDEX_SCREENCAST_PRODUCER) {
+		return plugin_screencast_dshow_producer_plugin_def_t;
+	}
+#endif
+	TSK_DEBUG_ERROR("No plugin at index %d", index);
+	return tsk_null;
+}
diff --git a/plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h b/plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h
new file mode 100644
index 0000000..1817fb5
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h
@@ -0,0 +1,64 @@
+/* Copyright (C) 2014 Mamadou DIOP
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSBASECAPTUREGRAPH_H
+#define PLUGIN_DSHOW_DSBASECAPTUREGRAPH_H
+
+#include "plugin_dshow_config.h"
+
+#include <vector>
+#include <control.h>
+#include "internals/DSCaptureFormat.h"
+
+#if defined(_WIN32_WCE)
+#	include <internals/wince/DSSampleGrabber.h>
+#	include <internals/wince/DSNullFilter.h>
+#	include <internals/wince/DSISampleGrabberCB.h>
+#else
+#	include <qedit.h>
+#endif
+
+// Abstract base for a DirectShow video-capture graph: device selection
+// (setSource), format negotiation (setParameters), filter wiring
+// (connect/disconnect) and transport control (start/stop/pause).
+// Captured frames are delivered through the sample-grabber callback passed
+// to the constructor.
+class DSBaseCaptureGraph
+{
+public:
+#if defined(_WIN32_WCE)
+	DSBaseCaptureGraph(DSISampleGrabberCB* callback, HRESULT *hr) {}
+#else
+	DSBaseCaptureGraph(ISampleGrabberCB* callback, HRESULT *hr) {}
+#endif
+	virtual ~DSBaseCaptureGraph() {}
+
+	// Capture formats supported by the currently selected device.
+	virtual std::vector<DSCaptureFormat> *getFormats() = 0;
+
+	virtual HRESULT setSource(const std::string &devicePath) = 0;
+	virtual HRESULT setParameters(DSCaptureFormat *format, int framerate) = 0;
+
+	virtual HRESULT connect() = 0;
+	virtual HRESULT disconnect() = 0;
+
+	virtual HRESULT start() = 0;
+	virtual HRESULT stop() = 0;
+	virtual HRESULT pause() = 0;
+	virtual bool isRunning() = 0;
+	virtual bool isPaused() = 0;
+
+	virtual std::string getDeviceId() const = 0;
+
+	// Media type of the connected capture pin (valid after connect()).
+	virtual HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType) = 0;
+};
+
+#endif /* PLUGIN_DSHOW_DSBASECAPTUREGRAPH_H */
diff --git a/plugins/pluginDirectShow/internals/DSBufferWriter.h b/plugins/pluginDirectShow/internals/DSBufferWriter.h
new file mode 100644
index 0000000..dbe1484
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSBufferWriter.h
@@ -0,0 +1,48 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSBUFFERWRITTER_H
+#define PLUGIN_DSHOW_DSBUFFERWRITTER_H
+/*
+// TODO: do it only once
+#if !defined(TDSHOW_DEFINE_GUID) && !defined(_WIN32_WCE)
+#define TDSHOW_DEFINE_GUID(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
+	EXTERN_C const GUID DECLSPEC_SELECTANY name \
+        = { l, w1, w2, { b1, b2, b3, b4, b5, b6, b7, b8 } }
+#elif !defined(TDSHOW_DEFINE_GUID) && defined(_WIN32_WCE)
+#define TDSHOW_DEFINE_GUID(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
+	EXTERN_C const GUID __declspec(selectany) name \
+        = { l, w1, w2, { b1, b2, b3, b4, b5, b6, b7, b8 } }
+#endif
+
+// {27AD9929-E4E7-423b-8BDD-8AF5AC894DE0}
+TDSHOW_DEFINE_GUID(IID_DSBufferWriter,
+0x27ad9929, 0xe4e7, 0x423b, 0x8b, 0xdd, 0x8a, 0xf5, 0xac, 0x89, 0x4d, 0xe0);
+ */
+
+
+// Minimal interface for filters that write video frames into a caller-owned
+// buffer; the buffer and the image format are supplied by the consumer.
+// (On desktop Windows it is ref-counted through IUnknown; not on WinCE.)
+class DSBufferWriter
+#ifndef _WIN32_WCE
+	: public IUnknown
+#endif
+{
+public:
+	// Destination buffer the filter must fill (caller keeps ownership).
+	virtual void setBuffer (void* pBuffer, int size) = 0;
+	virtual HRESULT setImageFormat(UINT width, UINT height/*, GUID subType, UINT fps*/) = 0;
+};
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSCaptureFormat.cxx b/plugins/pluginDirectShow/internals/DSCaptureFormat.cxx
new file mode 100644
index 0000000..378a215
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSCaptureFormat.cxx
@@ -0,0 +1,60 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSCaptureFormat.h"
+#include <uuids.h>
+
+
+// Heuristic score of how well a requested (w x h) matches this format:
+// exact size scores 100, a request larger than the format in both dimensions
+// scores 0, anything else scores proportionally; RGB formats are doubled
+// because no color conversion is needed. Larger is better.
+int DSCaptureFormat::getMatchScore(int w, int h)
+{
+	int factor;
+
+	// FIX: the original tested (h = height) - assignment, not comparison -
+	// so any request with a matching width counted as an exact match.
+	if ((w == this->width) && (h == this->height)){
+		factor = 100;
+	}
+	else if ((w > this->width) && (h > this->height)){
+		factor = 0;
+	}
+	else{
+		factor = (50 * w) / this->width + (50 * h) / this->height;
+	}
+
+	if (isRGB()){
+		factor *= 2;
+	}
+
+	return factor;
+}
+
+// True when the current chroma is one of the RGB/ARGB DirectShow subtypes.
+bool DSCaptureFormat::isRGB()
+{
+	// Order used is optimized for most used RGB types
+	static const GUID* kRgbSubTypes[] = {
+		&MEDIASUBTYPE_RGB32, &MEDIASUBTYPE_RGB24, &MEDIASUBTYPE_RGB565,
+		&MEDIASUBTYPE_RGB555, &MEDIASUBTYPE_RGB8, &MEDIASUBTYPE_RGB4,
+		&MEDIASUBTYPE_RGB1,
+#ifndef _WIN32_WCE
+		&MEDIASUBTYPE_ARGB32, &MEDIASUBTYPE_ARGB4444, &MEDIASUBTYPE_ARGB1555,
+#endif
+	};
+
+	for (int i = 0; i < (int)(sizeof(kRgbSubTypes)/sizeof(kRgbSubTypes[0])); ++i) {
+		if (IsEqualGUID(this->chroma, *kRgbSubTypes[i])) {
+			return true;
+		}
+	}
+
+	return false;
+}
diff --git a/plugins/pluginDirectShow/internals/DSCaptureFormat.h b/plugins/pluginDirectShow/internals/DSCaptureFormat.h
new file mode 100644
index 0000000..5e1cabb
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSCaptureFormat.h
@@ -0,0 +1,47 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSCAPTUREFORMAT_H
+#define PLUGIN_DSHOW_DSCAPTUREFORMAT_H
+
+#include "plugin_dshow_config.h"
+#include <strmif.h>
+
+
+// Value type describing one capture capability of a video device: frame
+// size, framerate and chroma (DirectShow media-subtype GUID).
+class DSCaptureFormat
+{
+public:
+	DSCaptureFormat(int w, int h, int f, GUID c) : width(w), height(h), fps(f), chroma(c) {};
+	virtual ~DSCaptureFormat() {};
+
+	int getWidth() { return this->width; };
+	int getHeight() { return this->height; };
+	int getFramerate() { return this->fps; };
+	GUID getChroma() { return this->chroma; };
+
+	// Heuristic score of how well (w x h) fits this format; larger is better.
+	int getMatchScore(int w, int h);
+	// True when chroma is an RGB/ARGB subtype (no color conversion needed).
+	bool isRGB();
+
+private:
+	int width;   // frame width in pixels
+	int height;  // frame height in pixels
+	int fps;     // framerate
+	GUID chroma; // DirectShow media subtype
+};
+
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSCaptureGraph.cxx b/plugins/pluginDirectShow/internals/DSCaptureGraph.cxx
new file mode 100644
index 0000000..3da6da1
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSCaptureGraph.cxx
@@ -0,0 +1,436 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSCaptureGraph.h"
+#include "internals/DSUtils.h"
+#include "internals/DSCaptureUtils.h"
+
+#include "tsk_debug.h"
+
+#include <iostream>
+
+using namespace std;
+
+#ifdef _WIN32_WCE
+DSCaptureGraph::DSCaptureGraph(DSISampleGrabberCB* callback, HRESULT *hr)
+: DSBaseCaptureGraph(callback, hr)
+#else
+DSCaptureGraph::DSCaptureGraph(ISampleGrabberCB* callback, HRESULT *hr)
+: DSBaseCaptureGraph(callback, hr)
+#endif
+{
+	// Callback invoked by the sample grabber for every captured frame.
+	this->grabberCallback = callback;
+
+	// All graph members start NULL; createCaptureGraph() below populates them.
+	this->captureFormat = NULL;
+	this->captureGraphBuilder = NULL;
+	this->graphBuilder = NULL;
+
+	this->sourceFilter = NULL;
+	this->sampleGrabberFilter = NULL;
+
+#ifdef _WIN32_WCE
+	this->colorConvertor565 = NULL;
+#else
+	this->frameRateFilter = NULL;
+#endif
+
+	this->nullRendererFilter = NULL;
+	this->grabberController = NULL;
+	this->mediaController = NULL;
+	this->mediaEventController = NULL;
+
+	this->streamConfiguration = NULL;
+
+	this->running = FALSE;
+	this->paused = FALSE;
+	this->deviceId = "";
+
+	// Build the filter graph; *hr reports success/failure to the caller.
+	*hr = this->createCaptureGraph();
+}
+
+// Releases every COM interface/filter held by the graph.
+// NOTE(review): the graph is not stopped/disconnected here - presumably the
+// caller does that before destruction; confirm.
+DSCaptureGraph::~DSCaptureGraph()
+{
+	SAFE_RELEASE(this->streamConfiguration);
+
+	SAFE_RELEASE(this->mediaEventController);
+	SAFE_RELEASE(this->mediaController);
+	SAFE_RELEASE(this->grabberController);
+
+#if defined(_WIN32_WCE)
+	SAFE_RELEASE(this->colorConvertor565);
+#else
+#endif
+
+	SAFE_RELEASE(this->nullRendererFilter);
+	SAFE_RELEASE(this->sampleGrabberFilter);
+	SAFE_RELEASE(this->sourceFilter);
+
+	SAFE_RELEASE(this->graphBuilder);
+	SAFE_RELEASE(this->captureGraphBuilder);
+}
+
+// Selects the capture device identified by @devicePath: removes any previous
+// source filter, creates the new one, refreshes the supported-format list,
+// queries IAMStreamConfig and adds the filter to the graph.
+HRESULT DSCaptureGraph::setSource(const std::string &devicePath)
+{
+	HRESULT hr = E_FAIL;
+
+	if (this->sourceFilter){
+		this->graphBuilder->RemoveFilter(this->sourceFilter);
+	}
+
+	SAFE_RELEASE(this->streamConfiguration);
+	SAFE_RELEASE(this->sourceFilter);
+
+	// Create the filter
+	this->deviceId = devicePath;
+	hr = createSourceFilter(&this->deviceId, &this->sourceFilter);
+
+	if (this->sourceFilter){
+		// Gets the supported formats
+		this->supportedFormats.clear();
+		getSupportedFormats(this->sourceFilter, &this->supportedFormats);
+
+		// Query for video stream config
+		// NOTE(review): this HRESULT is overwritten by AddFilter() below, so a
+		// failure to obtain IAMStreamConfig is silently lost - confirm intended.
+		hr = this->captureGraphBuilder->FindInterface(
+			&PIN_CATEGORY_CAPTURE,
+			&MEDIATYPE_Video,
+			this->sourceFilter,
+			IID_IAMStreamConfig,
+			reinterpret_cast<void**>(&this->streamConfiguration));
+
+		hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_WEBCAM);
+	}
+
+	return hr;
+}
+
+// Applies @format (frame size + chroma) on the capture pin, then programs the
+// target @framerate on the grabber controller (WCE) or frame-rate filter
+// (desktop). On success, remembers @format as the active capture format.
+HRESULT DSCaptureGraph::setParameters(DSCaptureFormat *format, int framerate)
+{
+	HRESULT hr = E_FAIL;
+	AM_MEDIA_TYPE *mediaType = NULL;
+
+	if (!this->streamConfiguration) goto bail;
+
+	hr = this->streamConfiguration->GetFormat(&mediaType);
+	if (FAILED(hr)) goto bail;
+
+	// FIX: scoped block so the early "goto bail" paths above no longer jump
+	// over these initializations (ill-formed in C++).
+	{
+		VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(mediaType->pbFormat);
+		BITMAPINFOHEADER* bih = &vih->bmiHeader;
+
+		int w = format->getWidth();
+		int h = format->getHeight();
+
+		// Preserve the sign of the native dimensions (orientation).
+		bool wN = (bih->biWidth<0);
+		bool hN = (bih->biHeight<0);
+
+		// DIBS are DWORD aligned
+		int data_size = h * ((w * bih->biBitCount + 31) / 32) * 4;
+
+		bih->biSize = sizeof(BITMAPINFOHEADER);
+		bih->biWidth = w*(wN?-1:1);
+		bih->biHeight = h*(hN?-1:1);
+		bih->biSizeImage = data_size;
+
+		//vih->dwBitRate = framerate * data_size;
+		//vih->AvgTimePerFrame = SECONDS_TO_100NS(framerate);
+
+		mediaType->cbFormat = sizeof(VIDEOINFOHEADER);
+		//mediaType->lSampleSize = data_size;
+		mediaType->subtype = format->getChroma();
+
+		hr = this->streamConfiguration->SetFormat(mediaType);
+		if (FAILED(hr)) goto bail;
+
+#if defined(_WIN32_WCE)
+		hr = this->grabberController->SetFps((int) DS_SECONDS_FROM_100NS(vih->AvgTimePerFrame)/*format->getFramerate()*/, framerate);
+		if (FAILED(hr)) goto bail;
+		hr = this->grabberController->SetSize(w,h);
+#else
+		// Set fps using tdshow filter
+		hr = this->frameRateFilter->SetFps((int) ((float)vih->AvgTimePerFrame/10000.f)/*format->getFramerate()*/, framerate);
+#endif
+		if (FAILED(hr)) goto bail;
+	}
+
+	this->captureFormat = format;
+
+bail:
+	// FIX: guard against NULL - DeleteMediaType() dereferences its argument,
+	// and the early "goto bail" paths reach here with mediaType == NULL.
+	if (mediaType) {
+		DeleteMediaType(mediaType);
+	}
+
+	return hr;
+}
+
+#if defined(_WIN32_WCE)
+# include "internals/wince/DSNullFilter.h"
+#endif
+
+// Wires the graph: source -> (565 convertor on WCE | frame-rate filter on
+// desktop) -> sample grabber -> null renderer. Requires setSource() and
+// setParameters() to have succeeded first.
+// NOTE(review): the RGB and non-RGB branches below build the same chain on
+// desktop (an AVI Decompressor is auto-inserted by the graph manager when
+// needed) - they differ only in logging on WCE; confirm the split is intended.
+HRESULT DSCaptureGraph::connect()
+{
+	HRESULT hr;
+
+	if (!this->sourceFilter){
+		TSK_DEBUG_ERROR("Invalid source filter");
+		return E_FAIL;
+	}
+
+	if (!this->captureFormat){
+		TSK_DEBUG_ERROR("Invalid capture format");
+		return E_FAIL;
+	}
+
+	if (!this->graphBuilder){
+		// FIX: message said "Invalid grash builder"
+		TSK_DEBUG_ERROR("Invalid graph builder");
+		return E_FAIL;
+	}
+
+	if (this->captureFormat->isRGB())
+	{
+#if defined(_WIN32_WCE)
+		hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565) ;		 if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+		hr = ConnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter) ; if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+		hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+#else
+		hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);			  if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+		hr = ConnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);	  if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+		hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+#endif
+	}
+	else
+	{
+#if defined(_WIN32_WCE)
+		hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565) ;		 if(FAILED(hr))return hr;
+		hr = ConnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter) ; if(FAILED(hr))return hr;
+		hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr))return hr;
+#else
+		// No convertor needed
+		// AVI Decompressor Filter is automatically by the Filter Graph Manager when needed
+		hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);			  if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+		hr = ConnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);	  if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+		hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+#endif
+	}
+
+	return hr;
+}
+
+// Tears down the pin connections created by connect(), in graph order:
+// source -> (color convertor on WinCE | framerate filter on desktop) ->
+// sample grabber -> null renderer. Most DisconnectFilters() results are not
+// checked so teardown continues past individual failures; the returned
+// HRESULT is that of the last attempt.
+HRESULT DSCaptureGraph::disconnect()
+{
+	HRESULT hr;
+
+	// Nothing to disconnect if no source device was ever attached.
+	if (!this->sourceFilter) {
+		return E_FAIL;
+	}
+
+	// The negotiated format decides which intermediate filter was inserted.
+	if (!this->captureFormat) {
+		return E_FAIL;
+	}
+
+	if (this->captureFormat->isRGB())
+	{
+#if defined(_WIN32_WCE)
+		hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565);
+		hr = DisconnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter);
+		hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+#else
+		hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);
+		hr = DisconnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);
+		hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+#endif
+	}
+	else
+	{
+#if defined(_WIN32_WCE)
+		// NOTE(review): unlike the other branches, these WinCE calls return on the
+		// first failure - presumably unintentional asymmetry; confirm.
+		hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565); if(FAILED(hr))return hr;
+		hr = DisconnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter); if(FAILED(hr))return hr;
+		hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr))return hr;
+#else
+		// Desktop: identical chain to the RGB case (no convertor on desktop builds).
+		hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);
+		hr = DisconnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);
+		hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+#endif
+	}
+
+	return hr;
+}
+
+// Runs (or resumes) the capture graph.
+// Returns S_OK immediately when the graph is already running and not paused.
+// On failure the error from IMediaControl::Run() is propagated.
+HRESULT DSCaptureGraph::start()
+{
+	HRESULT hr;
+
+	if (isRunning() && !isPaused()) {
+		return S_OK;
+	}
+
+	//this->mediaController->Stop();
+
+	// Run() may return S_FALSE while the graph is still transitioning;
+	// SUCCEEDED() treats S_FALSE as success, which is the intended behavior.
+	hr = this->mediaController ? this->mediaController->Run() : E_POINTER;
+	/*if (hr == S_FALSE)
+	{
+		cerr << "DSCaptureGraph::mediaController->Start() has failed with " << hr << ". Waiting for transition." << endl;
+		FILTER_STATE pfs;
+		hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
+		hr = this->mediaController->Run();
+	}*/
+
+	if (!SUCCEEDED(hr))
+	{
+#if defined(_WIN32_WCE)
+		MessageBox(NULL, _T("Starting DirectShow Graph Failed"), _T("Failure"), MB_OK);
+		//assert(1==15);
+#endif
+		TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Run() has failed with %ld", hr);
+		return hr;
+	}
+	this->running = true;
+	// FIX: clear the pause flag when resuming; otherwise isPaused() keeps
+	// reporting true after a pause()/start() cycle until stop() is called.
+	this->paused = false;
+	return hr;
+}
+
+// Pauses the graph when it is running; no-op (returns S_OK) otherwise.
+// The paused flag is only set when IMediaControl::Pause() succeeds.
+HRESULT DSCaptureGraph::pause()
+{
+	HRESULT hr = S_OK;
+	if (isRunning()) {
+		hr = this->mediaController->Pause();
+		if (SUCCEEDED(hr)) {
+			this->paused = TRUE;
+		}
+	}
+	return hr;
+}
+
+// Stops the capture graph. The running/paused flags are reset even when
+// IMediaControl::Stop() fails, so the wrapper state cannot get stuck.
+HRESULT DSCaptureGraph::stop()
+{
+	HRESULT hr;
+#if 0 // Must not
+	hr = this->mediaController->Pause();
+	if (hr == S_FALSE)
+	{
+		TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Pause() has failed with %ld. Waiting for transition.", hr);
+		FILTER_STATE pfs;
+		hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
+	}
+#endif
+	hr = this->mediaController->Stop();
+	if (!SUCCEEDED(hr))
+	{
+		TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Stop() has failed with %ld", hr);
+	}
+	this->running = false;
+	this->paused = false;
+	return hr;
+}
+
+// Whether start() succeeded and stop() has not been called since.
+bool DSCaptureGraph::isRunning()
+{
+	return this->running;
+}
+
+// Whether the graph is currently paused (set by pause(), cleared by stop()).
+bool DSCaptureGraph::isPaused()
+{
+	return this->paused;
+}
+
+// Copies the media type negotiated on the sample grabber's input pin into
+// *mediaType.
+HRESULT DSCaptureGraph::getConnectedMediaType(AM_MEDIA_TYPE *mediaType)
+{
+#if defined(_WIN32_WCE)
+	// NOTE(review): shallow struct copy - the pbFormat/pUnk pointers remain
+	// shared with the grabber's internal copy, so the caller must not free
+	// them; confirm against call sites.
+	memmove(mediaType, &this->grabberController->GetMediaType(), sizeof(AM_MEDIA_TYPE));
+	return S_OK;
+#else
+	return this->grabberController->GetConnectedMediaType(mediaType);
+#endif
+}
+
+// Builds the capture graph skeleton: creates the graph/capture-graph builders,
+// the sample grabber, the null renderer and (desktop) the framerate filter /
+// (WinCE) the RGB565 color convertor, adds them to the graph and registers the
+// grabber callback. The source filter itself is attached later by setSource().
+HRESULT DSCaptureGraph::createCaptureGraph()
+{
+	HRESULT hr;
+
+#if defined(_WIN32_WCE)
+	// Create capture graph builder
+	CHECK_HR(hr = COCREATE(CLSID_CaptureGraphBuilder, IID_ICaptureGraphBuilder2, this->captureGraphBuilder));
+	CHECK_HR(hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder));
+	CHECK_HR(hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder));
+
+	// Create filters
+	LPUNKNOWN pUnk1 = NULL, pUnk2 = NULL;
+	CHECK_HR(hr = COCREATE(CLSID_Colour, IID_IBaseFilter, this->colorConvertor565));
+	this->sampleGrabberFilter = new DSSampleGrabber(FITLER_SAMPLE_GRABBER, pUnk1, &hr); CHECK_HR(hr);
+	this->nullRendererFilter = new DSNullFilter(/*FILTER_NULL_RENDERER,*/ pUnk2, &hr); CHECK_HR(hr);
+	// FIX: assign hr so a NULL controller is reported as E_FAIL instead of
+	// falling through to bail with a stale (successful) hr.
+	this->grabberController = (DSSampleGrabber*)(this->sampleGrabberFilter); if (!this->grabberController) CHECK_HR(hr = E_FAIL);
+
+	// Add Filters
+	CHECK_HR(hr = this->graphBuilder->AddFilter(this->colorConvertor565, FILTER_COLOR_CONVERTOR_565));
+	CHECK_HR(hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER));
+	CHECK_HR(hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER));
+
+	// Find media control
+	CHECK_HR(hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController));
+
+	// Set callback
+	CHECK_HR(hr = this->grabberController->SetCallback(this->grabberCallback));
+#else
+	// Create capture graph builder
+	CHECK_HR(hr = COCREATE(CLSID_CaptureGraphBuilder2, IID_ICaptureGraphBuilder2, this->captureGraphBuilder));
+
+	// Create the graph builder
+	CHECK_HR(hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder));
+
+	// Initialize the Capture Graph Builder.
+	CHECK_HR(hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder));
+
+	// Create the sample grabber filter
+	CHECK_HR(hr = COCREATE(CLSID_SampleGrabber, IID_IBaseFilter, this->sampleGrabberFilter));
+
+	// Create tdshow filter
+	LPUNKNOWN pUnk = NULL;
+	this->frameRateFilter = new DSFrameRateFilter(FILTER_FRAMERATE, pUnk, &hr); CHECK_HR(hr);
+	// FIX: the original test was "if (!this->frameRateFilter == NULL)", which
+	// parses as "(!ptr) == 0" - TRUE for a valid pointer and FALSE for a NULL
+	// one, i.e. the exact inverse of the intended NULL check (it made every
+	// successful allocation bail with E_FAIL). Also assign hr so bail returns
+	// E_FAIL rather than a stale value.
+	if (!this->frameRateFilter) CHECK_HR(hr = E_FAIL);
+
+	// Create the NULL renderer
+	CHECK_HR(hr = COCREATE(CLSID_NullRenderer, IID_IBaseFilter, this->nullRendererFilter));
+
+	// Add sample grabber to the graph
+	CHECK_HR(hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER));
+
+	// Add null renderer to the graph
+	CHECK_HR(hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER));
+
+	// Add tdshow filter
+	CHECK_HR(hr = this->graphBuilder->AddFilter(this->frameRateFilter, FILTER_FRAMERATE));
+
+	// Find media control
+	CHECK_HR(hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController));
+
+	// Create the sample grabber
+	CHECK_HR(hr = QUERY(this->sampleGrabberFilter, IID_ISampleGrabber, this->grabberController));
+
+	// Set the sample grabber media type (RGB24)
+	// TODO : CHECK
+	AM_MEDIA_TYPE mt;
+	ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
+	mt.majortype = MEDIATYPE_Video;
+	mt.subtype = MEDIASUBTYPE_RGB24;
+	mt.formattype = FORMAT_VideoInfo;
+
+	CHECK_HR(hr = this->grabberController->SetMediaType(&mt));
+
+	// Set sample grabber media type
+	this->grabberController->SetOneShot(FALSE);
+	this->grabberController->SetBufferSamples(FALSE);
+
+	CHECK_HR(hr = this->grabberController->SetCallback(this->grabberCallback, 1));
+#endif
+
+bail:
+	return hr;
+} \ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/DSCaptureGraph.h b/plugins/pluginDirectShow/internals/DSCaptureGraph.h
new file mode 100644
index 0000000..6bf8862
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSCaptureGraph.h
@@ -0,0 +1,106 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSCAPTUREGRAPH_H
+#define PLUGIN_DSHOW_DSCAPTUREGRAPH_H
+
+#include "plugin_dshow_config.h"
+#include <vector>
+#include <control.h>
+#include "internals/DSBaseCaptureGraph.h"
+#include "internals/DSFrameRateFilter.h"
+
+#if defined(_WIN32_WCE)
+# include "internals/wince/DSSampleGrabber.h"
+# include "internals/wince/DSNullFilter.h"
+# include "internals/wince/DSISampleGrabberCB.h"
+#else
+# include <qedit.h>
+#endif
+
+
+// DirectShow capture graph wrapper: builds and controls the filter chain
+// source -> [RGB565 color convertor (WinCE) | framerate filter (desktop)] ->
+// sample grabber -> null renderer, and exposes start/stop/pause control.
+class DSCaptureGraph : public DSBaseCaptureGraph
+{
+public:
+#ifdef _WIN32_WCE
+	DSCaptureGraph(DSISampleGrabberCB* callback, HRESULT *hr);
+#else
+	DSCaptureGraph(ISampleGrabberCB* callback, HRESULT *hr);
+#endif
+	virtual ~DSCaptureGraph();
+
+	// Formats advertised by the currently selected capture device.
+	std::vector<DSCaptureFormat> *getFormats() { return &this->supportedFormats; };
+
+	HRESULT setSource(const std::string &devicePath);
+	HRESULT setParameters(DSCaptureFormat *format, int framerate);
+
+	// Connect/disconnect the pins of the filter chain.
+	HRESULT connect();
+	HRESULT disconnect();
+
+	// Graph state control (thin IMediaControl wrappers).
+	HRESULT start();
+	HRESULT stop();
+	HRESULT pause();
+	bool isRunning();
+	bool isPaused();
+
+	std::string getDeviceId() const { return this->deviceId; };
+
+	// Media type negotiated on the sample grabber's input pin.
+	HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
+
+private:
+	HRESULT createCaptureGraph();
+
+private:
+#ifdef _WIN32_WCE
+	DSISampleGrabberCB *grabberCallback;
+#else
+	ISampleGrabberCB *grabberCallback;
+#endif
+
+	ICaptureGraphBuilder2 *captureGraphBuilder;
+	IGraphBuilder *graphBuilder;
+
+	// Graph filters: capture source, sink, and the grabber in between.
+	IBaseFilter *sourceFilter;
+	IBaseFilter *nullRendererFilter;
+	IBaseFilter *sampleGrabberFilter;
+
+#ifdef _WIN32_WCE
+	IBaseFilter *colorConvertor565; //http://msdn.microsoft.com/en-us/library/aa926076.aspx
+#else
+	DSFrameRateFilter *frameRateFilter;
+#endif
+
+#ifdef _WIN32_WCE
+	DSSampleGrabber *grabberController;
+#else
+	ISampleGrabber *grabberController;
+#endif
+
+	IMediaControl *mediaController;
+	IMediaEventEx *mediaEventController;
+
+	IAMStreamConfig *streamConfiguration;
+
+	// Capture formats enumerated from the device and the one currently in use.
+	std::vector<DSCaptureFormat> supportedFormats;
+	DSCaptureFormat *captureFormat;
+
+	// State flags maintained by start()/stop()/pause().
+	bool running;
+	bool paused;
+	std::string deviceId;
+};
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSCaptureUtils.cxx b/plugins/pluginDirectShow/internals/DSCaptureUtils.cxx
new file mode 100644
index 0000000..d95f996
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSCaptureUtils.cxx
@@ -0,0 +1,377 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSUtils.h"
+#include "internals/DSCaptureUtils.h"
+#include <amvideo.h>
+#include <uuids.h>
+#include <mtype.h>
+
+#if defined (_WIN32_WCE)
+#include <atlbase.h>
+#include <atlstr.h>
+#else
+#include <atlconv.h>
+#endif
+#include <iostream>
+#include <assert.h>
+
+#include "tsk_debug.h"
+
+#if defined (_WIN32_WCE)
+# include "internals/wince/cpropertybag.h"
+#endif
+
+// Enumerates the system's video capture devices and appends a VideoGrabberName
+// (device path + friendly description) for each one to *names.
+// @param prefix  When non-empty, prepended (with a ':' separator) to each path.
+// @param names   Output vector; entries are appended, existing content kept.
+// @return S_OK (or S_FALSE when there is no device); a COM/Win32 error otherwise.
+HRESULT enumerateCaptureDevices(const std::string &prefix, std::vector<VideoGrabberName> *names)
+{
+	HRESULT hr = S_OK;
+
+#ifdef _WIN32_WCE
+
+	// FIXME: use FindNextDevice to query all devices
+	HANDLE handle = NULL;
+	DEVMGR_DEVICE_INFORMATION di;
+
+	TCHAR pwzName[MAX_PATH]; memset(pwzName,NULL,MAX_PATH);
+
+	GUID guidCamera = { 0xCB998A05, 0x122C, 0x4166, 0x84, 0x6A,
+		0x93, 0x3E, 0x4D, 0x7E, 0x3C, 0x86 }; // http://msdn.microsoft.com/en-us/library/aa918757.aspx
+
+	di.dwSize = sizeof(di);
+
+	for( int i=0; ; i++)
+	{
+		if(0 == i)
+		{ /* 1st time */
+			handle = FindFirstDevice( DeviceSearchByGuid, &guidCamera, &di );
+			if(!handle || !di.hDevice)
+			{
+				hr = ( HRESULT_FROM_WIN32( GetLastError() ));
+				goto bail;
+			}
+		}
+		else if(handle)
+		{ /* 2nd or 3rd time */
+			BOOL ret = FindNextDevice(handle, &di);
+			if(!ret || !di.hDevice)
+			{
+				/* No 2nd or 3rd camera ==> do not return error*/
+				goto bail;
+			}
+		}
+		else assert(0);
+
+		StringCchCopy( pwzName, MAX_PATH, di.szDeviceName );
+
+		/* from LPWSTR to LPSTR */
+		char mbstr_name[MAX_PATH]; memset(mbstr_name,NULL,MAX_PATH);
+		wcstombs(mbstr_name, pwzName, MAX_PATH);
+
+		VideoGrabberName grabberName(std::string((const char*)mbstr_name), std::string((const char*)mbstr_name));
+		names->push_back(grabberName);
+	}
+
+bail:
+	/* close */
+	if(handle) FindClose( handle );
+
+#else
+	// FIX: initialize to NULL - on an early "goto bail" (e.g. COCREATE or
+	// CreateClassEnumerator failure) these used to be passed, uninitialized,
+	// to SAFE_RELEASE (undefined behavior).
+	ICreateDevEnum *deviceEnum = NULL;
+	IEnumMoniker *enumerator = NULL;
+	IMoniker *moniker = NULL;
+
+	// Create the System Device Enumerator
+	hr = COCREATE(CLSID_SystemDeviceEnum, IID_ICreateDevEnum, deviceEnum);
+	if (FAILED(hr)) goto bail;
+
+	// Ask for a device enumerator
+	hr = deviceEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enumerator, INCLUDE_CATEGORY_FLAG);
+	if (FAILED(hr)) goto bail;
+
+	// hr = S_FALSE and enumerator is NULL if there is no device to enumerate
+	if (!enumerator) goto bail;
+
+	USES_CONVERSION;
+
+	while (enumerator->Next(1, &moniker, NULL) == S_OK)
+	{
+		// Get the properties bag for each device
+		IPropertyBag *propBag;
+		hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, reinterpret_cast<void**>(&propBag));
+		if (FAILED(hr))
+		{
+			SAFE_RELEASE(moniker);
+			continue;
+		}
+
+		std::string name;
+		std::string description;
+
+		VARIANT varName;
+		VariantInit(&varName);
+		VARIANT varDescription;
+		VariantInit(&varDescription);
+
+		// Find the device path (uniqueness is guaranteed)
+		hr = propBag->Read(L"DevicePath", &varName, 0);
+		if (SUCCEEDED(hr))
+		{
+			if (prefix != "") name = prefix + ":";
+			name = name + std::string(W2A(varName.bstrVal));
+		}
+
+		// Find friendly name or the description
+		hr = propBag->Read(L"FriendlyName", &varDescription, 0);
+		if (SUCCEEDED(hr))
+		{
+			description = std::string(W2A(varDescription.bstrVal));
+		}
+		else
+		{
+			hr = propBag->Read(L"Description", &varDescription, 0);
+			if (SUCCEEDED(hr)) description = std::string(W2A(varDescription.bstrVal));
+		}
+
+		hr = VariantClear(&varName);
+		hr = VariantClear(&varDescription);
+
+		SAFE_RELEASE(propBag);
+		SAFE_RELEASE(moniker);
+
+		// Add it to the list
+		if (name != "")
+		{
+			VideoGrabberName grabberName(name, description);
+			names->push_back(grabberName);
+		}
+	}
+
+bail:
+	SAFE_RELEASE(enumerator);
+	SAFE_RELEASE(deviceEnum);
+#endif
+
+	return hr;
+}
+
+// Creates the DirectShow source filter for the capture device whose path
+// matches *devicePath ("0" on WinCE / "Null" on desktop selects the first
+// device found). On success *sourceFilter receives the created filter and
+// (desktop) *devicePath is updated with the actual device path; *sourceFilter
+// is left NULL when no device matches.
+HRESULT createSourceFilter(std::string *devicePath, IBaseFilter **sourceFilter)
+{
+	// FIX: initialize hr - on WinCE, when no matching device was found, the
+	// function used to "goto bail" and return an uninitialized HRESULT.
+	HRESULT hr = E_FAIL;
+
+	IEnumMoniker *enumerator = NULL;
+	IMoniker *moniker = NULL;
+	bool found = false;
+
+	// Set sourceFilter to null
+	SAFE_RELEASE((*sourceFilter));
+
+#if defined( _WIN32_WCE)
+	CPropertyBag pBag;
+	HANDLE handle = NULL;
+	DEVMGR_DEVICE_INFORMATION di;
+	TCHAR pwzName[MAX_PATH];
+	CComVariant varCamName;
+	IPersistPropertyBag *propBag = NULL;
+	GUID guidCamera = { 0xCB998A05, 0x122C, 0x4166, 0x84, 0x6A,
+		0x93, 0x3E, 0x4D, 0x7E, 0x3C, 0x86 }; // http://msdn.microsoft.com/en-us/library/aa918757.aspx
+
+	di.dwSize = sizeof(di);
+
+	for( int i=0; ; i++)
+	{
+		if(0 == i)
+		{ /* 1st time */
+			handle = FindFirstDevice( DeviceSearchByGuid, &guidCamera, &di );
+			if(!handle || !di.hDevice)
+			{
+				hr = ( HRESULT_FROM_WIN32( GetLastError() ));
+				goto bail;
+			}
+		}
+		else if(handle)
+		{ /* 2nd or 3rd time */
+			BOOL ret = FindNextDevice(handle, &di);
+			if(!ret || !di.hDevice)
+			{
+				/* No 2nd or 3rd camera ==> do not return error*/
+				goto bail;
+			}
+		}
+		else assert(0);
+
+		StringCchCopy( pwzName, MAX_PATH, di.szDeviceName );
+
+		/* from LPWSTR to LPSTR */
+		char mbstr_name[MAX_PATH];
+		memset(mbstr_name,NULL,MAX_PATH);
+		wcstombs(mbstr_name, pwzName, MAX_PATH);
+
+		if((std::string((const char*)mbstr_name) == (*devicePath)) || ("0" == (*devicePath)))
+		{
+			varCamName = pwzName;
+			if( varCamName.vt != VT_BSTR )
+			{
+				hr = E_OUTOFMEMORY;
+				goto bail;
+			}
+
+			// Create Source filter
+			hr = COCREATE(CLSID_VideoCapture, IID_IBaseFilter, *sourceFilter);
+			if(FAILED(hr)) goto bail;
+
+			// Query PropertyBag
+			hr = QUERY((*sourceFilter), IID_IPersistPropertyBag, propBag);
+			if(FAILED(hr)) goto bail;
+
+			hr = pBag.Write( L"VCapName", &varCamName );
+			if(FAILED(hr)) goto bail;
+
+			hr = propBag->Load( &pBag, NULL );
+			if(FAILED(hr)) goto bail;
+		}
+	}
+#else
+	ICreateDevEnum *deviceEnum = NULL;
+	IPropertyBag *propBag = NULL;
+
+	// Create the System Device Enumerator
+	hr = COCREATE(CLSID_SystemDeviceEnum, IID_ICreateDevEnum, deviceEnum);
+	if (FAILED(hr)){
+		goto bail;
+	}
+
+	// Ask for a device enumerator
+	hr = deviceEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enumerator, INCLUDE_CATEGORY_FLAG);
+	if(FAILED(hr)){
+		goto bail;
+	}
+
+	// hr = S_FALSE and enumerator is NULL if there is no device to enumerate
+	if(!enumerator){
+		goto bail;
+	}
+
+	USES_CONVERSION;
+
+	while (!found && (enumerator->Next(1, &moniker, NULL) == S_OK)){
+		// Get the properties bag for each device
+		hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, reinterpret_cast<void**>(&propBag));
+		if (FAILED(hr)){
+			SAFE_RELEASE(moniker);
+			continue;
+		}
+
+		std::string name;
+
+		VARIANT varName;
+		VariantInit(&varName);
+
+		// Find the device path (uniqueness is guaranteed)
+		hr = propBag->Read(L"DevicePath", &varName, 0);
+		if (SUCCEEDED(hr)) name = std::string(W2A(varName.bstrVal));
+
+		// Check for device path
+		// "Null" means first found
+		if ((name == (*devicePath)) ||
+			("Null" == (*devicePath)))
+		{
+			hr = moniker->BindToObject(0, 0, IID_IBaseFilter, reinterpret_cast<void**>(&(*sourceFilter)));
+			if (SUCCEEDED(hr)){
+				(*devicePath) = name;
+				found = true;
+			}
+		}
+
+		hr = VariantClear(&varName);
+
+		SAFE_RELEASE(propBag);
+		SAFE_RELEASE(moniker);
+	}
+#endif
+
+bail:
+#ifdef _WIN32_WCE
+	if(handle) FindClose(handle);
+#else
+	SAFE_RELEASE(deviceEnum);
+#endif
+	SAFE_RELEASE(moniker);
+	SAFE_RELEASE(enumerator);
+	SAFE_RELEASE(propBag);
+
+	return hr;
+}
+
+// Enumerates the capture capabilities exposed by the source filter's output
+// pin and appends a DSCaptureFormat for each FORMAT_VideoInfo entry to *formats.
+HRESULT getSupportedFormats(IBaseFilter *sourceFilter, std::vector<DSCaptureFormat> *formats)
+{
+	HRESULT hr = E_FAIL;
+	IPin *pinOut = NULL;
+	IAMStreamConfig *streamConfig = NULL;
+	AM_MEDIA_TYPE *mediaType = NULL;
+	int count, size;
+
+	// Check source filter pointer
+	if (!sourceFilter) goto bail;
+
+	pinOut = GetPin(sourceFilter, PINDIR_OUTPUT);
+	if(!pinOut) goto bail;
+
+	// Retrieve the stream config interface
+	hr = QUERY(pinOut, IID_IAMStreamConfig, streamConfig);
+	if (FAILED(hr)) goto bail;
+
+	// Get the number of capabilities
+	hr = streamConfig->GetNumberOfCapabilities(&count, &size);
+	if (FAILED(hr)) goto bail;
+
+	// Probe the current format (sanity check that the pin can report one).
+	hr = streamConfig->GetFormat(&mediaType);
+	if (FAILED(hr)) goto bail;
+	// FIX: GetFormat() allocates a media type that was previously leaked when
+	// GetStreamCaps() overwrote the pointer on the first loop iteration.
+	DeleteMediaType(mediaType);
+	mediaType = NULL;
+
+	// Iterate through the formats
+	for (int i = 0; i < count; i++){
+		VIDEO_STREAM_CONFIG_CAPS streamConfigCaps;
+
+		hr = streamConfig->GetStreamCaps(i, &mediaType, reinterpret_cast<BYTE*>(&streamConfigCaps));
+
+		if (FAILED(hr)){
+			TSK_DEBUG_ERROR("Failed to get Stream caps");
+			break;
+		}
+
+		if (streamConfigCaps.guid == FORMAT_VideoInfo){
+			VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(mediaType->pbFormat);
+			BITMAPINFOHEADER* bih = &vih->bmiHeader;
+
+			int width = abs(bih->biWidth);
+			int height = abs(bih->biHeight);
+			// NOTE(review): AvgTimePerFrame is in 100ns units, so this value is
+			// the frame *duration in milliseconds*, not frames-per-second; kept
+			// as-is because DSCaptureFormat consumers may rely on it - confirm.
+			int fps = (int) ((float)(vih->AvgTimePerFrame)/10000.f);
+			GUID chroma = mediaType->subtype;
+
+			// Add format to the list
+			DSCaptureFormat format(width, height, fps, chroma);
+			formats->push_back(format);
+		}
+
+		DeleteMediaType(mediaType);
+	}
+
+bail:
+	SAFE_RELEASE(streamConfig);
+	SAFE_RELEASE(pinOut);
+
+	return hr;
+}
diff --git a/plugins/pluginDirectShow/internals/DSCaptureUtils.h b/plugins/pluginDirectShow/internals/DSCaptureUtils.h
new file mode 100644
index 0000000..7d99ed2
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSCaptureUtils.h
@@ -0,0 +1,60 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSCAPTUREUTILS_H
+#define PLUGIN_DSHOW_DSCAPTUREUTILS_H
+
+#include "plugin_dshow_config.h"
+#include "internals/DSCaptureFormat.h"
+#include "internals/VideoGrabberName.h"
+
+#include <vector>
+
+// --------------------------------------------------------------------------------
+
+#ifdef INCLUDE_VFW_DEVICES
+#define INCLUDE_CATEGORY_FLAG 0
+#else
+#define INCLUDE_CATEGORY_FLAG CDEF_DEVMON_FILTER | CDEF_DEVMON_PNP_DEVICE
+#endif
+
+// --------------------------------------------------------------------------------
+
+/**
+* \brief Fills in a vector with VideoGrabberName instances constructed from the video capture devices.
+* \param prefix A string prepended (with a ':' separator) to each device path; may be empty
+* \param names A pointer to the device-name vector to append to
+* \return An HRESULT value
+*/
+HRESULT enumerateCaptureDevices(const std::string &prefix, std::vector<VideoGrabberName> *names);
+
+/**
+* \brief Creates the source filter for the video capture device matching the given device path ("Null" selects the first device found).
+* \param devicePath A pointer to a string containing the device path; updated with the actual path on success
+* \param sourceFilter A pointer that receives the created filter, or NULL if the device is not found
+* \return An HRESULT value
+*/
+HRESULT createSourceFilter(std::string *devicePath, IBaseFilter **sourceFilter);
+
+/**
+* \brief Fills in a vector with DSCaptureFormat instances constructed from the given video capture device.
+* \param sourceFilter The source filter of the capture device to query
+* \param formats A pointer to the format vector to append to
+* \return An HRESULT value
+*/
+HRESULT getSupportedFormats(IBaseFilter *sourceFilter, std::vector<DSCaptureFormat> *formats);
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSDibHelper.cxx b/plugins/pluginDirectShow/internals/DSDibHelper.cxx
new file mode 100644
index 0000000..b7f40d5
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDibHelper.cxx
@@ -0,0 +1,80 @@
+#if !defined(_WIN32_WCE)
+//------------------------------------------------------------------------------
+// File: DibHelper.cpp
+//
+// Desc: DirectShow sample code - In-memory push mode source filter
+// Helper routines for manipulating bitmaps.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+//------------------------------------------------------------------------------
+
+#include <windows.h>
+
+#include "dsdibhelper.h"
+
+
+HBITMAP CopyScreenToBitmap(LPRECT lpRect, BYTE *pData, BITMAPINFO *pHeader)
+{
+ HDC hScrDC, hMemDC; // screen DC and memory DC
+ HBITMAP hBitmap, hOldBitmap; // handles to deice-dependent bitmaps
+ int nX, nY, nX2, nY2; // coordinates of rectangle to grab
+ int nWidth, nHeight; // DIB width and height
+ int xScrn, yScrn; // screen resolution
+
+ // check for an empty rectangle
+ if (IsRectEmpty(lpRect))
+ return NULL;
+
+ // create a DC for the screen and create
+ // a memory DC compatible to screen DC
+ hScrDC = CreateDC(TEXT("DISPLAY"), NULL, NULL, NULL);
+ hMemDC = CreateCompatibleDC(hScrDC);
+
+ // get points of rectangle to grab
+ nX = lpRect->left;
+ nY = lpRect->top;
+ nX2 = lpRect->right;
+ nY2 = lpRect->bottom;
+
+ // get screen resolution
+ xScrn = GetDeviceCaps(hScrDC, HORZRES);
+ yScrn = GetDeviceCaps(hScrDC, VERTRES);
+
+ //make sure bitmap rectangle is visible
+ if (nX < 0)
+ nX = 0;
+ if (nY < 0)
+ nY = 0;
+ if (nX2 > xScrn)
+ nX2 = xScrn;
+ if (nY2 > yScrn)
+ nY2 = yScrn;
+
+ nWidth = nX2 - nX;
+ nHeight = nY2 - nY;
+
+ // create a bitmap compatible with the screen DC
+ hBitmap = CreateCompatibleBitmap(hScrDC, nWidth, nHeight);
+
+ // select new bitmap into memory DC
+ hOldBitmap = (HBITMAP) SelectObject(hMemDC, hBitmap);
+
+ // bitblt screen DC to memory DC
+ BitBlt(hMemDC, 0, 0, nWidth, nHeight, hScrDC, nX, nY, SRCCOPY);
+
+ // select old bitmap back into memory DC and get handle to
+ // bitmap of the screen
+ hBitmap = (HBITMAP) SelectObject(hMemDC, hOldBitmap);
+
+ // Copy the bitmap data into the provided BYTE buffer
+ GetDIBits(hScrDC, hBitmap, 0, nHeight, pData, pHeader, DIB_RGB_COLORS);
+
+ // clean up
+ DeleteDC(hScrDC);
+ DeleteDC(hMemDC);
+
+ // return handle to the bitmap
+ return hBitmap;
+}
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/DSDibHelper.h b/plugins/pluginDirectShow/internals/DSDibHelper.h
new file mode 100644
index 0000000..7b5481e
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDibHelper.h
@@ -0,0 +1,106 @@
+//------------------------------------------------------------------------------
+// File: DibHelper.H
+//
+// Desc: DirectShow sample code - Helper code for bitmap manipulation
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+//------------------------------------------------------------------------------
+
+#ifndef PLUGIN_DSHOW_DSDIBHELPER_H
+#define PLUGIN_DSHOW_DSDIBHELPER_H
+
+#define HDIB HANDLE
+
+/* DIB macros */
+#define IS_WIN30_DIB(lpbi) ((*(LPDWORD)(lpbi)) == sizeof(BITMAPINFOHEADER))
+#define RECTWIDTH(lpRect) ((lpRect)->right - (lpRect)->left)
+#define RECTHEIGHT(lpRect) ((lpRect)->bottom - (lpRect)->top)
+
+// Function prototypes
+HDIB BitmapToDIB (HBITMAP hBitmap, HPALETTE hPal);
+HDIB ChangeBitmapFormat (HBITMAP hBitmap,
+ WORD wBitCount,
+ DWORD dwCompression,
+ HPALETTE hPal);
+HDIB ChangeDIBFormat (HDIB hDIB, WORD wBitCount, DWORD dwCompression);
+
+HBITMAP CopyScreenToBitmap(LPRECT lpRect, BYTE *pData, BITMAPINFO *pHeader);
+HDIB CopyScreenToDIB (LPRECT);
+HBITMAP CopyWindowToBitmap (HWND, WORD);
+HDIB CopyWindowToDIB (HWND, WORD);
+
+HPALETTE CreateDIBPalette (HDIB);
+HDIB CreateDIB(DWORD, DWORD, WORD);
+WORD DestroyDIB (HDIB);
+
+void DIBError (int ErrNo);
+DWORD DIBHeight (LPSTR lpDIB);
+WORD DIBNumColors (LPSTR lpDIB);
+HBITMAP DIBToBitmap (HDIB hDIB, HPALETTE hPal);
+DWORD DIBWidth (LPSTR lpDIB);
+
+LPSTR FindDIBBits (LPSTR lpDIB);
+HPALETTE GetSystemPalette (void);
+HDIB LoadDIB (LPSTR);
+
+BOOL PaintBitmap (HDC, LPRECT, HBITMAP, LPRECT, HPALETTE);
+BOOL PaintDIB (HDC, LPRECT, HDIB, LPRECT, HPALETTE);
+
+int PalEntriesOnDevice (HDC hDC);
+WORD PaletteSize (LPSTR lpDIB);
+WORD SaveDIB (HDIB, LPSTR);
+
+#endif /* PLUGIN_DSHOW_DSDIBHELPER_H */
+//------------------------------------------------------------------------------
+// File: DibHelper.H
+//
+// Desc: DirectShow sample code - Helper code for bitmap manipulation
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+//------------------------------------------------------------------------------
+
+#ifndef PLUGIN_DSHOW_DSDIBHELPER_H
+#define PLUGIN_DSHOW_DSDIBHELPER_H
+
+#define HDIB HANDLE
+
+/* DIB macros */
+#define IS_WIN30_DIB(lpbi) ((*(LPDWORD)(lpbi)) == sizeof(BITMAPINFOHEADER))
+#define RECTWIDTH(lpRect) ((lpRect)->right - (lpRect)->left)
+#define RECTHEIGHT(lpRect) ((lpRect)->bottom - (lpRect)->top)
+
+// Function prototypes
+HDIB BitmapToDIB (HBITMAP hBitmap, HPALETTE hPal);
+HDIB ChangeBitmapFormat (HBITMAP hBitmap,
+ WORD wBitCount,
+ DWORD dwCompression,
+ HPALETTE hPal);
+HDIB ChangeDIBFormat (HDIB hDIB, WORD wBitCount, DWORD dwCompression);
+
+HBITMAP CopyScreenToBitmap(LPRECT lpRect, BYTE *pData, BITMAPINFO *pHeader);
+HDIB CopyScreenToDIB (LPRECT);
+HBITMAP CopyWindowToBitmap (HWND, WORD);
+HDIB CopyWindowToDIB (HWND, WORD);
+
+HPALETTE CreateDIBPalette (HDIB);
+HDIB CreateDIB(DWORD, DWORD, WORD);
+WORD DestroyDIB (HDIB);
+
+void DIBError (int ErrNo);
+DWORD DIBHeight (LPSTR lpDIB);
+WORD DIBNumColors (LPSTR lpDIB);
+HBITMAP DIBToBitmap (HDIB hDIB, HPALETTE hPal);
+DWORD DIBWidth (LPSTR lpDIB);
+
+LPSTR FindDIBBits (LPSTR lpDIB);
+HPALETTE GetSystemPalette (void);
+HDIB LoadDIB (LPSTR);
+
+BOOL PaintBitmap (HDC, LPRECT, HBITMAP, LPRECT, HPALETTE);
+BOOL PaintDIB (HDC, LPRECT, HDIB, LPRECT, HPALETTE);
+
+int PalEntriesOnDevice (HDC hDC);
+WORD PaletteSize (LPSTR lpDIB);
+WORD SaveDIB (HDIB, LPSTR);
+
+#endif /* PLUGIN_DSHOW_DSDIBHELPER_H */ \ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/DSDisplay.cxx b/plugins/pluginDirectShow/internals/DSDisplay.cxx
new file mode 100644
index 0000000..326b86c
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDisplay.cxx
@@ -0,0 +1,622 @@
+/*
+* Copyright (C) 2010-2011 Mamadou Diop.
+*
+* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*
+*/
+#include "internals/DSDisplay.h"
+#include "internals/DSUtils.h"
+
+#include "tsk_list.h"
+#include "tsk_debug.h"
+
+#include <string>
+
+using namespace std;
+
+#define USE_OVERLAY 0
+#define OVERLAY_TIMEOUT 3
+#define WM_GRAPHNOTIFY WM_APP + 1
+
+#define FSCREEN_MIN_IDEAL_WIDTH 352
+#define FSCREEN_MIN_IDEAL_HEIGHT 288
+
+// Maps a native window handle (HWND) to the DSDisplay instance hooked to it,
+// so the static window procedure below can route Win32 messages back to C++.
+typedef struct tdshow_display_s
+{
+ TSK_DECLARE_OBJECT;
+
+ HWND hwnd;
+ DSDisplay* display;
+}
+tdshow_display_t;
+typedef tsk_list_t tdshow_displays_L_t;
+const tsk_object_def_t *tdshow_display_def_t;
+
+// Static list used to find which display is linked to a given hWnd
+static tdshow_displays_L_t* __directshow__Displays = tsk_null;
+
+/*== Predicate function to find tdshow_display_t object by HWND.
+Returns 0 when the item's hwnd equals *hWnd (list "find" convention), non-zero otherwise. */
+static int __pred_find_display_by_hwnd(const tsk_list_item_t *item, const void *hWnd)
+{
+ if(item && item->data){
+ const tdshow_display_t *display = (const tdshow_display_t *)item->data;
+ int ret = 0;
+ // Handle comparison implemented as a saturated 32-bit pointer subtraction (0 == match)
+ tsk_subsat_int32_ptr(display->hwnd, *((HWND*)hWnd), &ret);
+ return ret;
+ }
+ return -1;
+}
+
+// C callback that dispatches window events to the right display.
+// Looks up the DSDisplay hooked to hWnd in the global list (under the list
+// lock) and forwards the message to it; falls back to DefWindowProc when no
+// display is registered for this window.
+static LRESULT CALLBACK __directshow__WndProcWindow(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
+{
+ LRESULT result = FALSE;
+ BOOL resultSet = FALSE;
+
+ if(__directshow__Displays){
+ tsk_list_lock(__directshow__Displays);
+
+ const tdshow_display_t *display = (const tdshow_display_t *)tsk_list_find_object_by_pred(__directshow__Displays, __pred_find_display_by_hwnd, &hWnd);
+ if((resultSet = (display && display->display))){
+ result = display->display->handleEvents(hWnd, uMsg, wParam, lParam);
+ }
+
+ tsk_list_unlock(__directshow__Displays);
+ }
+
+ return resultSet ? result : DefWindowProc(hWnd, uMsg, wParam, lParam);
+}
+
+
+// Builds the display: creates the underlying filter graph (and, when
+// USE_OVERLAY is enabled, the overlay) and hides the video window until
+// start() is called. *hr reports the graph-creation result to the caller.
+// Defaults: 176x144 (QCIF) at 15 fps, not attached, not fullscreen.
+DSDisplay::DSDisplay(HRESULT *hr)
+{
+ this->window = NULL;
+ this->parentWindowProc = NULL;
+ this->hooked = false;
+ this->fullscreen = false;
+ this->bPluginFirefox = false;
+ this->top = 0;
+ this->left = 0;
+ this->width = this->imgWidth = 176;
+ this->height = this->imgHeight = 144;
+ this->fps = 15;
+
+ this->graph = new DSDisplayGraph(hr);
+ if (FAILED(*hr)) return;
+#if USE_OVERLAY
+ this->overlay = new DSDisplayOverlay();
+#else
+ this->overlay = NULL;
+#endif
+
+ // Keep the renderer window hidden until the display is started
+ this->graph->getVideoWindow()->put_Visible(OAFALSE);
+}
+
+// Unhooks from the parent window procedure before destroying the overlay and graph.
+DSDisplay::~DSDisplay()
+{
+ this->unhook();
+
+ SAFE_DELETE_PTR(this->overlay);
+ SAFE_DELETE_PTR(this->graph);
+}
+
+// Starts rendering: hooks into the parent window on first start, (re)runs the
+// graph if it is stopped or paused, then shows the video window.
+void DSDisplay::start()
+{
+ if (!this->graph->isRunning()){
+ this->hook();
+ }
+ if (!this->graph->isRunning() || this->graph->isPaused()){
+ this->graph->start();
+ }
+ this->graph->getVideoWindow()->put_Visible(OATRUE);
+}
+
+// Pauses the underlying graph (video window stays visible).
+void DSDisplay::pause()
+{
+ this->graph->pause();
+}
+
+// Stops rendering: leaves fullscreen, stops the graph and unhooks from the
+// parent window. No-op when the graph is not running.
+void DSDisplay::stop()
+{
+ if (this->graph->isRunning()){
+ this->setFullscreen(false);
+
+ this->graph->stop();
+ this->unhook();
+ }
+}
+
+// Convenience overload: the parent HWND passed as a 64-bit integer (e.g. from
+// a scripting/wrapper layer). Forwards to attach(void*).
+void DSDisplay::attach(INT64 parent)
+{
+ this->attach((void*)parent);
+}
+
+// Attaches the display to a parent window handle and subclasses its WndProc.
+// Re-attaching to the same parent is a no-op for the detach step.
+void DSDisplay::attach(void *parent)
+{
+ // Don't reattach if this is the same parent
+ if (this->isAttached() && parent){
+ HWND hwnd = reinterpret_cast<HWND>(parent);
+ if (hwnd != this->window){
+ this->detach();
+ }
+ }
+
+ // Gets the handle of the parent
+ this->window = reinterpret_cast<HWND>(parent);
+ // Hook to the parent WindowProc
+ this->hook();
+
+#if USE_OVERLAY
+ // Allows the overlay to initialize
+ this->overlay->attach(this->window, this->graph);
+#endif
+}
+
+// Detaches only if `parent` matches the currently attached window.
+void DSDisplay::detach(void *parent)
+{
+ // The detach action is only valid and if this is the same parent
+ if (parent){
+ HWND hwnd = reinterpret_cast<HWND>(parent);
+ if (hwnd == this->window){
+ this->detach();
+ }
+ }
+}
+
+// Unconditional detach: unhooks the WndProc and forgets the parent window.
+void DSDisplay::detach()
+{
+ if (!this->isAttached()){
+ return;
+ }
+
+#if USE_OVERLAY
+ // Clean up overlay
+ this->overlay->detach();
+#endif
+
+ // Unhook from the parent WindowProc
+ this->unhook();
+
+ // Set the handle of the parent to NULL
+ this->window = NULL;
+}
+
+// True when a parent window handle is currently set.
+bool DSDisplay::isAttached()
+{
+ return (this->window != NULL);
+}
+
+// Current display width in pixels (after aspect-ratio correction).
+int DSDisplay::getWidth()
+{
+ return this->width;
+}
+
+// Current display height in pixels (after aspect-ratio correction).
+int DSDisplay::getHeight()
+{
+ return this->height;
+}
+
+// Updates the graph's image format to w x h (windowed mode only). The actual
+// window repositioning code is disabled (#if 0) — NOTE(review): confirm
+// whether WM_SIZE handling in handleEvents() is what replaces it.
+void DSDisplay::setSize(int w, int h)
+{
+ //this->width = w;
+ //this->height = h;
+
+ if (!this->fullscreen){
+ this->graph->setImageFormat(w, h);
+ if(this->hooked){
+#if 0
+ #if defined(VMR9_WINDOWLESS)
+ RECT rc;
+ SetRect(&rc, 0, 0, w, h);
+ this->graph->getWindowlessControl()->SetVideoPosition(&rc, &rc);
+ #else
+ this->graph->getVideoWindow()->SetWindowPosition(0, 0, this->width , this->height);
+ #endif
+#endif
+ }
+ }
+}
+
+// Fits the video inside `rect` while preserving the source image aspect ratio
+// (imgWidth:imgHeight): computes width/height to letterbox/pillarbox, and
+// left/top to center the video in the rectangle.
+void DSDisplay::applyRatio(RECT rect)
+{
+ long w = rect.right - rect.left;
+ long h = rect.bottom - rect.top;
+ float ratio = ((float)this->imgWidth/(float)this->imgHeight);
+ // (w/h)=ratio =>
+ // 1) h=w/ratio
+ // and
+ // 2) w=h*ratio
+ this->width = (int)(w/ratio) > h ? (int)(h * ratio) : w;
+ this->height = (int)(this->width/ratio) > h ? h : (int)(this->width/ratio);
+ this->left = ((w - this->width) >> 1);
+ this->top = ((h - this->height) >> 1);
+}
+
+// Queries the renderer for the current fullscreen state and caches it in
+// this->fullscreen. On query failure the cached state is reset to false.
+bool DSDisplay::isFullscreen()
+{
+#if defined(VMR9_WINDOWLESS)
+ // TODO
+#else
+ long result;
+ HRESULT hr = this->graph->getVideoWindow()->get_FullScreenMode(&result);
+ if (SUCCEEDED(hr)){
+ this->fullscreen = (result == OATRUE);
+ }
+ else{
+ TSK_DEBUG_ERROR("get_FullScreenMode failed with %ld", hr);
+ this->fullscreen = FALSE;
+ }
+#endif
+ return this->fullscreen;
+}
+
+// Enters/leaves fullscreen through IVideoWindow::put_FullScreenMode. Refused
+// when the source image is too small (see canFullscreen()); no-op when the
+// requested state is already active.
+void DSDisplay::setFullscreen(bool value)
+{
+ if(!this->canFullscreen()){
+ TSK_DEBUG_WARN("Cannot fullscreen");
+ return;
+ }
+
+ HRESULT hr;
+
+#if defined(VMR9_WINDOWLESS)
+ // TODO
+#else
+ if (this->isFullscreen() == value){
+ return;
+ }
+
+ hr = this->graph->getVideoWindow()->put_FullScreenMode(value ? OATRUE : OAFALSE);
+ if (SUCCEEDED(hr)){
+ this->fullscreen = value;
+#if USE_OVERLAY
+ // Show the overlay for OVERLAY_TIMEOUT seconds (expressed in frames)
+ this->overlay->show(this->fullscreen ? (OVERLAY_TIMEOUT * this->graph->getDisplayFps()) : 0);
+#endif
+ }
+ else{
+ TSK_DEBUG_ERROR("put_FullScreenMode failed with %ld", hr);
+ }
+#endif
+}
+
+// When true, handleEvents() ends with DefWindowProc instead of chaining to the
+// saved parent WndProc (Firefox-plugin hosting workaround).
+void DSDisplay::setPluginFirefox(bool value)
+{
+ bPluginFirefox = value;
+}
+
+// Fullscreen is allowed only when the source image is at least
+// FSCREEN_MIN_IDEAL_WIDTH x FSCREEN_MIN_IDEAL_HEIGHT (352x288, CIF).
+bool DSDisplay::canFullscreen()
+{
+#if defined(VMR9_WINDOWLESS)
+ // TODO
+#else
+ if(this->graph){
+ UINT image_w, image_h;
+
+ if( this->graph->getImageFormat(image_w, image_h) ){
+ //this->graph->getVideoWindow()->GetMinIdealImageSize(&ideal_w, &ideal_h);
+ return (((long)image_w >= FSCREEN_MIN_IDEAL_WIDTH) && ((long)image_h >= FSCREEN_MIN_IDEAL_HEIGHT));
+ }
+ }
+#endif
+ return false;
+}
+
+// Sets the target display frame rate and propagates it to the graph.
+void DSDisplay::setFps(int fps_)
+{
+ this->fps = fps_;
+ this->graph->setDisplayFps(fps_);
+}
+
+
+// Pushes one decoded video frame into the graph. w and h are the size of the
+// buffer (source image), not the display. When the source dimensions change,
+// the cached image size is updated and a WM_SIZE is sent to the parent window
+// so the layout (applyRatio) is recomputed. Ignored while the graph is stopped.
+void DSDisplay::handleVideoFrame(const void* data, int w, int h)
+{
+ if (this->graph->isRunning()){
+ // The graph will take care of changing the source filter if needed
+ // in case of dimension change or anything else...
+ this->graph->handleFrame(data, w, h);
+ if(this->imgWidth != w || this->imgHeight != h){
+ this->imgWidth = w;
+ this->imgHeight = h;
+ if(this->window){
+ SendMessage(this->window, WM_SIZE, SIZE_RESTORED, MAKELPARAM(this->width , this->height));
+ }
+ }
+#if USE_OVERLAY
+ this->overlay->update();
+#endif
+ }
+}
+
+// Window-procedure body for the hooked parent window. Handles resize/move
+// (re-layout), double-click and WM_FULLSCREEN_SET (fullscreen toggle), Escape
+// (leave fullscreen) and graph notifications, then chains to the original
+// parent WndProc (or DefWindowProc in Firefox-plugin mode).
+LRESULT DSDisplay::handleEvents(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
+{
+ switch(uMsg)
+ {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE:
+ {
+ // Recompute the aspect-corrected video rectangle and reposition the renderer
+ RECT rect = {0};
+ GetWindowRect(hWnd, &rect);
+ applyRatio(rect);
+
+#if defined(VMR9_WINDOWLESS)
+ this->graph->getWindowlessControl()->SetVideoPosition(&rect, &rect);
+#else
+ this->graph->getVideoWindow()->SetWindowPosition(this->left, this->top, this->width , this->height);
+#endif
+ }
+ break;
+
+ case WM_LBUTTONDBLCLK:
+ // Double-click enters fullscreen (when the image is large enough)
+ if(this->canFullscreen()){
+ this->setFullscreen(true);
+ }
+ break;
+
+ case WM_FULLSCREEN_SET:
+ // Application-defined message: toggle fullscreen
+ if(this->canFullscreen()){
+ this->setFullscreen(!this->isFullscreen());
+ }
+ break;
+
+ case WM_LBUTTONDOWN:
+ case WM_RBUTTONDOWN:
+ case WM_KEYDOWN:
+ if(this->isFullscreen())
+ {
+#if USE_OVERLAY
+ // Re-Show overlay
+ this->overlay->show(OVERLAY_TIMEOUT * this->graph->getDisplayFps());
+#endif
+ }
+ break;
+
+ case WM_CHAR:
+ case WM_KEYUP:
+ // 0x1B is the ESC character for WM_CHAR; VK_ESCAPE covers WM_KEYUP
+ if(this->isFullscreen() && (wParam == 0x1B || wParam == VK_ESCAPE))
+ {
+ // escape
+ this->setFullscreen(false);
+ }
+
+ break;
+
+ case WM_GRAPHNOTIFY:
+ {
+ // Drain all pending DirectShow events (comma operator: call then test)
+ long evCode;
+ LONG_PTR param1, param2;
+ HRESULT hr;
+ while (hr = this->graph->getMediaEvent()->GetEvent(&evCode, &param1, &param2, 0), SUCCEEDED(hr))
+ {
+ // Event params must always be freed, whatever the event code
+ hr = this->graph->getMediaEvent()->FreeEventParams(evCode, param1, param2);
+
+ switch(evCode)
+ {
+ case EC_FULLSCREEN_LOST:
+#if USE_OVERLAY
+ this->overlay->show(0);
+#endif
+ break;
+ case EC_COMPLETE:
+ case EC_USERABORT:
+ default:
+ break;
+ }
+ }
+ }
+ break;
+
+#if defined(VMR9_WINDOWLESS)
+ case WM_DISPLAYCHANGE:
+ {
+ this->graph->getWindowlessControl()->DisplayModeChanged();
+ }
+ break;
+ case WM_PAINT:
+ {
+ RECT rect = {0};
+ GetWindowRect(hWnd, &rect);
+
+ PAINTSTRUCT ps;
+ HDC hdc = BeginPaint(hWnd, &ps);
+
+ this->graph->getWindowlessControl()->RepaintVideo(hWnd, hdc);
+
+ EndPaint(hWnd, &ps);
+ }
+ break;
+#endif
+
+ }
+
+ return bPluginFirefox ? DefWindowProc(hWnd, uMsg, wParam, lParam) : CallWindowProc(this->parentWindowProc, hWnd, uMsg, wParam, lParam);
+}
+
+// Subclasses the parent window (SetWindowLongPtr/GWLP_WNDPROC), registers this
+// display in the global HWND->display map, then embeds and positions the video
+// window inside the parent and arms WM_GRAPHNOTIFY event notification.
+// Idempotent: returns immediately when no window is set or already hooked.
+void DSDisplay::hook()
+{
+ HRESULT hr;
+
+ if (!this->window){
+ return;
+ }
+
+ if(this->hooked){
+ return;
+ }
+ this->hooked = TRUE;
+
+ // The global list may not exist yet on the first hook ever
+ bool lock = (__directshow__Displays != NULL);
+
+ if(lock)
+ tsk_list_lock(__directshow__Displays);
+ {
+ // Gets the parent Window procedure
+#if defined(_WIN32_WCE)
+ // Workaround for bug in SetWindowLong, call twice the API
+ //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) __directshow__WndProcWindow );
+ //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) __directshow__WndProcWindow );
+ //__directshow__Displays[this->window] = this;
+#else
+ this->parentWindowProc = (WNDPROC) SetWindowLongPtr(this->window, GWLP_WNDPROC, (LONG_PTR) __directshow__WndProcWindow);
+ // Add this instance to the callback map (the ctor pushes the new object
+ // into __directshow__Displays, which keeps the reference)
+ tsk_object_new(tdshow_display_def_t, this->window, this);
+#endif
+ }
+ if(lock)
+ tsk_list_unlock(__directshow__Displays);
+
+ // Fit the video into the parent window, preserving aspect ratio
+ RECT rect;
+ GetWindowRect(this->window, &rect);
+ applyRatio(rect);
+
+#if defined(VMR9_WINDOWLESS)
+ rect.left = 0;
+ rect.top = 0;
+ rect.right = this->width;
+ rect.bottom = this->height;
+
+ // TODO : Review
+ hr = this->graph->getWindowlessControl()->SetVideoClippingWindow(this->window);
+ hr = this->graph->getWindowlessControl()->SetBorderColor(RGB(0, 0, 128));
+ hr = this->graph->getWindowlessControl()->SetVideoPosition(NULL, &rect);
+#else
+ // TODO : Review the order
+ hr = this->graph->getVideoWindow()->put_Owner((OAHWND) this->window);
+ hr = this->graph->getVideoWindow()->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN);
+ hr = this->graph->getVideoWindow()->SetWindowPosition(this->left, this->top, this->width, this->height);
+ hr = this->graph->getVideoWindow()->put_MessageDrain((OAHWND) this->window);
+ hr = this->graph->getVideoWindow()->put_Visible(OATRUE);
+#endif
+
+ // Route graph events to the parent window as WM_GRAPHNOTIFY messages
+ hr = this->graph->getMediaEvent()->SetNotifyWindow((OAHWND) this->window, WM_GRAPHNOTIFY, 0);
+}
+
+// Reverses hook(): disables graph notifications, releases the video window
+// from the parent, removes this display from the global map and restores the
+// original parent window procedure. Idempotent.
+void DSDisplay::unhook()
+{
+ HRESULT hr;
+
+ if(!this->window){
+ return;
+ }
+
+ if(!this->hooked){
+ return;
+ }
+
+ hr = this->graph->getMediaEvent()->SetNotifyWindow(NULL, WM_GRAPHNOTIFY, 0);
+
+#if defined(VMR9_WINDOWLESS)
+ // TODO : Review
+ hr = this->graph->getWindowlessControl()->SetVideoClippingWindow(NULL);
+#else
+ // TODO : Review the order
+ hr = this->graph->getVideoWindow()->put_Visible(OAFALSE);
+ hr = this->graph->getVideoWindow()->put_MessageDrain((OAHWND) NULL);
+ hr = this->graph->getVideoWindow()->put_Owner((OAHWND) NULL);
+ hr = this->graph->getVideoWindow()->put_AutoShow(OAFALSE);
+#endif
+
+ bool lock = (__directshow__Displays != NULL);
+ if(lock)
+ tsk_list_lock(__directshow__Displays);
+ {
+ // Remove this instance from the callback map
+ tsk_list_remove_item_by_pred(__directshow__Displays, __pred_find_display_by_hwnd, &this->window);
+ // Restore parent Window procedure
+#if defined(_WIN32_WCE)
+ // Workaround for bug in SetWindowLong, call twice the API
+ //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) this->parentWindowProc );
+ //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) this->parentWindowProc );
+#else
+ SetWindowLongPtr(this->window, GWLP_WNDPROC, (LONG_PTR) this->parentWindowProc);
+#endif
+ }
+ if(lock)
+ tsk_list_unlock(__directshow__Displays);
+
+ this->hooked = FALSE;
+}
+
+
+
+
+
+
+
+
+
+
+
+//=================================================================================================
+// tdshow_display_t object definition (ctor/dtor/cmp for the tsk_object system)
+//
+// Ctor: stores the (hwnd, display) pair, lazily creates the global list and
+// registers itself in it (the list keeps the reference created here).
+static tsk_object_t* tdshow_display_ctor(tsk_object_t * self, va_list * app)
+{
+ tdshow_display_t *display = (tdshow_display_t *)self;
+
+ if(display){
+ display->hwnd = va_arg(*app, HWND);
+ display->display = va_arg(*app, DSDisplay*);
+
+ if(!__directshow__Displays){
+ __directshow__Displays = tsk_list_create();
+ }
+ tsk_list_push_back_data(__directshow__Displays, (void**)&display);
+ }
+
+ return self;
+}
+
+// Dtor: drops this entry from the global list if the list still exists.
+static tsk_object_t* tdshow_display_dtor(tsk_object_t * self)
+{
+ tdshow_display_t *display = (tdshow_display_t *)self;
+ if(display){
+ if(__directshow__Displays){
+ tsk_list_remove_item_by_data(__directshow__Displays, display);
+ //if(TSK_LIST_IS_EMPTY(__directshow__Displays)){
+ // TSK_OBJECT_SAFE_FREE(__directshow__Displays);
+ //}
+ }
+ }
+
+ return self;
+}
+
+// Cmp: two objects are equal when they wrap the same HWND (0 == equal).
+static int tdshow_display_cmp(const tsk_object_t *_d1, const tsk_object_t *_d2)
+{
+ const tdshow_display_t *d1 = (const tdshow_display_t *)_d1;
+ const tdshow_display_t *d2 = (const tdshow_display_t *)_d2;
+
+ if(d1 && d2){
+ int ret = 0;
+ tsk_subsat_int32_ptr(d1->hwnd, d2->hwnd, &ret);
+ return ret;
+ }
+ else if(!d1 && !d2) return 0;
+ else return -1;
+}
+
+static const tsk_object_def_t tdshow_display_def_s =
+{
+ sizeof(tdshow_display_t),
+ tdshow_display_ctor,
+ tdshow_display_dtor,
+ tdshow_display_cmp,
+};
+extern const tsk_object_def_t *tdshow_display_def_t = &tdshow_display_def_s;
diff --git a/plugins/pluginDirectShow/internals/DSDisplay.h b/plugins/pluginDirectShow/internals/DSDisplay.h
new file mode 100644
index 0000000..b2985ef
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDisplay.h
@@ -0,0 +1,84 @@
+/*
+* Copyright (C) 2010-2011 Mamadou Diop.
+*
+* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*
+*/
+#ifndef PLUGIN_DSHOW_DIRECTSHOW_DISPLAY_H
+#define PLUGIN_DSHOW_DIRECTSHOW_DISPLAY_H
+
+#include "plugin_dshow_config.h"
+
+#include "internals/DSDisplayGraph.h"
+#include "internals/DSDisplayOverlay.h"
+
+#define WM_FULLSCREEN_SET (WM_USER + 401)
+
+// Renders decoded video frames into a host-supplied parent window by
+// subclassing its WndProc and embedding a DirectShow graph (DSDisplayGraph).
+// Supports fullscreen toggling and aspect-ratio-preserving layout.
+class DSDisplay
+{
+public:
+ DSDisplay(HRESULT *hr);
+ virtual ~DSDisplay();
+
+ // Attach/detach to a parent window (INT64 overload for wrapper layers)
+ virtual void attach(INT64 parent);
+ virtual void attach(void *parent);
+ virtual void detach(void *parent);
+ virtual void detach();
+ virtual bool isAttached();
+
+ virtual void start();
+ virtual void pause();
+ virtual void stop();
+
+ virtual int getWidth();
+ virtual int getHeight();
+ virtual void setSize(int w, int h);
+
+ virtual bool isFullscreen();
+ virtual void setFullscreen(bool value);
+ virtual void setPluginFirefox(bool value);
+
+ virtual bool canFullscreen();
+
+ virtual void setFps(int fps_);
+
+ // data points to one frame of w x h pixels (buffer size, not display size)
+ virtual void handleVideoFrame(const void* data, int w, int h);
+
+ // Called by the subclassed WndProc for every message of the parent window
+ LRESULT handleEvents(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+
+private:
+ void hook();
+ void unhook();
+ void applyRatio(RECT rect);
+
+private:
+ DSDisplayGraph *graph;
+ DSDisplayOverlay *overlay;
+
+ int fps;
+ // left/top/width/height: display rectangle; imgWidth/imgHeight: source size
+ int left, top, width, height, imgWidth, imgHeight;
+
+ bool bPluginFirefox;
+ bool fullscreen;
+ HWND window;
+ WNDPROC parentWindowProc;
+
+ bool hooked;
+};
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayGraph.cxx b/plugins/pluginDirectShow/internals/DSDisplayGraph.cxx
new file mode 100644
index 0000000..b2da43b
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDisplayGraph.cxx
@@ -0,0 +1,345 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#if defined(VMR9)
+#define DIRECT3D_VERSION 0x0900
+#endif
+
+#include "internals/DSDisplayGraph.h"
+#include "internals/DSUtils.h"
+#include "internals/DSOutputFilter.h"
+
+#include "tsk_debug.h"
+
+#include <iostream>
+
+using namespace std;
+
+// Builds the rendering graph (source -> colorspace converter -> renderer) and
+// connects its filters. *hr reports failure of either step to the caller.
+DSDisplayGraph::DSDisplayGraph(HRESULT *hr)
+{
+ this->running = FALSE;
+ this->paused = FALSE;
+ this->fps = 15;
+
+ this->graphBuilder = NULL;
+
+ this->sourceFilter = NULL;
+ this->colorspaceConverterFilter = NULL;
+ this->videoRendererFilter = NULL;
+
+ this->mediaController = NULL;
+ this->mediaEvent = NULL;
+ this->videoWindow = NULL;
+
+#if defined(VMR) ||defined(VMR9) || defined(VMR9_WINDOWLESS)
+ this->mixerBitmap = NULL;
+ this->filterConfig = NULL;
+#endif
+
+#if defined(VMR9_WINDOWLESS)
+ this->windowlessControl = NULL;
+#endif
+
+ *hr = this->createDisplayGraph();
+ if (FAILED(*hr)) return;
+
+ *hr = this->connect();
+ if (FAILED(*hr)) return;
+}
+
+// Disconnects the filters then releases every COM interface acquired in
+// createDisplayGraph(). The source filter release is intentionally commented
+// out — NOTE(review): confirm its ownership (graph vs. this class).
+DSDisplayGraph::~DSDisplayGraph()
+{
+ this->disconnect();
+
+#if defined(VMR9_WINDOWLESS)
+ SAFE_RELEASE(this->windowlessControl);
+#endif
+
+#if defined(VMR) ||defined(VMR9) || defined(VMR9_WINDOWLESS)
+ SAFE_RELEASE(this->filterConfig);
+ SAFE_RELEASE(this->mixerBitmap);
+#endif
+
+ SAFE_RELEASE(this->videoWindow);
+ SAFE_RELEASE(this->mediaEvent);
+ SAFE_RELEASE(this->mediaController);
+
+ SAFE_RELEASE(this->colorspaceConverterFilter);
+ SAFE_RELEASE(this->videoRendererFilter);
+ //SAFE_RELEASE(this->sourceFilter);
+
+ SAFE_RELEASE(this->graphBuilder);
+}
+
+// Stores the display frame rate and forwards it to the source filter.
+void DSDisplayGraph::setDisplayFps(int fps_)
+{
+ this->fps = fps_;
+ if(this->sourceFilter){
+ this->sourceFilter->setFps(fps_);
+ }
+}
+
+// Reads the source filter's current image size into width/height.
+// Returns false when there is no source filter.
+bool DSDisplayGraph::getImageFormat(UINT &width, UINT &height)
+{
+ if(this->sourceFilter){
+ return this->sourceFilter->getImageFormat(width, height);
+ }
+ return false;
+}
+
+// Applies a new image size to the source filter. If the size actually changed
+// and the graph is connected, the filters must be disconnected first and
+// reconnected afterwards. Returns false on any disconnect/set/reconnect failure.
+bool DSDisplayGraph::setImageFormat(UINT width, UINT height)
+{
+ bool ret = true;
+ if(this->sourceFilter){
+ UINT w=width, h = height;
+ if(this->sourceFilter->getImageFormat(w, h)){
+ if(w!= width || h!=height){ // Image format has changed
+ bool reconnect = this->connected; // IMPORTANT: Must reconnect all elements
+ HRESULT hr;
+ if(reconnect){
+ if((hr = this->disconnect()) != S_OK){
+ return false;
+ }
+ }
+ ret = (this->sourceFilter->setImageFormat(width, height) == S_OK);
+ if(reconnect){
+ // connect() returns S_OK (0) on success; any non-zero HRESULT is a failure
+ if((hr = this->connect())){
+ return false;
+ }
+ }
+ }
+ }
+ }
+ return ret;
+}
+
+// Connects the pipeline: source -> colorspace converter -> video renderer.
+// Sets this->connected on success; returns the failing HRESULT otherwise.
+HRESULT DSDisplayGraph::connect()
+{
+ HRESULT hr;
+
+ if((hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorspaceConverterFilter)) != S_OK){
+ TSK_DEBUG_ERROR("Failed to connect sourcefilter with the colorspace");
+ return hr;
+ }
+ if((hr = ConnectFilters(this->graphBuilder, this->colorspaceConverterFilter, this->videoRendererFilter)) != S_OK){
+ TSK_DEBUG_ERROR("Failed to connect colorspace with the videorenderer");
+ return hr;
+ }
+
+ this->connected = true;
+ return S_OK;
+}
+
+// Disconnects the pipeline in the same order it was connected:
+// source -> colorspace converter, then colorspace converter -> renderer.
+// Clears this->connected on success; returns the failing HRESULT otherwise.
+HRESULT DSDisplayGraph::disconnect()
+{
+ HRESULT hr;
+
+ if((hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorspaceConverterFilter)) != S_OK){
+ TSK_DEBUG_ERROR("Failed to disconnect sourcefilter with the colorspace");
+ return hr;
+ }
+ if((hr = DisconnectFilters(this->graphBuilder, this->colorspaceConverterFilter, this->videoRendererFilter)) != S_OK){
+ // Fixed copy-paste: this is the disconnect path, not connect
+ TSK_DEBUG_ERROR("Failed to disconnect colorspace with the videorenderer");
+ return hr;
+ }
+
+ this->connected = false;
+ return S_OK;
+}
+
+// Resets the source filter and runs the graph. `running` is set optimistically
+// before IMediaControl::Run; failures are only logged.
+HRESULT DSDisplayGraph::start()
+{
+ HRESULT hr;
+ this->running = true;
+ this->sourceFilter->reset();
+
+ hr = this->mediaController->Run();
+ if (!SUCCEEDED(hr)){
+ TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Run() has failed with %ld", hr);
+ }
+ return hr;
+}
+
+// Pauses the graph when it is running and not already paused; no-op otherwise.
+HRESULT DSDisplayGraph::pause()
+{
+ HRESULT hr = S_OK;
+ if(isRunning() && !isPaused()){
+ hr = this->mediaController->Pause();
+ if(SUCCEEDED(hr)){
+ this->paused = true;
+ }
+ }
+ return hr;
+}
+
+// Stops the graph. Pauses first; S_FALSE means the transition is still in
+// progress, so wait (up to 2.5s) via GetState before issuing Stop.
+HRESULT DSDisplayGraph::stop()
+{
+ HRESULT hr;
+
+ hr = this->mediaController->Pause();
+ if (hr == S_FALSE){
+ TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Pause() has failed with %ld. Waiting for transition.", hr);
+ FILTER_STATE pfs;
+ hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
+ }
+
+ hr = this->mediaController->Stop();
+ if (!SUCCEEDED(hr)){
+ TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Stop() has failed with %ld", hr);
+ }
+
+ this->running = false;
+ this->paused = false;
+
+ return hr;
+}
+
+// True after a successful start() and before stop().
+bool DSDisplayGraph::isRunning()
+{
+ return this->running;
+}
+
+// True while the graph is paused (set by pause(), cleared by stop()).
+bool DSDisplayGraph::isPaused()
+{
+ return this->paused;
+}
+
+// Feeds one w x h frame to the source filter. The w*h*3 byte size implies a
+// 24-bit (3 bytes/pixel) buffer. A NULL data pointer or a stopped graph clears
+// the filter's buffer. If the frame size differs from the filter's current
+// format (setImageFormat returns S_OK on change), the whole graph is stopped,
+// reconnected and restarted before the buffer is handed over.
+void DSDisplayGraph::handleFrame(const void* data, int w, int h)
+{
+ HRESULT hr;
+
+ if(!this->sourceFilter){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return;
+ }
+
+ if(!data || !this->running){
+ this->sourceFilter->setBuffer(NULL, (w*h*3));
+ return;
+ }
+
+ hr = this->sourceFilter->setImageFormat(w, h);
+ if (hr == S_OK){
+ this->stop();
+
+ this->disconnect();
+ this->connect();
+
+ this->start();
+ }
+
+ this->sourceFilter->setBuffer((void*)data, (w*h*3));
+}
+
+// Creates the filter graph and all its components: graph builder, custom
+// source filter, colorspace converter and the renderer selected at compile
+// time (VMR, VMR9, VMR9 windowless or the default video renderer), then
+// queries the control/event/window interfaces. Returns the first failing
+// HRESULT; released objects are cleaned up by the destructor.
+HRESULT DSDisplayGraph::createDisplayGraph()
+{
+ HRESULT hr;
+
+ // Create the graph builder
+ hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder);
+ if(FAILED(hr)) return hr;
+
+
+ // Create my custom filter
+ LPUNKNOWN pUnk = NULL;
+ this->sourceFilter = new DSOutputFilter(pUnk, &hr /*, this*/);
+ if(FAILED(hr) || this->sourceFilter == NULL) return hr;
+
+ // Create the color space convertor filter
+ hr = COCREATE(CLSID_Colour, IID_IBaseFilter, this->colorspaceConverterFilter);
+ if(FAILED(hr)) return hr;
+
+#if defined(VMR)
+ // Create the video mixing renderer based on Direct X
+ hr = COCREATE(CLSID_VideoMixingRenderer, IID_IBaseFilter, this->videoRendererFilter);
+ if(FAILED(hr)) return hr;
+#elif defined(VMR9) || defined(VMR9_WINDOWLESS)
+ // Create the video mixing renderer based on Direct X 9.0
+ hr = COCREATE(CLSID_VideoMixingRenderer9, IID_IBaseFilter, this->videoRendererFilter);
+ if(FAILED(hr)) return hr;
+#else
+ // Create the video renderer
+ hr = COCREATE(CLSID_VideoRenderer, IID_IBaseFilter, this->videoRendererFilter);
+ if(FAILED(hr)) return hr;
+#endif
+
+
+ // Add source filter to the graph
+ hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_OUTPUT);
+ if(FAILED(hr)) return hr;
+
+ // Add the color space convertor to the graph
+ hr = this->graphBuilder->AddFilter(this->colorspaceConverterFilter, FILTER_COLORSPACE_CONVERTOR);
+ if(FAILED(hr)) return hr;
+
+ // Add video renderer to the graph
+ hr = this->graphBuilder->AddFilter(this->videoRendererFilter, FILTER_VIDEO_RENDERER);
+ if(FAILED(hr)) return hr;
+
+
+ // Find media control
+ hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController);
+ if(FAILED(hr)) return hr;
+
+ // Find media event
+ hr = QUERY(this->graphBuilder, IID_IMediaEventEx, this->mediaEvent);
+ if(FAILED(hr)) return hr;
+ // hr = this->mediaEvent->SetNotifyFlags(AM_MEDIAEVENT_NONOTIFY);
+
+
+#if defined(VMR)
+ // Find the bitmap mixer (Direct X)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRMixerBitmap, this->mixerBitmap);
+ if(FAILED(hr)) return hr;
+
+ // Find the bitmap configurer (Direct X)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRFilterConfig, this->filterConfig);
+ if(FAILED(hr)) return hr;
+
+ // Set the number of streams (Direct X)
+ hr = this->filterConfig->SetNumberOfStreams(1);
+ if(FAILED(hr)) return hr;
+#elif defined(VMR9) || defined(VMR9_WINDOWLESS)
+ // Find the bitmap mixer (Direct X 9.0)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRMixerBitmap9, this->mixerBitmap);
+ if(FAILED(hr)) return hr;
+
+ // Find the bitmap configurer (Direct X 9.0)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRFilterConfig9, this->filterConfig);
+ if(FAILED(hr)) return hr;
+
+ // Set the number of streams (Direct X 9.0)
+ hr = this->filterConfig->SetNumberOfStreams(1);
+ if(FAILED(hr)) return hr;
+#endif
+
+#if defined(VMR9_WINDOWLESS)
+ // Set the rendering mode (Direct X 9.0)
+ hr = this->filterConfig->SetRenderingMode(VMR9Mode_Windowless);
+ if(FAILED(hr)) return hr;
+
+ // Find the windowless control (Direct X 9.0)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRWindowlessControl9, this->windowlessControl);
+ if(FAILED(hr)) return hr;
+#else
+ // Find IVideoWindow interface
+ hr = QUERY(this->graphBuilder, IID_IVideoWindow, this->videoWindow);
+ if(FAILED(hr)) return hr;
+#endif
+
+ return hr;
+}
diff --git a/plugins/pluginDirectShow/internals/DSDisplayGraph.h b/plugins/pluginDirectShow/internals/DSDisplayGraph.h
new file mode 100644
index 0000000..c9080fe
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDisplayGraph.h
@@ -0,0 +1,110 @@
+/*
+* Copyright (C) 2010-2011 Mamadou Diop.
+*
+* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*
+*/
+#ifndef PLUGIN_DSHOW_DSDISPLAYGRAPH_H
+#define PLUGIN_DSHOW_DSDISPLAYGRAPH_H
+
+#include "plugin_dshow_config.h"
+
+#include <control.h>
+
+#include "internals/VideoFrame.h"
+#include "internals/DSOutputFilter.h"
+#include "internals/DSDisplayOverlay.h"
+
+#if defined(VMR9) || defined(VMR9_WINDOWLESS)
+#include <D3D9.h>
+#include <vmr9.h>
+#endif
+
+
+// Owns the DirectShow rendering pipeline used by DSDisplay:
+// source filter -> colorspace converter -> renderer (selected by the
+// VMR/VMR9/VMR9_WINDOWLESS compile-time switches).
+class DSDisplayGraph
+{
+public:
+ DSDisplayGraph(HRESULT *hr);
+ virtual ~DSDisplayGraph();
+
+ int getDisplayFps() { return this->fps; };
+ void setDisplayFps(int fps_);
+
+ // Source-image dimensions (forwarded to/from the source filter)
+ bool getImageFormat(UINT &width, UINT &height);
+ bool setImageFormat(UINT width, UINT height);
+
+ // Connect/disconnect the three filters inside the graph
+ HRESULT connect();
+ HRESULT disconnect();
+
+ HRESULT start();
+ HRESULT pause();
+ HRESULT stop();
+ bool isRunning();
+ bool isPaused();
+
+ IMediaEventEx *getMediaEvent() { return this->mediaEvent; };
+ IVideoWindow *getVideoWindow() { return this->videoWindow; };
+ DSOutputFilter *getSourceFilter() { return this->sourceFilter; };
+
+#if defined(VMR)
+ IVMRMixerBitmap *getMixerBitmap() { return this->mixerBitmap; };
+#elif defined(VMR9)
+ IVMRMixerBitmap9 *getMixerBitmap() { return this->mixerBitmap; };
+#elif defined(VMR9_WINDOWLESS)
+ IVMRMixerBitmap9 *getMixerBitmap() { return this->mixerBitmap; };
+ IVMRMixerControl9 *getMixerControl() { return this->mixerControl; };
+ IVMRWindowlessControl9 *getWindowlessControl() { return this->windowlessControl; };
+#endif
+
+ // Push one decoded frame (w x h, buffer size) into the source filter
+ void handleFrame(const void* data, int w, int h);
+
+private:
+ HRESULT createDisplayGraph();
+
+private:
+ IGraphBuilder *graphBuilder;
+
+ DSOutputFilter *sourceFilter;
+ IBaseFilter *colorspaceConverterFilter;
+ IBaseFilter *videoRendererFilter;
+
+ IMediaControl *mediaController;
+ IMediaEventEx *mediaEvent;
+ IVideoWindow *videoWindow;
+
+#if defined(VMR)
+ IVMRMixerBitmap *mixerBitmap;
+ IVMRFilterConfig *filterConfig;
+#elif defined(VMR9)
+ IVMRMixerBitmap9 *mixerBitmap;
+ IVMRMixerControl9 *mixerControl;
+ IVMRFilterConfig9 *filterConfig;
+#elif defined(VMR9_WINDOWLESS)
+ IVMRMixerBitmap9 *mixerBitmap;
+ IVMRMixerControl9 *mixerControl;
+ IVMRFilterConfig9 *filterConfig;
+ IVMRWindowlessControl9 *windowlessControl;
+#endif
+
+ bool connected;
+ bool running;
+ bool paused;
+ int fps;
+};
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx b/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx
new file mode 100644
index 0000000..dacea84
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx
@@ -0,0 +1,179 @@
+/*
+* Copyright (C) 2010-2011 Mamadou Diop.
+*
+* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*
+*/
+#if defined(VMR)
+
+#include "internals/DSDisplayOverlay.h"
+#include "internals/DSDisplayGraph.h"
+#include "internals/DSUtils.h"
+#include "../../resource.h"
+
+using namespace std;
+
+#define ALPHA_VALUE_START 0.8f
+#define ALPHA_VALUE_STOP 0.0f
+
+
+// Hack to get module of the current code
+// Only works with Microsoft Linker and could break in the future
+EXTERN_C IMAGE_DOS_HEADER __ImageBase;
+
+
+// VMR overlay: put every member into a well-defined "not attached" state.
+// FIX: displayGraph/ticks/alphaStep were left uninitialized although
+// show()/update()/internalUpdate() read them before attach() may have run.
+DSDisplayOverlay::DSDisplayOverlay()
+{
+    this->window = NULL;
+    this->hdcBmp = NULL;
+    this->hbmOld = NULL;
+    this->displayGraph = NULL;
+    this->ticks = 0;
+    this->alphaStep = 0.0f;
+}
+
+DSDisplayOverlay::~DSDisplayOverlay()
+{
+    // GDI resources are released in detach().
+}
+
+// Attach the overlay to its parent window: load IDB_BITMAP_OVERLAY from this
+// module, select it into a memory DC and pre-compute the VMR alpha-bitmap
+// parameters (source/destination rectangles, color key, initial alpha).
+void DSDisplayOverlay::attach(HWND parent, DSDisplayGraph *graph)
+{
+    // Gets the handle of the parent and the graph
+    this->window = parent;
+    this->displayGraph = graph;
+
+    if (this->window)
+    {
+        // Hack to get module of the current code
+        // (Microsoft-linker only -- see __ImageBase above)
+        TCHAR *modulePath = (TCHAR *) calloc(255, sizeof(TCHAR));
+        GetModuleFileName((HINSTANCE)&__ImageBase, modulePath, 255);
+        HMODULE module = GetModuleHandle(modulePath);
+        free(modulePath); // FIX: memory comes from calloc() -> free(), not delete[]
+        if (!module)
+        {
+            cout << "Failed to get current module" << endl; // FIX: missing endl
+            return;
+        }
+
+        HBITMAP bitmap = LoadBitmap(module, MAKEINTRESOURCE(IDB_BITMAP_OVERLAY));
+        if (!bitmap)
+        {
+            cout << "Failed to load overlay bitmap" << endl;
+            return;
+        }
+
+        RECT rect;
+        // FIX: GetWindowRect() returns a BOOL, not an HRESULT; testing it with
+        // FAILED() never detected an error (FALSE == 0 is not a failure HRESULT).
+        if (!GetWindowRect(this->window, &rect))
+        {
+            cout << "Failed to get window size" << endl;
+            return;
+        }
+
+        // Memory DC compatible with the window, holding the overlay bitmap.
+        BITMAP bm;
+        HDC hdc = GetDC(this->window);
+        this->hdcBmp = CreateCompatibleDC(hdc);
+        ReleaseDC(this->window, hdc);
+
+        GetObject(bitmap, sizeof(bm), &bm);
+        this->hbmOld = (HBITMAP) SelectObject(this->hdcBmp, bitmap);
+
+        ZeroMemory(&this->alphaBitmap, sizeof(VMRALPHABITMAP));
+        this->alphaBitmap.dwFlags = VMRBITMAP_HDC | VMRBITMAP_SRCCOLORKEY;
+        this->alphaBitmap.hdc = this->hdcBmp;
+        this->alphaBitmap.clrSrcKey = 0x00FF00FF; // magenta pixels are transparent
+        // Source rectangle (the whole bitmap)
+        this->alphaBitmap.rSrc.left = 0;
+        this->alphaBitmap.rSrc.top = 0;
+        this->alphaBitmap.rSrc.right = bm.bmWidth;
+        this->alphaBitmap.rSrc.bottom = bm.bmHeight;
+        // Destination rectangle: centered, then normalized to 0..1 coordinates
+        this->alphaBitmap.rDest.left = (rect.right - rect.left - bm.bmWidth) / 2.0;
+        this->alphaBitmap.rDest.top = (rect.bottom - rect.top - bm.bmHeight) / 2.0;
+        this->alphaBitmap.rDest.right = this->alphaBitmap.rDest.left + bm.bmWidth;
+        this->alphaBitmap.rDest.bottom = this->alphaBitmap.rDest.top + bm.bmHeight;
+        this->alphaBitmap.rDest.left /= (rect.right - rect.left);
+        this->alphaBitmap.rDest.top /= (rect.bottom - rect.top);
+        this->alphaBitmap.rDest.right /= (rect.right - rect.left);
+        this->alphaBitmap.rDest.bottom /= (rect.bottom - rect.top);
+        // Alpha value for start
+        this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
+    }
+}
+
+// Release the GDI resources created by attach() and forget the parent
+// window / graph.
+void DSDisplayOverlay::detach()
+{
+    // Clean up: restore the old bitmap, delete the overlay bitmap and the DC.
+    DeleteObject(SelectObject(this->hdcBmp, this->hbmOld));
+    DeleteDC(this->hdcBmp);
+
+    this->hdcBmp = NULL;
+    this->hbmOld = NULL;
+    this->displayGraph = NULL;
+    this->window = NULL;
+}
+
+// Start showing the overlay for "value" update ticks, fading the alpha from
+// ALPHA_VALUE_START down to ALPHA_VALUE_STOP.
+void DSDisplayOverlay::show(int value)
+{
+    // Store the ticks to count down
+    this->ticks = value;
+
+    // Compute alpha value decrement per tick
+    this->alphaStep = (this->ticks > 0) ? ((ALPHA_VALUE_START - ALPHA_VALUE_STOP) / this->ticks) : 0;
+    this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
+
+    this->internalUpdate();
+}
+
+// One fade tick: decrement the counter, lower the alpha and re-mix.
+void DSDisplayOverlay::update()
+{
+    if (this->displayGraph && (this->ticks > 0))
+    {
+        this->ticks--;
+
+        // Be sure alpha is in 0.0 .. 1.0 range.
+        float value = this->alphaBitmap.fAlpha;
+        value -= this->alphaStep;
+        this->alphaBitmap.fAlpha = (value >= 0.0f) ? value : 0.0f;
+
+        this->internalUpdate();
+    }
+}
+
+// Push the current alpha-bitmap state to the VMR mixer.  Disables the
+// overlay once the tick count-down has reached zero.
+void DSDisplayOverlay::internalUpdate()
+{
+    HRESULT hr;
+
+    // FIX: show() calls this unconditionally; guard against being invoked
+    // before a successful attach().
+    if (!this->displayGraph || !this->displayGraph->getMixerBitmap())
+    {
+        return;
+    }
+
+    if (this->ticks > 0)
+    {
+        this->alphaBitmap.dwFlags = VMRBITMAP_HDC | VMRBITMAP_SRCCOLORKEY;
+    }
+    else
+    {
+        this->alphaBitmap.dwFlags = VMRBITMAP_DISABLE;
+    }
+
+    hr = this->displayGraph->getMixerBitmap()->SetAlphaBitmap(&this->alphaBitmap);
+    if (FAILED(hr))
+    {
+        cout << "Failed to mix overlay (" << hr << ")" << endl; // FIX: typo "overylay"
+        return;
+    }
+}
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx b/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx
new file mode 100644
index 0000000..972945f
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx
@@ -0,0 +1,207 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#if defined(VMR9) || defined(VMR9_WINDOWLESS)
+
+#define DIRECT3D_VERSION 0x0900
+
+#include <internals/DSDisplayOverlay.h>
+#include <internals/DSDisplayGraph.h>
+#include <internals/DSUtils.h>
+
+using namespace std;
+
+#define FILENAME _T("Overlay.png")
+#define ALPHA_VALUE_START 0.8f
+#define ALPHA_VALUE_STOP 0.0f
+
+
+// VMR9 overlay: create the Direct3D environment and put every member into a
+// well-defined "not attached" state.
+DSDisplayOverlay::DSDisplayOverlay()
+{
+    this->window = NULL;
+    this->direct3DDevice = NULL;
+    this->direct3DSurface = NULL;
+    // FIX: these were left uninitialized although update()/internalUpdate()
+    // read them before attach() may have run.
+    this->displayGraph = NULL;
+    this->ticks = 0;
+    this->alphaStep = 0.0f;
+
+    this->direct3D = Direct3DCreate9(D3D_SDK_VERSION);
+    if (!this->direct3D)
+    {
+        cout << "Cannot create Direct3D environment" << endl;
+        return;
+    }
+}
+
+DSDisplayOverlay::~DSDisplayOverlay()
+{
+    SAFE_RELEASE(this->direct3D);
+}
+
+// Attach the overlay to its parent window: create a D3D9 device, load the
+// overlay image (FILENAME) into an offscreen surface and pre-compute the
+// VMR9 alpha-bitmap parameters (rects + initial alpha).
+// NOTE(review): FILENAME ("Overlay.png") is resolved relative to the current
+// working directory -- confirm this is intended.
+void DSDisplayOverlay::attach(HWND parent, DSDisplayGraph *graph)
+{
+    HRESULT hr;
+
+    // Gets the handle of the parent and the graph
+    this->window = parent;
+    this->displayGraph = graph;
+
+    if (this->window)
+    {
+        // Minimal windowed-mode device; only used to host the overlay surface.
+        D3DPRESENT_PARAMETERS d3dpp;
+        ZeroMemory(&d3dpp, sizeof(D3DPRESENT_PARAMETERS));
+        d3dpp.Windowed = TRUE;
+        d3dpp.SwapEffect = D3DSWAPEFFECT_COPY;
+
+        hr = this->direct3D->CreateDevice(
+            D3DADAPTER_DEFAULT,
+            D3DDEVTYPE_HAL,
+            this->window,
+            D3DCREATE_SOFTWARE_VERTEXPROCESSING,
+            &d3dpp,
+            &this->direct3DDevice);
+        if (FAILED(hr))
+        {
+            cout << "Cannot create Direct3D device" << endl;
+            return;
+        }
+
+        // Query the image dimensions without loading the pixels yet.
+        ZeroMemory(&this->overlayInfo, sizeof(D3DXIMAGE_INFO));
+        hr = D3DXGetImageInfoFromFile(FILENAME, &this->overlayInfo);
+        if (FAILED(hr))
+        {
+            cout << "Cannot stat overlay file" << endl;
+            return;
+        }
+
+        hr = this->direct3DDevice->CreateOffscreenPlainSurface(
+            this->overlayInfo.Width,
+            this->overlayInfo.Height,
+            D3DFMT_A8R8G8B8,
+            D3DPOOL_SYSTEMMEM,
+            &this->direct3DSurface,
+            NULL);
+        if (FAILED(hr))
+        {
+            cout << "Cannot create Direct3D surface" << endl;
+            return;
+        }
+
+        // Black is treated as the transparent color key.
+        D3DCOLOR alphaKey = 0xFF000000;
+
+        hr = D3DXLoadSurfaceFromFile(this->direct3DSurface,
+            NULL,
+            NULL,
+            FILENAME,
+            NULL,
+            D3DX_FILTER_NONE,
+            alphaKey,
+            &this->overlayInfo);
+        if (FAILED(hr))
+        {
+            cout << "Cannot load overlay file" << endl;
+            return;
+        }
+
+        D3DVIEWPORT9 viewport;
+        ZeroMemory(&viewport, sizeof(D3DVIEWPORT9));
+
+        hr= this->direct3DDevice->GetViewport(&viewport);
+        if (FAILED(hr))
+        {
+            cout << "Cannot get view port" << endl;
+            return;
+        }
+
+        // Mixer parameters: hand the surface to the VMR9 mixer.
+        ZeroMemory(&this->alphaBitmap, sizeof(VMR9AlphaBitmap));
+        this->alphaBitmap.dwFlags = VMR9AlphaBitmap_EntireDDS;
+        this->alphaBitmap.hdc = NULL;
+        this->alphaBitmap.pDDS = this->direct3DSurface;
+        // Source rectangle (the whole image)
+        this->alphaBitmap.rSrc.left = 0;
+        this->alphaBitmap.rSrc.top = 0;
+        this->alphaBitmap.rSrc.right = this->overlayInfo.Width;
+        this->alphaBitmap.rSrc.bottom = this->overlayInfo.Height;
+        // Destination rectangle: centered, then normalized to 0..1 coordinates
+        this->alphaBitmap.rDest.left = (viewport.Width - this->overlayInfo.Width) / 2.0;
+        this->alphaBitmap.rDest.top = (viewport.Height - this->overlayInfo.Height) / 2.0;
+        this->alphaBitmap.rDest.right = this->alphaBitmap.rDest.left + this->overlayInfo.Width;
+        this->alphaBitmap.rDest.bottom = this->alphaBitmap.rDest.top + this->overlayInfo.Height;
+        this->alphaBitmap.rDest.left /= viewport.Width;
+        this->alphaBitmap.rDest.top /= viewport.Height;
+        this->alphaBitmap.rDest.right /= viewport.Width;
+        this->alphaBitmap.rDest.bottom /= viewport.Height;
+        // Alpha value for start
+        this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
+    }
+}
+
+// Release the Direct3D objects created by attach() and forget the graph.
+void DSDisplayOverlay::detach()
+{
+    SAFE_RELEASE(this->direct3DSurface);
+    SAFE_RELEASE(this->direct3DDevice);
+
+    this->displayGraph = NULL;
+    this->window = NULL;
+}
+
+// Start showing the overlay for "value" update ticks, fading the alpha from
+// ALPHA_VALUE_START down to ALPHA_VALUE_STOP.
+void DSDisplayOverlay::show(int value)
+{
+    // Store the ticks to count down
+    this->ticks = value;
+
+    // Compute alpha value decrement per tick
+    this->alphaStep = (this->ticks > 0) ? ((ALPHA_VALUE_START - ALPHA_VALUE_STOP) / this->ticks) : 0;
+    this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
+
+    this->internalUpdate();
+}
+
+// One fade tick: decrement the counter, lower the alpha and re-mix.
+void DSDisplayOverlay::update()
+{
+    if (this->displayGraph && (this->ticks > 0))
+    {
+        this->ticks--;
+
+        // Be sure alpha is in 0.0 .. 1.0 range.
+        float value = this->alphaBitmap.fAlpha;
+        value -= this->alphaStep;
+        this->alphaBitmap.fAlpha = (value >= 0.0f) ? value : 0.0f;
+
+        this->internalUpdate();
+    }
+}
+
+// Push the current alpha-bitmap state to the VMR9 mixer; disables the
+// overlay once the tick count-down has reached zero.
+void DSDisplayOverlay::internalUpdate()
+{
+    HRESULT hr;
+
+    // FIX: show() calls this unconditionally; guard against being invoked
+    // before a successful attach().
+    if (!this->displayGraph || !this->displayGraph->getMixerBitmap())
+    {
+        return;
+    }
+
+    if (this->ticks > 0)
+    {
+        this->alphaBitmap.dwFlags = VMR9AlphaBitmap_EntireDDS;
+    }
+    else
+    {
+        this->alphaBitmap.dwFlags = VMR9AlphaBitmap_Disable;
+    }
+
+    hr = this->displayGraph->getMixerBitmap()->SetAlphaBitmap(&this->alphaBitmap);
+    if (FAILED(hr))
+    {
+        cout << "Failed to mix overlay (" << hr << ")" << endl; // FIX: typo "overylay"
+        return;
+    }
+}
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx b/plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx
new file mode 100644
index 0000000..eb355c4
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx
@@ -0,0 +1,67 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#if !defined(VMR) && !defined(VMR9) && !defined(VMR9_WINDOWLESS)
+
+#include "internals/DSDisplayOverlay.h"
+#include "internals/DSDisplayGraph.h"
+
+#include <iostream>
+
+using namespace std;
+
+// Fallback overlay (no VMR): pure state; drawing is delegated to the source
+// filter.  FIX: members were left uninitialized although show() and
+// internalUpdate() read them before attach() may have run.
+DSDisplayOverlay::DSDisplayOverlay()
+{
+    this->window = NULL;
+    this->displayGraph = NULL;
+    this->ticks = 0;
+}
+
+DSDisplayOverlay::~DSDisplayOverlay()
+{
+}
+
+// Bind the graph whose source filter renders the overlay.  The parent window
+// is not needed by this (non-VMR) implementation.
+void DSDisplayOverlay::attach(HWND parent, DSDisplayGraph *graph)
+{
+    this->displayGraph = graph;
+}
+
+void DSDisplayOverlay::detach()
+{
+    this->displayGraph = NULL;
+}
+
+// Show the overlay for "value" update ticks.
+void DSDisplayOverlay::show(int value)
+{
+    // Store the ticks to count down
+    this->ticks = value;
+
+    this->internalUpdate();
+}
+
+// One count-down tick: decrement and refresh the source-filter overlay.
+void DSDisplayOverlay::update()
+{
+    if (this->displayGraph && (this->ticks > 0))
+    {
+        this->ticks--;
+        this->internalUpdate();
+    }
+}
+
+// Forward the remaining tick count to the source filter.
+void DSDisplayOverlay::internalUpdate()
+{
+    // FIX: show() runs this unconditionally -- guard against being called
+    // before attach() bound the graph (and against a missing source filter).
+    if (this->displayGraph && this->displayGraph->getSourceFilter())
+    {
+        this->displayGraph->getSourceFilter()->showOverlay(this->ticks);
+    }
+}
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayOverlay.h b/plugins/pluginDirectShow/internals/DSDisplayOverlay.h
new file mode 100644
index 0000000..0db887d
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSDisplayOverlay.h
@@ -0,0 +1,68 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSDISPLAYOVERLAY_H
+#define PLUGIN_DSHOW_DSDISPLAYOVERLAY_H
+
+#include "plugin_dshow_config.h"
+#include <strmif.h>
+
+#if defined(VMR9) || defined(VMR9_WINDOWLESS)
+#include <D3D9.h>
+#include <D3Dx9.h>
+#include <vmr9.h>
+#endif
+
+class DSDisplayGraph;
+
+// On-video overlay (a bitmap mixed on top of the rendered stream).  The
+// implementation is selected at build time: VMR, VMR9/VMR9_WINDOWLESS, or a
+// fallback that delegates drawing to the source filter.
+class DSDisplayOverlay
+{
+public:
+    DSDisplayOverlay();
+    virtual ~DSDisplayOverlay();
+
+    // Bind to / unbind from the parent window and display graph.
+    void attach(HWND parent, DSDisplayGraph *graph);
+    void detach();
+
+    // Show the overlay for "value" update ticks, then hide it.
+    void show(int value);
+    // One count-down tick (invoked periodically by the display).
+    void update();
+
+private:
+    void internalUpdate();
+
+private:
+    HWND window;                    // parent window (NULL when detached)
+
+    DSDisplayGraph *displayGraph;   // graph whose mixer receives the overlay
+    int ticks;                      // remaining display/fade ticks
+
+#if defined(VMR)
+    HDC hdcBmp;                     // memory DC holding the overlay bitmap
+    HBITMAP hbmOld;                 // previous bitmap selected into hdcBmp
+    VMRALPHABITMAP alphaBitmap;     // mixer parameters (rects, color key, alpha)
+    float alphaStep;                // alpha decrement per tick
+#elif defined(VMR9) || defined(VMR9_WINDOWLESS)
+    IDirect3D9 *direct3D;
+    IDirect3DDevice9 *direct3DDevice;
+    IDirect3DSurface9 *direct3DSurface;
+    D3DXIMAGE_INFO overlayInfo;     // dimensions/format of the overlay image
+    VMR9AlphaBitmap alphaBitmap;    // mixer parameters (rects + alpha)
+    float alphaStep;                // alpha decrement per tick
+#endif
+};
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx b/plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx
new file mode 100644
index 0000000..cbf2a0a
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx
@@ -0,0 +1,120 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSFrameRateFilter.h"
+
+#include <iostream>
+#include <string>
+
+using namespace std;
+
+#define FPS_INPUT 30
+#define FPS_OUTPUT 5
+
+// {7F9F08CF-139F-40b2-A283-01C4EC26A452}
+TDSHOW_DEFINE_GUID(CLSID_DSFrameRateFilter,
+0x7f9f08cf, 0x139f, 0x40b2, 0xa2, 0x83, 0x1, 0xc4, 0xec, 0x26, 0xa4, 0x52);
+
+// In-place transform filter decimating FPS_INPUT down to FPS_OUTPUT by
+// default; rates can be changed later through SetFps().
+DSFrameRateFilter::DSFrameRateFilter(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr)
+:CTransInPlaceFilter (tszName, punk, CLSID_DSFrameRateFilter, phr)
+{
+    this->m_rtFrameLength = (10000000)/FPS_OUTPUT; // UNITS (100ns) per output frame
+
+    this->m_inputFps = FPS_INPUT;
+    this->m_outputFps = FPS_OUTPUT;
+
+    this->m_iFrameNumber = 0;
+    this->m_progress = 0;
+    this->m_bProcessFrame = true;
+}
+
+DSFrameRateFilter::~DSFrameRateFilter()
+{
+}
+
+// Change the input/output framerates and restart the decimation counters.
+// If inputFps < outputFps no decimation is possible, so both are clamped to
+// inputFps (every sample passes through).  Returns E_FAIL on non-positive
+// values.
+// NOTE(review): m_bProcessFrame is a plain bool used as a gate against
+// Transform() running on the streaming thread -- it is not atomic; confirm
+// the graph is paused/stopped when this is called.
+HRESULT DSFrameRateFilter::SetFps(int inputFps, int outputFps)
+{
+    if(inputFps <= 0 || outputFps <= 0)
+    {
+        return E_FAIL;
+    }
+
+    // Stop prcessing
+    this->m_bProcessFrame = false;
+
+    if (inputFps < outputFps) {
+        this->m_inputFps = this->m_outputFps = inputFps;
+    }
+    else {
+        this->m_outputFps = outputFps;
+        this->m_inputFps = inputFps;
+    }
+
+    // Restart processing
+    this->m_iFrameNumber = 0;
+    this->m_progress = 0;
+    this->m_bProcessFrame = true;
+
+    return S_OK;
+}
+
+// Decide for each sample whether it is forwarded (S_OK) or dropped (S_FALSE)
+// using an error accumulator: every sample adds m_outputFps to m_progress
+// and one sample is accepted each time the accumulator reaches m_inputFps.
+// The very first sample after a (re)start is always accepted.
+HRESULT DSFrameRateFilter::Transform(IMediaSample *pSample)
+{
+    if(!this->m_bProcessFrame) return S_FALSE;
+
+    CheckPointer(pSample, E_POINTER);
+
+    HRESULT hr = S_OK;
+    HRESULT ret = S_FALSE;
+
+    // Clear the timestamps so downstream renders the sample immediately.
+    pSample->SetTime(NULL, NULL);
+
+    // Drop frame?
+    if (this->m_iFrameNumber == 0) {
+        ret = S_OK;
+    }
+    else if (this->m_progress >= this->m_inputFps) {
+        this->m_progress -= this->m_inputFps;
+        ret = S_OK;
+    }
+
+    // Mark frame as accepted
+    if (ret == S_OK) {
+        // Set TRUE on every sample for uncompressed frames
+        pSample->SetSyncPoint(TRUE);
+    }
+
+    this->m_progress += this->m_outputFps;
+    this->m_iFrameNumber++;
+
+    return ret;
+}
+
+// Accept any input media type: this filter only drops/forwards samples and
+// never touches the payload, so no constraint is required.
+HRESULT DSFrameRateFilter::CheckInputType(const CMediaType* mtIn)
+{
+    return S_OK;
+}
+
+// Standard DirectShow class-factory entry point; "*phr" receives
+// E_OUTOFMEMORY when allocation fails.
+CUnknown * WINAPI DSFrameRateFilter::CreateInstance(LPUNKNOWN punk, HRESULT *phr)
+{
+    DSFrameRateFilter *pNewObject = new DSFrameRateFilter(_T("Tdshow DirectShow Framerate Limiter Filter."), punk, phr );
+    if (pNewObject == NULL) {
+        *phr = E_OUTOFMEMORY;
+    }
+    return pNewObject;
+}
diff --git a/plugins/pluginDirectShow/internals/DSFrameRateFilter.h b/plugins/pluginDirectShow/internals/DSFrameRateFilter.h
new file mode 100644
index 0000000..9f2296e
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSFrameRateFilter.h
@@ -0,0 +1,64 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSFRAMERATEFILTER_H
+#define PLUGIN_DSHOW_DSFRAMERATEFILTER_H
+
+#include "plugin_dshow_config.h"
+
+#include <streams.h>
+#include <math.h>
+
+// In-place transform filter that decimates a video stream from m_inputFps
+// down to m_outputFps by dropping samples (see Transform()).
+class DSFrameRateFilter : public CTransInPlaceFilter
+{
+public:
+    DSFrameRateFilter(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr);
+    ~DSFrameRateFilter(void);
+
+public:
+    // S_OK = forward the sample, S_FALSE = drop it.
+    HRESULT Transform(IMediaSample *pSample);
+    // Accepts any input type (pure pass-through).
+    HRESULT CheckInputType(const CMediaType* mtIn);
+
+public:
+    /**
+    * \brief Set the source (inputFps) and target (outputFps) framerates and
+    * restart decimation.  Returns E_FAIL for non-positive values.
+    */
+    HRESULT SetFps(int inputFps, int outputFps);
+
+    static CUnknown *WINAPI CreateInstance(LPUNKNOWN punk, HRESULT *phr);
+    DECLARE_IUNKNOWN;
+
+    /*STDMETHODIMP_(ULONG) NonDelegatingRelease()
+    {
+        if(InterlockedDecrement(&m_cRef) == 0)
+        {
+            delete this;
+            return 0;
+        }
+        return m_cRef;
+    }*/
+
+private:
+    int m_progress;                 // decimation error accumulator
+    int m_inputFps, m_outputFps;    // source / target framerates
+    bool m_bProcessFrame;           // gate: false while SetFps() reconfigures
+    REFERENCE_TIME m_rtFrameLength; // UNITS/fps
+    LONGLONG m_iFrameNumber;        // samples seen since the last restart
+};
+
+#endif ////DSFrameRateFilter_H \ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/DSGrabber.cxx b/plugins/pluginDirectShow/internals/DSGrabber.cxx
new file mode 100644
index 0000000..e4dc3a7
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSGrabber.cxx
@@ -0,0 +1,292 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSGrabber.h"
+#include "internals/DSDisplay.h"
+#include "internals/DSUtils.h"
+#include "internals/DSCaptureUtils.h"
+#include "internals/Resizer.h"
+#include "internals/DSUtils.h"
+#include "internals/DSCaptureGraph.h"
+#if !defined(_WIN32_WCE)
+# include "internals/DSScreenCastGraph.h"
+#endif
+
+#include "tsk_debug.h"
+
+#include <assert.h>
+
+using namespace std;
+
+// Build the capture (or screen-cast) graph and the optional preview.
+// FIX: every member is now initialized *before* the CHECK_HR() calls can
+// jump to "bail"; previously a failed graph creation left plugin_cb, buffer
+// and mutex_buffer uninitialized, and the destructor then freed garbage
+// pointers.
+DSGrabber::DSGrabber(HRESULT *hr, BOOL _screenCast)
+: mutex_buffer(NULL), preview(NULL)
+, screenCast(_screenCast)
+{
+    this->graph = NULL;
+    this->plugin_cb = NULL;
+    this->plugin_cb_data = NULL;
+    this->buffer = NULL;
+    this->first_buffer = TRUE;
+    this->width = 352;
+    this->height = 288;
+    this->fps = 0;
+    this->mutex_buffer = tsk_mutex_create();
+
+    // Init the bitmap info header with default values (CIF, RGB24)
+    memset(&(this->bitmapInfo), 0, sizeof(BITMAPINFOHEADER));
+    this->bitmapInfo.biSize = sizeof(BITMAPINFOHEADER);
+    this->bitmapInfo.biWidth = 352;
+    this->bitmapInfo.biHeight = 288;
+    this->bitmapInfo.biPlanes = 1;
+    this->bitmapInfo.biBitCount = 24;
+    this->bitmapInfo.biCompression = 0;
+    this->bitmapInfo.biXPelsPerMeter = 0;
+    this->bitmapInfo.biYPelsPerMeter = 0;
+    this->bitmapInfo.biClrUsed = 0;
+    this->bitmapInfo.biClrImportant = 0;
+
+#if defined(_WIN32_WCE)
+    assert(!screenCast);
+    this->graph = new DSCaptureGraph(this, hr);
+    CHECK_HR((*hr));
+#else
+    this->graph = screenCast ? dynamic_cast<DSBaseCaptureGraph*>(new DSScreenCastGraph(this, hr)) : dynamic_cast<DSBaseCaptureGraph*>(new DSCaptureGraph(this, hr));
+    CHECK_HR((*hr));
+    this->preview = new DSDisplay(hr);
+#endif
+
+bail: ;
+}
+
+// Release the graph, the preview, the intermediate buffer and the mutex.
+DSGrabber::~DSGrabber()
+{
+    SAFE_DELETE_PTR ( this->graph );
+    SAFE_DELETE_PTR ( this->preview );
+    SAFE_DELETE_ARRAY ( this->buffer );
+    tsk_mutex_destroy(&this->mutex_buffer);
+}
+
+// Select the capture device (DirectShow device path) used by the graph.
+void DSGrabber::setCaptureDevice(const std::string &devicePath)
+{
+    this->graph->setSource(devicePath);
+}
+
+// Register the callback invoked from BufferCB() with each captured frame.
+void DSGrabber::setCallback(tmedia_producer_enc_cb_f callback, const void* callback_data)
+{
+    this->plugin_cb = callback;
+    this->plugin_cb_data = callback_data;
+}
+
+// Start (or resume) capture.  On first start the graph is connected and the
+// very first sample is marked to be skipped.
+void DSGrabber::start()
+{
+    if(this->graph->isPaused()){
+        this->graph->start();
+        // FIX: preview is NULL on some builds (e.g. WinCE); guard like the
+        // cold-start branch below already does.
+        if(this->preview){
+            this->preview->start();
+        }
+        return;
+    }
+
+    if (!this->graph->isRunning()){
+        first_buffer = true;
+
+        if(this->preview){
+            this->preview->start();
+        }
+        this->graph->connect();
+        this->graph->start();
+    }
+}
+
+// Pause capture and the preview.
+void DSGrabber::pause()
+{
+    if(this->graph && this->graph->isRunning()){
+        this->graph->pause();
+        // FIX: same NULL guard as start()/stop().
+        if(this->preview){
+            this->preview->pause();
+        }
+    }
+}
+
+// Stop capture and tear the graph connections down.
+void DSGrabber::stop()
+{
+    if (this->graph->isRunning()){
+        if(this->preview){
+            this->preview->stop();
+        }
+        this->graph->stop();
+        this->graph->disconnect();
+    }
+}
+
+// Negotiate width/height/framerate: rebuild the intermediate RGB24 buffer,
+// pick the best-matching capture format advertised by the device and push
+// the parameters to the graph and to the preview.
+bool DSGrabber::setCaptureParameters(int w, int h, int f)
+{
+    tsk_mutex_lock(this->mutex_buffer);
+
+    // Store the framerate
+    this->fps = f;
+    this->width = w;
+    this->height = h;
+
+    // Store the required dimensions (RGB24 => 3 bytes per pixel)
+    this->bitmapInfo.biWidth = this->width;
+    this->bitmapInfo.biHeight = this->height;
+    this->bitmapInfo.biBitCount = 24;
+    this->bitmapInfo.biSizeImage = (this->width * this->height * 3);
+
+    // Change the intermediate buffer
+    SAFE_DELETE_ARRAY ( this->buffer );
+    this->buffer = new BYTE[this->bitmapInfo.biSizeImage];
+    memset(this->buffer,0,this->bitmapInfo.biSizeImage);
+
+    // Find closest matching format to drive the source filter
+    DSCaptureFormat *fmt = NULL;
+    int score = 0;
+    std::vector<DSCaptureFormat> *formats = this->graph->getFormats();
+    std::vector<DSCaptureFormat>::iterator iter;
+    std::vector<DSCaptureFormat>::iterator last = formats->end();
+    for(iter = formats->begin(); iter != last; iter++){
+        int value = (*iter).getMatchScore(this->width, this->height);
+        if (value > score || !fmt){
+            score = value;
+            fmt = &(*iter);
+        }
+    }
+
+    // Setup source filter in the graph
+    HRESULT hr = this->graph->setParameters(fmt, this->fps);
+    // Set preview parameters
+    if(this->preview){
+        this->preview->setFps(this->fps);
+        this->preview->setSize(this->width, this->height);
+    }
+
+    tsk_mutex_unlock(this->mutex_buffer);
+
+    return SUCCEEDED(hr);
+}
+
+// Forward the "embedded in Firefox" flag to the preview display.
+void DSGrabber::setPluginFirefox(bool value)
+{
+    if(this->preview){
+        this->preview->setPluginFirefox(value);
+    }
+}
+
+// Variant taking a VIDEOFORMAT_* constant instead of explicit dimensions.
+bool DSGrabber::setCaptureParameters(int format, int f)
+{
+    int w, h;
+    // Get size from the format
+    VIDEOFORMAT_TO_SIZE(format, w, h);
+    return this->setCaptureParameters(w, h, f);
+}
+
+int DSGrabber::getFramerate()
+{
+    return this->fps;
+}
+
+// Media type currently negotiated on the grabber's connection.
+HRESULT DSGrabber::getConnectedMediaType(AM_MEDIA_TYPE *mediaType)
+{
+    if (!this->graph || !mediaType) {
+        return E_INVALIDARG;
+    }
+    return this->graph->getConnectedMediaType(mediaType);
+}
+
+// Unused: frames are delivered through BufferCB() instead.
+HRESULT DSGrabber::SampleCB(double SampleTime, IMediaSample *pSample)
+{
+    return S_OK;
+}
+
+// ISampleGrabberCB: invoked on the streaming thread with each captured
+// frame.  Copies/resizes the frame into the intermediate RGB24 buffer, then
+// forwards it to the producer callback and to the preview.
+HRESULT DSGrabber::BufferCB(double SampleTime, BYTE *pBuffer, long BufferLen)
+{
+    HRESULT hr;
+
+    tsk_mutex_lock(this->mutex_buffer);
+
+    AM_MEDIA_TYPE mediaType;
+    hr = this->graph->getConnectedMediaType(&mediaType);
+    if (FAILED(hr) || !this->buffer){
+        // FIX: this early return used to keep "mutex_buffer" locked forever
+        // (dead-locking all later callbacks) and leaked "mediaType" when only
+        // the buffer was missing.
+        goto done;
+    }
+
+    if(first_buffer){
+        // Skip the very first sample after (re)start.
+        first_buffer = false;
+        goto done;
+    }
+
+    // Examine the format block.
+    if ((mediaType.formattype == FORMAT_VideoInfo) && (mediaType.cbFormat >= sizeof(VIDEOINFOHEADER)) && (mediaType.pbFormat != NULL) )
+    {
+        VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER *>(mediaType.pbFormat);
+        BITMAPINFOHEADER* bih = &pVih->bmiHeader;
+
+        if( (bih->biHeight == this->bitmapInfo.biHeight) && (bih->biWidth == this->bitmapInfo.biWidth) && (bih->biBitCount == this->bitmapInfo.biBitCount) )
+        {
+            // Same geometry: straight copy.
+            memmove(this->buffer, pBuffer, this->bitmapInfo.biSizeImage);
+        }
+        else
+        {
+            // Geometry differs: resize into the intermediate buffer.
+            ResizeRGB(
+                bih,
+                (const unsigned char *) pBuffer,
+                &this->bitmapInfo,
+                (unsigned char *) this->buffer,
+                this->width,
+                this->height);
+        }
+
+        // for the network
+        if(this->plugin_cb){
+            this->plugin_cb(this->plugin_cb_data, this->buffer, (this->width*this->height*3));
+        }
+
+        // for the preview
+        if(this->preview){
+            this->preview->handleVideoFrame(this->buffer, this->width, this->height);
+        }
+    }
+
+done:
+    // Free the format block (only filled when the query succeeded)
+#ifdef _WIN32_WCE
+    // Nothing had been allocated
+#else
+    if (SUCCEEDED(hr)) {
+        FreeMediaType(mediaType);
+    }
+#endif
+
+    tsk_mutex_unlock(this->mutex_buffer);
+
+    return hr;
+}
+
+// Minimal IUnknown: expose ISampleGrabberCB only.  The grabber's lifetime is
+// managed by its owner, so AddRef()/Release() return dummy counts instead of
+// doing real reference counting.
+HRESULT DSGrabber::QueryInterface(REFIID iid, LPVOID *ppv)
+{
+#ifdef _WIN32_WCE
+    assert(1==0);
+#else
+    if( iid == IID_ISampleGrabberCB || iid == IID_IUnknown )
+    {
+        *ppv = (void *) static_cast<ISampleGrabberCB*>(this);
+        return NOERROR;
+    }
+#endif
+    return E_NOINTERFACE;
+}
+
+ULONG DSGrabber::AddRef()
+{
+    return 2;
+}
+
+ULONG DSGrabber::Release()
+{
+    return 1;
+} \ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/DSGrabber.h b/plugins/pluginDirectShow/internals/DSGrabber.h
new file mode 100644
index 0000000..64cde75
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSGrabber.h
@@ -0,0 +1,92 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DIRECTSHOW_GRABBER_H
+#define PLUGIN_DSHOW_DIRECTSHOW_GRABBER_H
+
+#include "plugin_dshow_config.h"
+
+#include "internals/DSBaseCaptureGraph.h"
+#include "internals/VideoFrame.h"
+
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_mutex.h"
+
+class DSDisplay;
+
+#if defined(_WIN32_WCE)
+# include "internals/wince/DSISampleGrabberCB.h"
+#endif
+
+
+// Frame grabber: owns the capture (or screen-cast) graph, receives raw
+// frames through the sample-grabber callback, converts them to the
+// negotiated RGB24 size and forwards them to the producer callback and to
+// the optional preview display.
+class DSGrabber : public
+#if defined(_WIN32_WCE)
+    DSISampleGrabberCB
+#else
+    ISampleGrabberCB
+#endif
+{
+public:
+    DSGrabber(HRESULT *hr, BOOL screenCast);
+    virtual ~DSGrabber();
+
+    // Callback receiving each captured frame (network side).
+    void setCallback(tmedia_producer_enc_cb_f callback, const void* callback_data);
+    void setCaptureDevice(const std::string &devicePath);
+
+    virtual void start();
+    virtual void pause();
+    virtual void stop();
+
+    // Configure size/framerate, from a format constant or explicit values.
+    virtual bool setCaptureParameters(int format, int f);
+    virtual bool setCaptureParameters(int w, int h, int f);
+
+    virtual void setPluginFirefox(bool value);
+
+    virtual int getFramerate();
+    virtual HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
+
+    // Sample-grabber callbacks (frames arrive through BufferCB).
+    virtual HRESULT STDMETHODCALLTYPE SampleCB(double SampleTime, IMediaSample *pSample);
+    virtual HRESULT STDMETHODCALLTYPE BufferCB(double SampleTime, BYTE *pBuffer, long BufferLen);
+
+    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void __RPC_FAR *__RPC_FAR *ppvObject);
+    virtual ULONG STDMETHODCALLTYPE AddRef();
+    virtual ULONG STDMETHODCALLTYPE Release();
+
+    DSDisplay *preview;               // may be NULL (e.g. WinCE build)
+
+private:
+    int width;                        // negotiated frame width (pixels)
+    int height;                       // negotiated frame height (pixels)
+    int fps;
+
+    DSBaseCaptureGraph *graph;        // capture or screen-cast graph (owned)
+
+    //VideoFrame *currentFrame;
+    BITMAPINFOHEADER bitmapInfo;      // target format of the resizer (RGB24)
+    BYTE *buffer;                     // intermediate frame buffer (owned)
+
+    tsk_mutex_handle_t *mutex_buffer; // guards buffer against BufferCB()
+
+    BOOL first_buffer;                // skip the very first captured sample
+    BOOL screenCast;
+
+    const void* plugin_cb_data;
+    tmedia_producer_enc_cb_f plugin_cb;
+};
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSOutputFilter.cxx b/plugins/pluginDirectShow/internals/DSOutputFilter.cxx
new file mode 100644
index 0000000..ab5aa0f
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSOutputFilter.cxx
@@ -0,0 +1,113 @@
+/*
+* Copyright (C) 2010-2011 Mamadou DIOP.
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*
+*/
+#include "internals/DSOutputFilter.h"
+#include "internals/DSOutputStream.h"
+#include "internals/DSUtils.h"
+
+#include "tsk_memory.h"
+
+// Source filter exposing a single output pin (DSOutputStream) that pushes
+// the buffer handed in through setBuffer() into the graph.
+DSOutputFilter::DSOutputFilter(LPUNKNOWN pUnk, HRESULT *phr)
+: CSource(_T("TDSHOW_OUTPUT"), pUnk, CLSID_TdshowOutputFilter)
+{
+#if !(defined(_WIN32_WCE) && defined(_DEBUG))
+    CAutoLock cAutoLock(&m_cStateLock);
+#endif
+
+    // Add one source stream (output pin)!
+    this->outputStream = new DSOutputStream(phr, this, _T("Out"));
+}
+
+DSOutputFilter::~DSOutputFilter()
+{
+    //SAFE_RELEASE(this->outputStream);
+}
+
+// Copy "size" bytes into the stream's internal buffer, growing it with
+// tsk_realloc() when the size changes.
+// NOTE(review): if tsk_realloc() fails, the pointer is overwritten with NULL
+// while buffer_size keeps the old value -- confirm tsk_realloc() semantics
+// (possible leak/inconsistency on allocation failure).
+void DSOutputFilter::setBuffer(void *pointer, int size)
+{
+    this->outputStream->lockBuffer();
+    if(pointer && size){
+        if(this->outputStream->buffer_size != size){
+            if((this->outputStream->buffer = tsk_realloc(this->outputStream->buffer, size))){
+                this->outputStream->buffer_size = size;
+            }
+            else goto done;
+        }
+        memcpy(this->outputStream->buffer, pointer, size);
+    }
+done:
+    this->outputStream->unlockBuffer();
+}
+
+// Intentionally a no-op: the negotiated media type is owned by the stream.
+void DSOutputFilter::getMediaType(AM_MEDIA_TYPE* &pmt)
+{
+    //if(pmt)
+    //{
+    //	memcpy(pmt, &this->outputStream->pmt, sizeof(AM_MEDIA_TYPE));
+    //}
+}
+
+// Reconnect the output pin with a new media type.
+HRESULT DSOutputFilter::setMediaType(const AM_MEDIA_TYPE* pmt)
+{
+    return this->ReconnectPin(this->outputStream, pmt);
+}
+
+HRESULT DSOutputFilter::setImageFormat(UINT width, UINT height)
+{
+    return this->outputStream->setImageFormat(width, height);
+}
+
+bool DSOutputFilter::getImageFormat(UINT &width, UINT &height)
+{
+    if(this->outputStream){
+        return this->outputStream->getImageFormat(width, height);
+    }
+    return false;
+}
+
+void DSOutputFilter::setFps(int fps_)
+{
+    this->outputStream->setFps(fps_);
+}
+
+// Forward the overlay display request to the stream (non-VMR overlay path).
+void DSOutputFilter::showOverlay(int value)
+{
+    this->outputStream->showOverlay(value);
+}
+
+// Reset the stream: restart frame numbering and drop the pending buffer.
+void DSOutputFilter::reset()
+{
+    this->outputStream->frameNumber = 0;
+    this->outputStream->lockBuffer();
+    // FIX: the buffer allocated through setBuffer()/tsk_realloc() was only
+    // NULLed here, leaking it on every reset.  tsk_free() releases it and
+    // sets the pointer back to NULL.
+    tsk_free((void**)&this->outputStream->buffer);
+    this->outputStream->buffer_size = 0;
+    this->outputStream->unlockBuffer();
+}
+
+#ifdef _WIN32_WCE
+// WinCE build: standard self-deleting COM release.
+STDMETHODIMP_(ULONG) DSOutputFilter::NonDelegatingRelease()
+{
+    if(InterlockedDecrement(&m_cRef) == 0)
+    {
+        delete this;
+        return 0;
+    }
+    return m_cRef;
+}
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSOutputFilter.h b/plugins/pluginDirectShow/internals/DSOutputFilter.h
new file mode 100644
index 0000000..fea2d23
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSOutputFilter.h
@@ -0,0 +1,112 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSOUTPUTFILTER_H
+#define PLUGIN_DSHOW_DSOUTPUTFILTER_H
+
+#include "plugin_dshow_config.h"
+
+#include "internals/DSBufferWriter.h"
+#include <streams.h>
+
+class DSOutputStream;
+
+// {17D9D5CB-850D-4339-B72A-F72D084D8D64}
+TDSHOW_DEFINE_GUID(CLSID_TdshowOutputFilter,
+0x17d9d5cb, 0x850d, 0x4339, 0xb7, 0x2a, 0xf7, 0x2d, 0x8, 0x4d, 0x8d, 0x64);
+
+class DSOutputFilter : public CSource, public DSBufferWriter
+{
+public:
+ DSOutputFilter(LPUNKNOWN pUnk, HRESULT *phr);
+ virtual ~DSOutputFilter();
+
+ //static CUnknown * WINAPI CreateInstance(LPUNKNOWN pUnk, HRESULT *phr);
+ DECLARE_IUNKNOWN;
+
+ virtual void setBuffer(void *pointer, int size);
+ virtual inline HRESULT setImageFormat(UINT width, UINT height);
+ virtual bool getImageFormat(UINT &width, UINT &height);
+
+ virtual void setFps(int fps_);
+ virtual void showOverlay(int value);
+
+ virtual void getMediaType(AM_MEDIA_TYPE* &pmt);
+ virtual HRESULT setMediaType(const AM_MEDIA_TYPE* pmt);
+
+ void reset();
+
+#ifdef _WIN32_WCE
+ STDMETHODIMP_(ULONG) NonDelegatingRelease();
+#endif
+
+//protected:
+#ifdef _WIN32_WCE
+ /*STDMETHODIMP QueryInterface(REFIID riid, void **ppv)
+ {
+ CheckPointer(ppv, E_POINTER);
+
+ if (riid == IID_IBaseFilter
+ || riid == IID_IBaseFilter
+ || riid == IID_IUnknown
+ || riid == IID_IMediaFilter
+ )
+ {
+ return GetInterface((IBaseFilter *) this, ppv);
+ }
+ else
+ {
+ *ppv = NULL;
+ return E_NOINTERFACE;
+ }
+ };
+
+ STDMETHODIMP_(ULONG) AddRef() {
+ //return GetOwner()->AddRef();
+ //return 1;
+ return (ULONG)InterlockedIncrement(&m_cRef);
+ };
+
+ STDMETHODIMP_(ULONG) Release() {
+ LONG lRefCount = InterlockedDecrement(&m_cRef);
+ if(m_cRef < 1) delete this;
+ return (ULONG)m_cRef;
+ };
+
+ STDMETHODIMP_(ULONG) NonDelegatingAddRef()
+ {
+ return InterlockedIncrement(&m_cRef);
+ }*/
+#endif
+/*
+ STDMETHODIMP_(ULONG) NonDelegatingRelease()
+ {
+ if(InterlockedDecrement(&m_cRef) == 0)
+ {
+ delete this;
+ return 0;
+ }
+ return m_cRef;
+ }*/
+
+private:
+ DSOutputStream *outputStream;
+
+ friend class DSOutputStream;
+};
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSOutputStream.cxx b/plugins/pluginDirectShow/internals/DSOutputStream.cxx
new file mode 100644
index 0000000..670d0ae
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSOutputStream.cxx
@@ -0,0 +1,313 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSOutputStream.h"
+#include "internals/DSOutputFilter.h"
+#include "internals/DSUtils.h"
+
+#include <iostream>
+
+#include "tsk_memory.h"
+
+using namespace std;
+
+#define DEFAULT_FPS 15
+
+#define MEMCPY_WORKAROUND 1
+
+// Overlay
+#define OVERLAY 0
+#define OVERLAY_TEXT TEXT("Press ESC to exit full screen mode")
+#define OVERLAY_DURATION 3 // in seconds
+
+DSOutputStream::DSOutputStream(HRESULT *phr, DSOutputFilter *pParent, LPCWSTR pPinName)
+: CSourceStream(_T("DSOutputStream"), phr, pParent, pPinName)
+{
+#if !(defined(_WIN32_WCE) && defined(_DEBUG))
+ CAutoLock cAutoLock(m_pFilter->pStateLock());
+#endif
+
+ this->buffer = NULL;
+	this->buffer_size = 0; /* buffer_size is an int, not a pointer */
+
+ this->frameNumber = 0;
+ this->frameLength = (1000)/DEFAULT_FPS;
+ this->fps = DEFAULT_FPS;
+
+ this->width = 352;
+ this->height = 288;
+
+ this->overlay = false;
+
+ this->paintBuffer = NULL;
+ this->paintDC = NULL;
+ this->hDibSection = NULL;
+ this->hObject = NULL;
+
+ this->mutex = tsk_mutex_create();
+}
+
+DSOutputStream::~DSOutputStream()
+{
+ TSK_FREE(this->buffer);
+ tsk_mutex_destroy(&this->mutex);
+ // TODO : Is there anything to free ???
+}
+
+void DSOutputStream::setFps(int fps_)
+{
+ this->fps = fps_;
+ this->frameLength = (1000)/this->fps;
+}
+
+void DSOutputStream::showOverlay(int value)
+{
+ if (value == 0){
+ this->overlay = false;
+ }
+ this->overlay = (value > 0);
+}
+
+HRESULT DSOutputStream::setImageFormat(UINT width, UINT height)
+{
+ if ((this->width == width) && (this->height == height)) return S_FALSE;
+
+ this->width = width;
+ this->height = height;
+
+ this->frameNumber = 0;
+
+ return S_OK;
+}
+
+bool DSOutputStream::getImageFormat(UINT &width, UINT &height)
+{
+ width = this->width;
+ height = this->height;
+ return true;
+}
+
+HRESULT DSOutputStream::GetMediaType(CMediaType *pMediaType)
+{
+ HRESULT hr = S_OK;
+#if !(defined(_WIN32_WCE) && defined(_DEBUG))
+ CAutoLock lock(m_pFilter->pStateLock());
+#endif
+
+ ZeroMemory(pMediaType, sizeof(CMediaType));
+
+ VIDEOINFO *pvi = (VIDEOINFO *)pMediaType->AllocFormatBuffer(sizeof(VIDEOINFO));
+ if (NULL == pvi)
+ return E_OUTOFMEMORY;
+
+ ZeroMemory(pvi, sizeof(VIDEOINFO));
+
+ pvi->bmiHeader.biCompression = BI_RGB;
+ pvi->bmiHeader.biBitCount = 24;
+ pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+ pvi->bmiHeader.biWidth = this->width;
+ pvi->bmiHeader.biHeight = this->height;
+ pvi->bmiHeader.biPlanes = 1;
+ pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
+ pvi->bmiHeader.biClrImportant = 0;
+
+ // Frame rate
+ pvi->AvgTimePerFrame = DS_MILLIS_TO_100NS(1000/this->fps);
+
+ SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
+ SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
+
+ pMediaType->SetType(&MEDIATYPE_Video);
+ pMediaType->SetFormatType(&FORMAT_VideoInfo);
+ pMediaType->SetTemporalCompression(FALSE);
+
+ pMediaType->SetSubtype(&MEDIASUBTYPE_RGB24);
+ pMediaType->SetSampleSize(pvi->bmiHeader.biSizeImage);
+
+ bitmapInfo.bmiHeader = pvi->bmiHeader;
+
+ return hr;
+}
+
+HRESULT DSOutputStream::DecideBufferSize(IMemAllocator *pMemAlloc, ALLOCATOR_PROPERTIES *pProperties)
+{
+ CheckPointer(pMemAlloc, E_POINTER);
+ CheckPointer(pProperties, E_POINTER);
+
+#if !(defined(_WIN32_WCE) && defined(_DEBUG))
+ CAutoLock cAutoLock(m_pFilter->pStateLock());
+#endif
+
+ HRESULT hr = NOERROR;
+
+ VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format();
+ pProperties->cBuffers = 1;
+ pProperties->cbBuffer = pvi->bmiHeader.biSizeImage;
+
+ // Ask the allocator to reserve us some sample memory. NOTE: the function
+ // can succeed (return NOERROR) but still not have allocated the
+ // memory that we requested, so we must check we got whatever we wanted.
+ ALLOCATOR_PROPERTIES Actual;
+ hr = pMemAlloc->SetProperties(pProperties,&Actual);
+ if(FAILED(hr)){
+ return hr;
+ }
+
+ // Is this allocator unsuitable?
+ if(Actual.cbBuffer < pProperties->cbBuffer)
+ {
+ return E_FAIL;
+ }
+
+ // Make sure that we have only 1 buffer (we erase the ball in the
+ // old buffer to save having to zero a 200k+ buffer every time
+ // we draw a frame)
+ return NOERROR;
+}
+
+HRESULT DSOutputStream::OnThreadCreate()
+{
+#if OVERLAY
+ hDibSection = CreateDIBSection(NULL, (BITMAPINFO *) &bitmapInfo, DIB_RGB_COLORS, &paintBuffer, NULL, 0);
+
+ HDC hDC = GetDC(NULL);
+ paintDC = CreateCompatibleDC(hDC);
+ SetMapMode(paintDC, GetMapMode(hDC));
+ SetBkMode(paintDC, TRANSPARENT);
+ SetTextColor(paintDC, RGB(255,255,255));
+
+ hObject = SelectObject(paintDC, hDibSection);
+#endif
+
+ return CSourceStream::OnThreadCreate();
+}
+
+HRESULT DSOutputStream::OnThreadDestroy()
+{
+#if OVERLAY
+ if (paintDC) DeleteDC(paintDC);
+ if (hObject) DeleteObject(hObject);
+
+ if (paintBuffer)
+ {
+ //delete[] paintBuffer; // will be done
+ //paintBuffer = NULL;
+ }
+#endif
+ return CSourceStream::OnThreadDestroy();
+}
+
+inline HRESULT DSOutputStream::DrawOverLay(void *pBuffer, long lSize)
+{
+ // called only #if OVERLAY
+ CopyMemory(paintBuffer, pBuffer, lSize);
+
+ // Draw the current frame
+#ifdef _WIN32_WCE
+
+#else
+ if( !TextOut( paintDC, 0, 0, OVERLAY_TEXT, (int)_tcslen( OVERLAY_TEXT ) ) ) return E_FAIL;
+#endif
+
+ CopyMemory(pBuffer, paintBuffer, lSize);
+
+ return S_OK;
+}
+
+static __inline void TransfertBuffer(void* src, void* dest, long lSize)
+{
+ __try
+ {
+#if MEMCPY_WORKAROUND
+ //#ifdef _WIN32_WCE
+ memmove(dest, src, lSize);
+ /*#else
+ unsigned char * pDst = (unsigned char *) dest;
+
+ if(src){
+ unsigned char const * pSrc = (unsigned char const *) src;
+ for( register int i=0; ((i< lSize) && src); i++) *pDst++ = *pSrc++;
+ }else{
+ for( register int i=0; i< lSize; i++) *pDst++ = 0;
+ }
+ #endif*/
+#else
+ CopyMemory(dest, src, lSize); //BUGGY
+#endif
+ }
+ __except(EXCEPTION_ACCESS_VIOLATION == GetExceptionCode())
+ {
+ //ZeroMemory(dest, sizeof(void*));
+ }
+}
+
+HRESULT DSOutputStream::FillBuffer(IMediaSample *pSample)
+{
+ CheckPointer(pSample, E_POINTER);
+#if !(defined(_WIN32_WCE) && defined(_DEBUG))
+ CAutoLock lock(m_pFilter->pStateLock());
+#endif
+
+ HRESULT hr;
+ BYTE *pBuffer = NULL;
+ long lSize, lDataSize;
+
+ hr = pSample->GetPointer(&pBuffer);
+ if (SUCCEEDED(hr))
+ {
+ lDataSize = lSize = pSample->GetSize();
+
+ // Check that we're still using video
+ //ASSERT(m_mt.formattype == FORMAT_VideoInfo);
+
+ if (this->buffer)
+ {
+#if OVERLAY
+ if (this->overlay)
+ {
+ DrawOverLay(this->buffer, lSize);
+ }
+#endif
+			// Note: C++ try/catch cannot catch SEH access violations here (hence __try/__except in TransfertBuffer), see: http://msdn2.microsoft.com/en-us/library/xwtb73ad(vs.80).aspx
+ this->lockBuffer();
+ lDataSize = TSK_MIN(lSize, this->buffer_size);
+ TransfertBuffer(this->buffer, (void*)pBuffer, lDataSize);
+ this->unlockBuffer();
+ }
+ else
+ {
+ // Avoid caching last image
+			memset((void*)pBuffer, 0, lSize); /* fill byte is an int: use 0, not NULL */
+ }
+
+ REFERENCE_TIME rtStart = DS_MILLIS_TO_100NS(this->frameNumber * this->frameLength);
+ REFERENCE_TIME rtStop = rtStart + DS_MILLIS_TO_100NS(this->frameLength);
+
+ this->frameNumber++;
+
+ pSample->SetTime(&rtStart, &rtStop);
+ //pSample->SetMediaTime(&rtStart, &rtStop);
+ pSample->SetActualDataLength(lDataSize);
+ pSample->SetPreroll(FALSE);
+ pSample->SetDiscontinuity(FALSE);
+ }
+
+ // Set TRUE on every sample for uncompressed frames (KEYFRAME)
+ pSample->SetSyncPoint(TRUE);
+
+ return S_OK;
+}
diff --git a/plugins/pluginDirectShow/internals/DSOutputStream.h b/plugins/pluginDirectShow/internals/DSOutputStream.h
new file mode 100644
index 0000000..db3ede4
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSOutputStream.h
@@ -0,0 +1,90 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSOUTPUTSTREAM_H
+#define PLUGIN_DSHOW_DSOUTPUTSTREAM_H
+
+#include "plugin_dshow_config.h"
+
+#include <streams.h>
+
+#include "tsk_mutex.h"
+
+class DSOutputFilter;
+
+class DSOutputStream : public CSourceStream
+{
+public:
+ DSOutputStream(HRESULT *phr, DSOutputFilter *pParent, LPCWSTR pPinName);
+ virtual ~DSOutputStream();
+
+ void setFps(int fps_);
+ void showOverlay(int value);
+
+ HRESULT setImageFormat(UINT width, UINT height);
+ bool getImageFormat(UINT &width, UINT &height);
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) { return E_NOTIMPL; };
+ inline bool lockBuffer() {
+ if (this->mutex) {
+ return tsk_mutex_lock(this->mutex) == 0;
+ }
+ return false;
+ }
+ inline bool unlockBuffer() {
+ if (this->mutex) {
+ return tsk_mutex_unlock(this->mutex) == 0;
+ }
+ return false;
+ }
+
+public:
+ void *buffer;
+ int buffer_size;
+ LONGLONG frameNumber;
+
+protected: // Overrides
+ HRESULT GetMediaType(CMediaType *pMediaType);
+ HRESULT DecideBufferSize(IMemAllocator *pMemAlloc, ALLOCATOR_PROPERTIES *pProperties);
+ HRESULT OnThreadCreate();
+ HRESULT OnThreadDestroy();
+ HRESULT FillBuffer(IMediaSample *pSample);
+
+private:
+ inline HRESULT DrawOverLay(void *pBuffer, long lSize);
+
+private:
+ // TIMING
+ REFERENCE_TIME frameLength;
+ int fps;
+
+ // sizing
+ UINT width;
+ UINT height;
+
+ // overlaying
+ bool overlay;
+ BITMAPINFO bitmapInfo;
+ void *paintBuffer;
+ HDC paintDC;
+ HBITMAP hDibSection;
+ HGDIOBJ hObject;
+
+ tsk_mutex_handle_t* mutex;
+};
+
+
+#endif
diff --git a/plugins/pluginDirectShow/internals/DSPushSource.h b/plugins/pluginDirectShow/internals/DSPushSource.h
new file mode 100644
index 0000000..140d9bd
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSPushSource.h
@@ -0,0 +1,496 @@
+//------------------------------------------------------------------------------
+// File: PushSource.H
+//
+// Desc: DirectShow sample code - In-memory push mode source filter
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+//------------------------------------------------------------------------------
+#ifndef PLUGIN_DSHOW_DSPUSHSOURCE_H
+#define PLUGIN_DSHOW_DSPUSHSOURCE_H
+
+#include "plugin_dshow_config.h"
+
+#include <strsafe.h>
+
+// UNITS = 10 ^ 7
+// UNITS / 30 = 30 fps;
+// UNITS / 20 = 20 fps, etc
+const REFERENCE_TIME FPS_30 = UNITS / 30;
+const REFERENCE_TIME FPS_20 = UNITS / 20;
+const REFERENCE_TIME FPS_10 = UNITS / 10;
+const REFERENCE_TIME FPS_5 = UNITS / 5;
+const REFERENCE_TIME FPS_4 = UNITS / 4;
+const REFERENCE_TIME FPS_3 = UNITS / 3;
+const REFERENCE_TIME FPS_2 = UNITS / 2;
+const REFERENCE_TIME FPS_1 = UNITS / 1;
+
+const REFERENCE_TIME rtDefaultFrameLength = FPS_10;
+
+// Filter name strings
+#define g_wszPushBitmap L"PushSource Bitmap Filter"
+#define g_wszPushBitmapSet L"PushSource BitmapSet Filter"
+#define g_wszPushDesktop L"PushSource Desktop Filter"
+
+// Number of bitmap files to load in the CPushPinBitmapSet class
+#define NUM_FILES 5
+
+// {3FD3081A-A8C9-4958-9F75-07EC89690024}
+TDSHOW_DEFINE_GUID(CLSID_PushSourceDesktop,
+0x3fd3081a, 0xa8c9, 0x4958, 0x9f, 0x75, 0x7, 0xec, 0x89, 0x69, 0x0, 0x24);
+
+
+
+/**********************************************
+ *
+ * Class declarations
+ *
+ **********************************************/
+
+class CPushPinBitmap : public CSourceStream
+{
+protected:
+
+ int m_FramesWritten; // To track where we are in the file
+ BOOL m_bZeroMemory; // Do we need to clear the buffer?
+ CRefTime m_rtSampleTime; // The time stamp for each sample
+
+ BITMAPINFO *m_pBmi; // Pointer to the bitmap header
+ DWORD m_cbBitmapInfo; // Size of the bitmap header
+
+ // File opening variables
+ HANDLE m_hFile; // Handle returned from CreateFile
+ BYTE * m_pFile; // Points to beginning of file buffer
+ BYTE * m_pImage; // Points to pixel bits
+
+ int m_iFrameNumber;
+ const REFERENCE_TIME m_rtFrameLength;
+
+ CCritSec m_cSharedState; // Protects our internal state
+ CImageDisplay m_Display; // Figures out our media type for us
+
+public:
+
+ CPushPinBitmap(HRESULT *phr, CSource *pFilter);
+ ~CPushPinBitmap();
+
+ // Override the version that offers exactly one media type
+ HRESULT GetMediaType(CMediaType *pMediaType);
+ HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
+ HRESULT FillBuffer(IMediaSample *pSample);
+
+ // Quality control
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
+ {
+ return E_FAIL;
+ }
+
+};
+
+
+class CPushPinBitmapSet : public CSourceStream
+{
+protected:
+
+ int m_FramesWritten; // To track where we are in the file
+ BOOL m_bZeroMemory; // Do we need to clear the buffer?
+ CRefTime m_rtSampleTime; // The time stamp for each sample
+
+ BITMAPINFO *m_pBmi[NUM_FILES]; // Pointer to the bitmap headers
+ DWORD m_cbBitmapInfo[NUM_FILES]; // Size of the bitmap headers
+
+ // File opening variables
+ HANDLE m_hFile[NUM_FILES]; // Handles returned from CreateFile
+ BYTE * m_pFile[NUM_FILES]; // Points to beginning of file buffers
+ BYTE * m_pImage[NUM_FILES]; // Points to pixel bits
+ BOOL m_bFilesLoaded;
+
+ int m_iCurrentBitmap; // Which bitmap is being displayed
+ int m_iFrameNumber; // How many frames have been displayed
+ const REFERENCE_TIME m_rtFrameLength; // Duration of one frame
+
+ CCritSec m_cSharedState; // Protects our internal state
+ CImageDisplay m_Display; // Figures out our media type for us
+
+public:
+
+ CPushPinBitmapSet(HRESULT *phr, CSource *pFilter);
+ ~CPushPinBitmapSet();
+
+ // Override the version that offers exactly one media type
+ HRESULT GetMediaType(CMediaType *pMediaType);
+ HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
+ HRESULT FillBuffer(IMediaSample *pSample);
+
+ // Quality control
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
+ {
+ return E_FAIL;
+ }
+
+};
+
+
+class CPushPinDesktop : public CSourceStream
+{
+protected:
+
+ int m_FramesWritten; // To track where we are in the file
+ BOOL m_bZeroMemory; // Do we need to clear the buffer?
+ CRefTime m_rtSampleTime; // The time stamp for each sample
+
+ int m_iFrameNumber;
+ const REFERENCE_TIME m_rtFrameLength;
+
+ RECT m_rScreen; // Rect containing entire screen coordinates
+
+ int m_iImageHeight; // The current image height
+ int m_iImageWidth; // And current image width
+ int m_iRepeatTime; // Time in msec between frames
+ int m_nCurrentBitDepth; // Screen bit depth
+
+ CMediaType m_MediaType;
+ CCritSec m_cSharedState; // Protects our internal state
+ CImageDisplay m_Display; // Figures out our media type for us
+
+ HWND m_hSrcHwnd; // Handle to the window to grab
+
+public:
+
+ CPushPinDesktop(HRESULT *phr, CSource *pFilter);
+ ~CPushPinDesktop();
+
+ // Override the version that offers exactly one media type
+ HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
+ HRESULT FillBuffer(IMediaSample *pSample);
+
+ // Set the agreed media type and set up the necessary parameters
+ HRESULT SetMediaType(const CMediaType *pMediaType);
+
+ // Support multiple display formats
+ HRESULT CheckMediaType(const CMediaType *pMediaType);
+ HRESULT GetMediaType(int iPosition, CMediaType *pmt);
+
+ // Quality control
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
+ {
+ return E_FAIL;
+ }
+
+ HRESULT SetSrcHwnd(HWND hWnd)
+ {
+ m_hSrcHwnd = hWnd;
+ return S_OK;
+ }
+
+};
+
+
+
+class CPushSourceBitmap : public CSource
+{
+
+private:
+ // Constructor is private because you have to use CreateInstance
+ CPushSourceBitmap(IUnknown *pUnk, HRESULT *phr);
+ ~CPushSourceBitmap();
+
+ CPushPinBitmap *m_pPin;
+
+public:
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+
+};
+
+
+class CPushSourceBitmapSet : public CSource
+{
+
+private:
+ // Constructor is private because you have to use CreateInstance
+ CPushSourceBitmapSet(IUnknown *pUnk, HRESULT *phr);
+ ~CPushSourceBitmapSet();
+
+ CPushPinBitmapSet *m_pPin;
+
+public:
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+
+};
+
+
+class CPushSourceDesktop : public CSource
+{
+
+private:
+ // Constructor is private because you have to use CreateInstance
+ CPushSourceDesktop(IUnknown *pUnk, HRESULT *phr);
+ ~CPushSourceDesktop();
+
+ CPushPinDesktop *m_pPin;
+
+public:
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+ DECLARE_IUNKNOWN;
+
+ HRESULT SetSrcHwnd(HWND hWnd);
+};
+
+
+#endif /* PLUGIN_DSHOW_DSPUSHSOURCE_H */
+//------------------------------------------------------------------------------
+// File: PushSource.H
+//
+// Desc: DirectShow sample code - In-memory push mode source filter
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+//------------------------------------------------------------------------------
+#ifndef PLUGIN_DSHOW_DSPUSHSOURCE_H
+#define PLUGIN_DSHOW_DSPUSHSOURCE_H
+
+#include "plugin_dshow_config.h"
+
+#include <strsafe.h>
+
+// UNITS = 10 ^ 7
+// UNITS / 30 = 30 fps;
+// UNITS / 20 = 20 fps, etc
+const REFERENCE_TIME FPS_30 = UNITS / 30;
+const REFERENCE_TIME FPS_20 = UNITS / 20;
+const REFERENCE_TIME FPS_10 = UNITS / 10;
+const REFERENCE_TIME FPS_5 = UNITS / 5;
+const REFERENCE_TIME FPS_4 = UNITS / 4;
+const REFERENCE_TIME FPS_3 = UNITS / 3;
+const REFERENCE_TIME FPS_2 = UNITS / 2;
+const REFERENCE_TIME FPS_1 = UNITS / 1;
+
+const REFERENCE_TIME rtDefaultFrameLength = FPS_10;
+
+// Filter name strings
+#define g_wszPushBitmap L"PushSource Bitmap Filter"
+#define g_wszPushBitmapSet L"PushSource BitmapSet Filter"
+#define g_wszPushDesktop L"PushSource Desktop Filter"
+
+// Number of bitmap files to load in the CPushPinBitmapSet class
+#define NUM_FILES 5
+
+// {3FD3081A-A8C9-4958-9F75-07EC89690024}
+TDSHOW_DEFINE_GUID(CLSID_PushSourceDesktop,
+0x3fd3081a, 0xa8c9, 0x4958, 0x9f, 0x75, 0x7, 0xec, 0x89, 0x69, 0x0, 0x24);
+
+
+
+/**********************************************
+ *
+ * Class declarations
+ *
+ **********************************************/
+
+class CPushPinBitmap : public CSourceStream
+{
+protected:
+
+ int m_FramesWritten; // To track where we are in the file
+ BOOL m_bZeroMemory; // Do we need to clear the buffer?
+ CRefTime m_rtSampleTime; // The time stamp for each sample
+
+ BITMAPINFO *m_pBmi; // Pointer to the bitmap header
+ DWORD m_cbBitmapInfo; // Size of the bitmap header
+
+ // File opening variables
+ HANDLE m_hFile; // Handle returned from CreateFile
+ BYTE * m_pFile; // Points to beginning of file buffer
+ BYTE * m_pImage; // Points to pixel bits
+
+ int m_iFrameNumber;
+ const REFERENCE_TIME m_rtFrameLength;
+
+ CCritSec m_cSharedState; // Protects our internal state
+ CImageDisplay m_Display; // Figures out our media type for us
+
+public:
+
+ CPushPinBitmap(HRESULT *phr, CSource *pFilter);
+ ~CPushPinBitmap();
+
+ // Override the version that offers exactly one media type
+ HRESULT GetMediaType(CMediaType *pMediaType);
+ HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
+ HRESULT FillBuffer(IMediaSample *pSample);
+
+ // Quality control
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
+ {
+ return E_FAIL;
+ }
+
+};
+
+
+class CPushPinBitmapSet : public CSourceStream
+{
+protected:
+
+ int m_FramesWritten; // To track where we are in the file
+ BOOL m_bZeroMemory; // Do we need to clear the buffer?
+ CRefTime m_rtSampleTime; // The time stamp for each sample
+
+ BITMAPINFO *m_pBmi[NUM_FILES]; // Pointer to the bitmap headers
+ DWORD m_cbBitmapInfo[NUM_FILES]; // Size of the bitmap headers
+
+ // File opening variables
+ HANDLE m_hFile[NUM_FILES]; // Handles returned from CreateFile
+ BYTE * m_pFile[NUM_FILES]; // Points to beginning of file buffers
+ BYTE * m_pImage[NUM_FILES]; // Points to pixel bits
+ BOOL m_bFilesLoaded;
+
+ int m_iCurrentBitmap; // Which bitmap is being displayed
+ int m_iFrameNumber; // How many frames have been displayed
+ const REFERENCE_TIME m_rtFrameLength; // Duration of one frame
+
+ CCritSec m_cSharedState; // Protects our internal state
+ CImageDisplay m_Display; // Figures out our media type for us
+
+public:
+
+ CPushPinBitmapSet(HRESULT *phr, CSource *pFilter);
+ ~CPushPinBitmapSet();
+
+ // Override the version that offers exactly one media type
+ HRESULT GetMediaType(CMediaType *pMediaType);
+ HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
+ HRESULT FillBuffer(IMediaSample *pSample);
+
+ // Quality control
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
+ {
+ return E_FAIL;
+ }
+
+};
+
+
+class CPushPinDesktop : public CSourceStream
+{
+protected:
+
+ int m_FramesWritten; // To track where we are in the file
+ BOOL m_bZeroMemory; // Do we need to clear the buffer?
+ CRefTime m_rtSampleTime; // The time stamp for each sample
+
+ int m_iFrameNumber;
+ const REFERENCE_TIME m_rtFrameLength;
+
+ RECT m_rScreen; // Rect containing entire screen coordinates
+
+ int m_iImageHeight; // The current image height
+ int m_iImageWidth; // And current image width
+ int m_iRepeatTime; // Time in msec between frames
+ int m_nCurrentBitDepth; // Screen bit depth
+
+ CMediaType m_MediaType;
+ CCritSec m_cSharedState; // Protects our internal state
+ CImageDisplay m_Display; // Figures out our media type for us
+
+ HWND m_hSrcHwnd; // Handle to the window to grab
+
+public:
+
+ CPushPinDesktop(HRESULT *phr, CSource *pFilter);
+ ~CPushPinDesktop();
+
+ // Override the version that offers exactly one media type
+ HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
+ HRESULT FillBuffer(IMediaSample *pSample);
+
+ // Set the agreed media type and set up the necessary parameters
+ HRESULT SetMediaType(const CMediaType *pMediaType);
+
+ // Support multiple display formats
+ HRESULT CheckMediaType(const CMediaType *pMediaType);
+ HRESULT GetMediaType(int iPosition, CMediaType *pmt);
+
+ // Quality control
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
+ {
+ return E_FAIL;
+ }
+
+ HRESULT SetSrcHwnd(HWND hWnd)
+ {
+ m_hSrcHwnd = hWnd;
+ return S_OK;
+ }
+
+};
+
+
+
+class CPushSourceBitmap : public CSource
+{
+
+private:
+ // Constructor is private because you have to use CreateInstance
+ CPushSourceBitmap(IUnknown *pUnk, HRESULT *phr);
+ ~CPushSourceBitmap();
+
+ CPushPinBitmap *m_pPin;
+
+public:
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+
+};
+
+
+class CPushSourceBitmapSet : public CSource
+{
+
+private:
+ // Constructor is private because you have to use CreateInstance
+ CPushSourceBitmapSet(IUnknown *pUnk, HRESULT *phr);
+ ~CPushSourceBitmapSet();
+
+ CPushPinBitmapSet *m_pPin;
+
+public:
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+
+};
+
+
+class CPushSourceDesktop : public CSource
+{
+
+private:
+ // Constructor is private because you have to use CreateInstance
+ CPushSourceDesktop(IUnknown *pUnk, HRESULT *phr);
+ ~CPushSourceDesktop();
+
+ CPushPinDesktop *m_pPin;
+
+public:
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+ DECLARE_IUNKNOWN;
+
+ HRESULT SetSrcHwnd(HWND hWnd);
+};
+
+
+#endif /* PLUGIN_DSHOW_DSPUSHSOURCE_H */
diff --git a/plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx b/plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx
new file mode 100644
index 0000000..1bdee9e
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx
@@ -0,0 +1,434 @@
+#if !defined(_WIN32_WCE)
+//------------------------------------------------------------------------------
+// File: PushSourceDesktop.cpp
+//
+// Desc: DirectShow sample code - In-memory push mode source filter
+// Provides an image of the user's desktop as a continuously updating stream.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved.
+//------------------------------------------------------------------------------
+
+#include <streams.h>
+
+#include "DSPushSource.h"
+#include "DSDibHelper.h"
+#include "DSUtils.h"
+
+
+/**********************************************
+ *
+ * CPushPinDesktop Class
+ *
+ *
+ **********************************************/
+
+// Pin constructor: measures the primary display once and streams it at
+// FPS_5 (5 frames per second) through the pin named "Out".
+CPushPinDesktop::CPushPinDesktop(HRESULT *phr, CSource *pFilter)
+        : CSourceStream(NAME("Push Source Desktop"), phr, pFilter, L"Out"),
+        m_FramesWritten(0),
+        m_bZeroMemory(0),
+        m_iFrameNumber(0),
+        m_rtFrameLength(FPS_5), // Capture and display desktop 5 times per second
+        m_nCurrentBitDepth(24),
+        m_hSrcHwnd(NULL)
+{
+    // NOTE(review): this class derives from the Microsoft "PushSource"
+    // sample; unlike the sample's prose suggested (reading a bitmap from a
+    // file), this pin grabs the live desktop via CopyScreenToBitmap() in
+    // FillBuffer() and hands the frames to the downstream filter.
+
+    // Get the device context of the main display
+    HDC hDC;
+    hDC = CreateDC(TEXT("DISPLAY"), NULL, NULL, NULL);
+
+    // Get the dimensions of the main desktop window
+    m_rScreen.left = m_rScreen.top = 0;
+    m_rScreen.right = GetDeviceCaps(hDC, HORZRES);
+    m_rScreen.bottom = GetDeviceCaps(hDC, VERTRES);
+
+    // Save dimensions for later use in FillBuffer()
+    m_iImageWidth = m_rScreen.right - m_rScreen.left;
+    m_iImageHeight = m_rScreen.bottom - m_rScreen.top;
+
+    // Release the device context
+    DeleteDC(hDC);
+}
+
+// Destructor: only logs how many frames were pushed during the session.
+CPushPinDesktop::~CPushPinDesktop()
+{
+    DbgLog((LOG_TRACE, 3, TEXT("Frames written %d"), m_iFrameNumber));
+}
+
+
+//
+// GetMediaType
+//
+// Prefer 5 formats - 8, 16 (*2), 24 or 32 bits per pixel
+//
+// Preferred types should be ordered by quality, with zero as highest quality.
+// Therefore, iPosition =
+//      0    Return a 32bit mediatype
+//      1    Return a 24bit mediatype
+//      2    Return 16bit RGB565
+//      3    Return a 16bit mediatype (rgb555)
+//      4    Return 8 bit palettised format
+//      >4   Invalid
+// Image dimensions are always the desktop size measured in the constructor.
+//
+HRESULT CPushPinDesktop::GetMediaType(int iPosition, CMediaType *pmt)
+{
+    CheckPointer(pmt,E_POINTER);
+    CAutoLock cAutoLock(m_pFilter->pStateLock());
+
+    if(iPosition < 0)
+        return E_INVALIDARG;
+
+    // Have we run off the end of types?
+    if(iPosition > 4)
+        return VFW_S_NO_MORE_ITEMS;
+
+    VIDEOINFO *pvi = (VIDEOINFO *) pmt->AllocFormatBuffer(sizeof(VIDEOINFO));
+    if(NULL == pvi)
+        return(E_OUTOFMEMORY);
+
+    // Initialize the VideoInfo structure before configuring its members
+    ZeroMemory(pvi, sizeof(VIDEOINFO));
+
+    switch(iPosition)
+    {
+        case 0:
+        {
+            // Return our highest quality 32bit format
+
+            // Since we use RGB888 (the default for 32 bit), there is
+            // no reason to use BI_BITFIELDS to specify the RGB
+            // masks. Also, not everything supports BI_BITFIELDS
+            pvi->bmiHeader.biCompression = BI_RGB;
+            pvi->bmiHeader.biBitCount    = 32;
+            break;
+        }
+
+        case 1:
+        {   // Return our 24bit format
+            pvi->bmiHeader.biCompression = BI_RGB;
+            pvi->bmiHeader.biBitCount    = 24;
+            break;
+        }
+
+        case 2:
+        {
+            // 16 bit per pixel RGB565
+
+            // Place the RGB masks as the first 3 doublewords in the palette area
+            for(int i = 0; i < 3; i++)
+                pvi->TrueColorInfo.dwBitMasks[i] = bits565[i];
+
+            pvi->bmiHeader.biCompression = BI_BITFIELDS;
+            pvi->bmiHeader.biBitCount    = 16;
+            break;
+        }
+
+        case 3:
+        {   // 16 bits per pixel RGB555
+
+            // Place the RGB masks as the first 3 doublewords in the palette area
+            for(int i = 0; i < 3; i++)
+                pvi->TrueColorInfo.dwBitMasks[i] = bits555[i];
+
+            pvi->bmiHeader.biCompression = BI_BITFIELDS;
+            pvi->bmiHeader.biBitCount    = 16;
+            break;
+        }
+
+        case 4:
+        {   // 8 bit palettised
+
+            pvi->bmiHeader.biCompression = BI_RGB;
+            pvi->bmiHeader.biBitCount    = 8;
+            pvi->bmiHeader.biClrUsed     = iPALETTE_COLORS;
+            break;
+        }
+    }
+
+    // Adjust the parameters common to all formats
+    pvi->bmiHeader.biSize       = sizeof(BITMAPINFOHEADER);
+    pvi->bmiHeader.biWidth      = m_iImageWidth;
+    pvi->bmiHeader.biHeight     = m_iImageHeight;
+    pvi->bmiHeader.biPlanes     = 1;
+    pvi->bmiHeader.biSizeImage  = GetBitmapSize(&pvi->bmiHeader);
+    pvi->bmiHeader.biClrImportant = 0;
+
+    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
+    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
+
+    pmt->SetType(&MEDIATYPE_Video);
+    pmt->SetFormatType(&FORMAT_VideoInfo);
+    pmt->SetTemporalCompression(FALSE);
+
+    // Work out the GUID for the subtype from the header info.
+    const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader);
+    pmt->SetSubtype(&SubTypeGUID);
+    pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);
+
+    return NOERROR;
+
+} // GetMediaType
+
+
+//
+// CheckMediaType
+//
+// NOTE(review): the sample this derives from accepted 8/16/24/32-bit
+// formats, but the alternative subtypes are currently compiled out
+// (#if 0 below) — only fixed-size MEDIASUBTYPE_RGB24 video whose
+// dimensions match the captured desktop is accepted.
+// Returns E_INVALIDARG if the mediatype is not acceptable.
+//
+HRESULT CPushPinDesktop::CheckMediaType(const CMediaType *pMediaType)
+{
+    CheckPointer(pMediaType,E_POINTER);
+
+    if((*(pMediaType->Type()) != MEDIATYPE_Video) ||   // we only output video
+        !(pMediaType->IsFixedSize()))                  // in fixed size samples
+    {
+        return E_INVALIDARG;
+    }
+
+    // Check for the subtypes we support
+    const GUID *SubType = pMediaType->Subtype();
+    if (SubType == NULL)
+        return E_INVALIDARG;
+
+    if(    (*SubType != MEDIASUBTYPE_RGB24)
+#if 0
+        && (*SubType != MEDIASUBTYPE_RGB565)
+        && (*SubType != MEDIASUBTYPE_RGB555)
+        && (*SubType != MEDIASUBTYPE_RGB32)
+        && (*SubType != MEDIASUBTYPE_RGB8)
+#endif
+        )
+    {
+        return E_INVALIDARG;
+    }
+
+    // Get the format area of the media type
+    VIDEOINFO *pvi = (VIDEOINFO *) pMediaType->Format();
+
+    if(pvi == NULL)
+        return E_INVALIDARG;
+
+    // Check if the image width & height have changed
+    if(    pvi->bmiHeader.biWidth   != m_iImageWidth ||
+       abs(pvi->bmiHeader.biHeight) != m_iImageHeight)
+    {
+        // If the image width/height is changed, fail CheckMediaType() to force
+        // the renderer to resize the image.
+        return E_INVALIDARG;
+    }
+
+    // Don't accept formats with negative height, which would cause the desktop
+    // image to be displayed upside down.
+    if (pvi->bmiHeader.biHeight < 0)
+        return E_INVALIDARG;
+
+    return S_OK;  // This format is acceptable.
+
+} // CheckMediaType
+
+
+//
+// DecideBufferSize
+//
+// Called after the format has been successfully negotiated, so m_mt holds
+// the agreed media type; size the allocator's buffers from its VIDEOINFO
+// image size. Returns E_UNEXPECTED when no format was agreed yet (the
+// original dereferenced a possibly-NULL m_mt.Format()).
+//
+HRESULT CPushPinDesktop::DecideBufferSize(IMemAllocator *pAlloc,
+                                          ALLOCATOR_PROPERTIES *pProperties)
+{
+    CheckPointer(pAlloc,E_POINTER);
+    CheckPointer(pProperties,E_POINTER);
+
+    CAutoLock cAutoLock(m_pFilter->pStateLock());
+    HRESULT hr = NOERROR;
+
+    VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format();
+    // Guard: fail cleanly if DecideBufferSize() is called before any media
+    // type was agreed instead of dereferencing a NULL format block.
+    if (pvi == NULL)
+        return E_UNEXPECTED;
+
+    pProperties->cBuffers = 1;
+    pProperties->cbBuffer = pvi->bmiHeader.biSizeImage;
+
+    ASSERT(pProperties->cbBuffer);
+
+    // Ask the allocator to reserve us some sample memory. NOTE: the function
+    // can succeed (return NOERROR) but still not have allocated the
+    // memory that we requested, so we must check we got whatever we wanted.
+    ALLOCATOR_PROPERTIES Actual;
+    hr = pAlloc->SetProperties(pProperties,&Actual);
+    if(FAILED(hr))
+    {
+        return hr;
+    }
+
+    // Is this allocator unsuitable?
+    if(Actual.cbBuffer < pProperties->cbBuffer)
+    {
+        return E_FAIL;
+    }
+
+    // One buffer suffices: FillBuffer() fully regenerates the frame with
+    // CopyScreenToBitmap() on each call. (The "erase the ball" comment here
+    // was a leftover from the DirectShow sample this file derives from.)
+    ASSERT(Actual.cBuffers == 1);
+    return NOERROR;
+
+} // DecideBufferSize
+
+
+//
+// SetMediaType
+//
+// Called when a media type is agreed between filters. Caches the agreed
+// type in m_MediaType and its bit depth in m_nCurrentBitDepth so
+// FillBuffer() can format frames accordingly.
+//
+HRESULT CPushPinDesktop::SetMediaType(const CMediaType *pMediaType)
+{
+    CAutoLock cAutoLock(m_pFilter->pStateLock());
+
+    // Pass the call up to my base class
+    HRESULT hr = CSourceStream::SetMediaType(pMediaType);
+
+    if(SUCCEEDED(hr))
+    {
+        VIDEOINFO * pvi = (VIDEOINFO *) m_mt.Format();
+        if (pvi == NULL)
+            return E_UNEXPECTED;
+
+        switch(pvi->bmiHeader.biBitCount)
+        {
+            case 8:     // 8-bit palettized
+            case 16:    // RGB565, RGB555
+            case 24:    // RGB24
+            case 32:    // RGB32
+                // Save the current media type and bit depth
+                m_MediaType = *pMediaType;
+                m_nCurrentBitDepth = pvi->bmiHeader.biBitCount;
+                hr = S_OK;
+                break;
+
+            default:
+                // We should never agree any other media types
+                ASSERT(FALSE);
+                hr = E_INVALIDARG;
+                break;
+        }
+    }
+
+    return hr;
+
+} // SetMediaType
+
+
+// This is where we insert the DIB bits into the video stream.
+// FillBuffer is called once for every sample in the stream: it grabs the
+// current desktop into the sample buffer, timestamps the frame from the
+// frame counter and the fixed frame length, and marks it as a sync point.
+HRESULT CPushPinDesktop::FillBuffer(IMediaSample *pSample)
+{
+    BYTE *pData;
+
+    CheckPointer(pSample, E_POINTER);
+
+    CAutoLock cAutoLockShared(&m_cSharedState);
+
+    // Access the sample's data buffer (checked: the original ignored the
+    // HRESULT and would have dereferenced an invalid pointer on failure).
+    HRESULT hr = pSample->GetPointer(&pData);
+    if (FAILED(hr))
+    {
+        return hr;
+    }
+
+    // Check that we're still using video
+    ASSERT(m_mt.formattype == FORMAT_VideoInfo);
+
+    VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;
+
+    // Grab the screen region directly into the sample buffer. (The original
+    // also computed min(biSizeImage, GetSize()) into an unused local;
+    // removed as dead code.)
+    HDIB hDib = CopyScreenToBitmap(&m_rScreen, pData, (BITMAPINFO *) &(pVih->bmiHeader));
+
+    if (hDib)
+        DeleteObject(hDib);
+
+    // Set the timestamps that will govern playback frame rate: frame N
+    // covers [N * m_rtFrameLength, (N + 1) * m_rtFrameLength).
+    // If this file is getting written out as an AVI, the AVI Mux filter
+    // must also be configured with the Average Time Per Frame.
+    REFERENCE_TIME rtStart = m_iFrameNumber * m_rtFrameLength;
+    REFERENCE_TIME rtStop  = rtStart + m_rtFrameLength;
+
+    pSample->SetTime(&rtStart, &rtStop);
+    m_iFrameNumber++;
+
+    // Set TRUE on every sample for uncompressed frames
+    pSample->SetSyncPoint(TRUE);
+
+    return S_OK;
+}
+
+
+
+/**********************************************
+ *
+ * CPushSourceDesktop Class
+ *
+ **********************************************/
+
+// Filter constructor: creates the single output pin. On failure *phr is
+// set to E_OUTOFMEMORY (only when the caller supplied phr).
+CPushSourceDesktop::CPushSourceDesktop(IUnknown *pUnk, HRESULT *phr)
+           : CSource(NAME("PushSourceDesktop"), pUnk, CLSID_PushSourceDesktop)
+{
+    // The pin magically adds itself to our pin array.
+    m_pPin = new CPushPinDesktop(phr, this);
+
+    if (phr)
+    {
+        if (m_pPin == NULL)
+            *phr = E_OUTOFMEMORY;
+        else
+            *phr = S_OK;
+    }
+}
+
+
+// Destructor: deletes the pin the constructor created (matches the
+// DirectShow PushSource sample's ownership model).
+CPushSourceDesktop::~CPushSourceDesktop()
+{
+    if (m_pPin)
+    {
+        delete m_pPin;
+        m_pPin = NULL;
+    }
+}
+
+
+// COM-style factory entry point. Returns the new filter (or NULL on
+// allocation failure); *phr reports the outcome when phr is non-NULL.
+CUnknown * WINAPI CPushSourceDesktop::CreateInstance(IUnknown *pUnk, HRESULT *phr)
+{
+    CPushSourceDesktop *pNewFilter = new CPushSourceDesktop(pUnk, phr );
+
+    if (phr)
+    {
+        if (pNewFilter == NULL)
+            *phr = E_OUTOFMEMORY;
+        else
+            *phr = S_OK;
+    }
+    return pNewFilter;
+
+}
+
+// Forwards the capture-source window handle to the pin; fails when the
+// pin was never created.
+HRESULT CPushSourceDesktop::SetSrcHwnd(HWND hWnd)
+{
+    return m_pPin ? m_pPin->SetSrcHwnd(hWnd) : E_FAIL;
+}
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx b/plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx
new file mode 100644
index 0000000..b425d65
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx
@@ -0,0 +1,257 @@
+#if !defined(_WIN32_WCE)
+/* Copyright (C) 2014 Mamadou DIOP
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include <streams.h>
+#include "internals/DSScreenCastGraph.h"
+#include "internals/DSPushSource.h"
+#include "internals/DSUtils.h"
+#include "internals/DSCaptureUtils.h"
+
+#include "tsk_debug.h"
+
+#include <iostream>
+
+using namespace std;
+
+// Builds the screen-cast capture graph immediately; *hr receives the
+// result of createCaptureGraph(). NOTE(review): hr is dereferenced
+// unconditionally here — callers must pass a valid pointer.
+DSScreenCastGraph::DSScreenCastGraph(ISampleGrabberCB* callback, HRESULT *hr)
+: DSBaseCaptureGraph(callback, hr)
+{
+    this->grabberCallback = callback;
+
+    this->captureFormat = NULL;
+    this->captureGraphBuilder = NULL;
+    this->graphBuilder = NULL;
+
+    this->sourceFilter = NULL;
+    this->sampleGrabberFilter = NULL;
+
+    this->nullRendererFilter = NULL;
+    this->grabberController = NULL;
+    this->mediaController = NULL;
+    this->mediaEventController = NULL;
+
+    this->running = FALSE;
+    this->paused = FALSE;
+
+    *hr = this->createCaptureGraph();
+}
+
+// Releases every COM interface acquired by createCaptureGraph(), in
+// roughly reverse order of acquisition. SAFE_RELEASE() ignores NULLs, so
+// partially-constructed graphs are torn down safely.
+DSScreenCastGraph::~DSScreenCastGraph()
+{
+    SAFE_RELEASE(this->mediaEventController);
+    SAFE_RELEASE(this->mediaController);
+    SAFE_RELEASE(this->grabberController);
+
+    SAFE_RELEASE(this->nullRendererFilter);
+    SAFE_RELEASE(this->sampleGrabberFilter);
+
+    SAFE_RELEASE(this->graphBuilder);
+    SAFE_RELEASE(this->captureGraphBuilder);
+
+    SAFE_RELEASE(this->sourceFilter);
+}
+
+// Deliberate no-op: screen capture ignores the requested format and
+// framerate (the source pin hard-codes FPS_5 and the desktop size).
+HRESULT DSScreenCastGraph::setParameters(DSCaptureFormat *format, int framerate)
+{
+    return S_OK;
+}
+
+#ifdef _WIN32_WCE
+# include <tinydshow/wce/DSInxbNullFilter.h>
+#endif
+
+// Connects the chain: source -> sample grabber -> null renderer.
+// Returns the first failing ConnectFilters() result.
+HRESULT DSScreenCastGraph::connect()
+{
+    HRESULT hr;
+
+    if (!this->sourceFilter){
+        TSK_DEBUG_ERROR("Invalid source filter");
+        return E_FAIL;
+    }
+#if 0
+    if (!this->captureFormat){
+        TSK_DEBUG_ERROR("Invalid capture format");
+        return E_FAIL;
+    }
+#endif
+
+    hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->sampleGrabberFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+    hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+
+    return hr;
+}
+
+// Disconnects the chain built by connect(). NOTE(review): only the result
+// of the second DisconnectFilters() call is returned; a failure of the
+// first is silently overwritten.
+HRESULT DSScreenCastGraph::disconnect()
+{
+    HRESULT hr;
+
+    if (!this->sourceFilter)
+    {
+        return E_FAIL;
+    }
+#if 0
+    if (!this->captureFormat)
+    {
+        return E_FAIL;
+    }
+#endif
+
+    hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->sampleGrabberFilter);
+    hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+
+    return hr;
+}
+
+// Runs (or resumes) the graph. Fix: also clears the paused flag on a
+// successful Run() — the original left "paused" TRUE after a
+// pause()/start() sequence, so isPaused() kept reporting a paused graph
+// while it was actually running.
+HRESULT DSScreenCastGraph::start()
+{
+    HRESULT hr;
+
+    if (isRunning() && !isPaused())
+    {
+        return S_OK;
+    }
+
+    hr = this->mediaController->Run();
+    if (!SUCCEEDED(hr))
+    {
+        TSK_DEBUG_ERROR("DSScreenCastGraph::mediaController->Run() has failed with %ld", hr);
+        return hr;
+    }
+    this->running = true;
+    this->paused = false; // Run() leaves the paused state
+    return hr;
+}
+
+// Pauses the graph when it is running; a no-op (S_OK) otherwise.
+// The paused flag is only set when IMediaControl::Pause() succeeds.
+HRESULT DSScreenCastGraph::pause()
+{
+    HRESULT hr = S_OK;
+    if (isRunning())
+    {
+        hr = this->mediaController->Pause();
+        if (SUCCEEDED(hr))
+        {
+            this->paused = TRUE;
+        }
+    }
+    return hr;
+}
+
+// Stops the graph. The running/paused flags are cleared even when
+// IMediaControl::Stop() fails (the failure is only logged).
+HRESULT DSScreenCastGraph::stop()
+{
+    if (!this->running)
+    {
+        return S_OK;
+    }
+
+    HRESULT hr;
+    hr = this->mediaController->Stop();
+    if (FAILED(hr))
+    {
+        TSK_DEBUG_ERROR("DSScreenCastGraph::mediaController->Stop() has failed with %ld", hr);
+    }
+    this->running = false;
+    this->paused = false;
+    return hr;
+}
+
+// True while the graph has been started and not yet stopped.
+bool DSScreenCastGraph::isRunning()
+{
+    return running;
+}
+
+// True while the graph is in the paused state (see pause()).
+bool DSScreenCastGraph::isPaused()
+{
+    return paused;
+}
+
+// Forwards to ISampleGrabber::GetConnectedMediaType().
+// NOTE(review): assumes grabberController was created by
+// createCaptureGraph(); calling this on a failed graph would crash —
+// verify callers check the constructor's HRESULT first.
+HRESULT DSScreenCastGraph::getConnectedMediaType(AM_MEDIA_TYPE *mediaType)
+{
+    return this->grabberController->GetConnectedMediaType(mediaType);
+}
+
+// Builds (but does not connect) the capture graph:
+//   CPushSourceDesktop -> SampleGrabber (RGB24, callback mode) -> NullRenderer
+// On any failure the function returns early; the interfaces acquired so
+// far are released later by the destructor, not here.
+HRESULT DSScreenCastGraph::createCaptureGraph()
+{
+    HRESULT hr;
+
+    // Create capture graph builder
+    hr = COCREATE(CLSID_CaptureGraphBuilder2, IID_ICaptureGraphBuilder2, this->captureGraphBuilder);
+    if(FAILED(hr)) return hr;
+
+    // Create the graph builder
+    hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder);
+    if(FAILED(hr)) return hr;
+
+    // Initialize the Capture Graph Builder.
+    hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder);
+    if(FAILED(hr)) return hr;
+
+    // Create source filter (AddRef'd because CreateInstance returns a
+    // refcount-0 CUnknown; the destructor's SAFE_RELEASE balances this).
+    LPUNKNOWN pUnk = NULL;
+    this->sourceFilter = (CPushSourceDesktop*)CPushSourceDesktop::CreateInstance(pUnk, &hr);
+    if(FAILED(hr)) return hr;
+    this->sourceFilter->AddRef();
+
+    // Create the sample grabber filter
+    hr = COCREATE(CLSID_SampleGrabber, IID_IBaseFilter, this->sampleGrabberFilter);
+    if(FAILED(hr)) return hr;
+
+    // Create the NULL renderer (frames are consumed by the grabber
+    // callback; nothing is displayed)
+    hr = COCREATE(CLSID_NullRenderer, IID_IBaseFilter, this->nullRendererFilter);
+    if(FAILED(hr)) return hr;
+
+    // Add source filter to the graph
+    hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_SCREENCAST);
+    if(FAILED(hr)) return hr;
+
+    // Add sample grabber to the graph
+    hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER);
+    if(FAILED(hr)) return hr;
+
+    // Add null renderer to the graph
+    hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER);
+    if(FAILED(hr)) return hr;
+
+    // Find media control
+    hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController);
+    if(FAILED(hr)) return hr;
+
+    // Create the sample grabber controller
+    hr = QUERY(this->sampleGrabberFilter, IID_ISampleGrabber, this->grabberController);
+    if(FAILED(hr)) return hr;
+
+    // Set the sample grabber media type (RGB24) — must match what
+    // CPushPinDesktop::CheckMediaType() accepts.
+    // TODO : CHECK
+    AM_MEDIA_TYPE mt;
+    ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
+    mt.majortype = MEDIATYPE_Video;
+    mt.subtype = MEDIASUBTYPE_RGB24;
+    mt.formattype = FORMAT_VideoInfo;
+
+    hr = this->grabberController->SetMediaType(&mt);
+    if(FAILED(hr)) return hr;
+
+    // Callback mode: no one-shot, no internal buffering — the callback
+    // (mode 1 = BufferCB) receives every sample.
+    this->grabberController->SetOneShot(FALSE);
+    this->grabberController->SetBufferSamples(FALSE);
+
+    hr = this->grabberController->SetCallback(this->grabberCallback, 1);
+    if(FAILED(hr)) return hr;
+
+    return hr;
+}
+
+#endif /* _WIN32_WCE */ \ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/DSScreenCastGraph.h b/plugins/pluginDirectShow/internals/DSScreenCastGraph.h
new file mode 100644
index 0000000..d201668
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSScreenCastGraph.h
@@ -0,0 +1,160 @@
+/* Copyright (C) 2014 Mamadou DIOP.
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSSCREENCAST_H
+#define PLUGIN_DSHOW_DSSCREENCAST_H
+
+#include "plugin_dshow_config.h"
+#include "internals/DSBaseCaptureGraph.h"
+
+#include <control.h>
+#include <vector>
+#include <qedit.h>
+
+
+class CPushSourceDesktop;
+
+// Capture graph specialized for screen casting: a CPushSourceDesktop
+// source feeding a sample grabber (frames delivered via ISampleGrabberCB)
+// and a null renderer. See DSScreenCastGraph.cxx for the graph layout.
+class DSScreenCastGraph : public DSBaseCaptureGraph
+{
+public:
+    DSScreenCastGraph(ISampleGrabberCB* callback, HRESULT *hr);
+    virtual ~DSScreenCastGraph();
+
+    // Always empty for screen cast: no enumerable capture formats.
+    std::vector<DSCaptureFormat> *getFormats() { return &this->supportedFormats; };
+
+    // No-op: there is only one "device" (the desktop).
+    virtual HRESULT setSource(const std::string &devicePath) { return S_OK; }
+    HRESULT setParameters(DSCaptureFormat *format, int framerate);
+
+    HRESULT connect();
+    HRESULT disconnect();
+
+    HRESULT start();
+    HRESULT stop();
+    HRESULT pause();
+    bool isRunning();
+    bool isPaused();
+
+    HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
+
+    virtual std::string getDeviceId() const { return std::string("screencast"); }
+
+private:
+    HRESULT createCaptureGraph();
+
+private:
+    ISampleGrabberCB *grabberCallback;
+
+    ICaptureGraphBuilder2 *captureGraphBuilder;
+    IGraphBuilder *graphBuilder;
+
+    CPushSourceDesktop *sourceFilter;
+    IBaseFilter *nullRendererFilter;
+    IBaseFilter *sampleGrabberFilter;
+
+    ISampleGrabber *grabberController;
+
+    IMediaControl *mediaController;
+    IMediaEventEx *mediaEventController;
+
+    std::vector<DSCaptureFormat> supportedFormats;
+    DSCaptureFormat *captureFormat;
+
+    bool running;
+    bool paused;
+};
+
+#endif /* PLUGIN_DSHOW_DSSCREENCAST_H */
+/* Copyright (C) 2014 Mamadou DIOP.
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DSSCREENCAST_H
+#define PLUGIN_DSHOW_DSSCREENCAST_H
+
+#include "plugin_dshow_config.h"
+#include "internals/DSBaseCaptureGraph.h"
+
+#include <control.h>
+#include <vector>
+#include <qedit.h>
+
+
+class CPushSourceDesktop;
+
+// NOTE(review): this entire second copy of DSScreenCastGraph.h's content
+// (license, guard, class) is a duplicate of the block above — the
+// generated file contains the header twice. It is compiled out by the
+// include guard (PLUGIN_DSHOW_DSSCREENCAST_H is already defined), so it
+// is harmless but should be deduplicated at the source.
+class DSScreenCastGraph : public DSBaseCaptureGraph
+{
+public:
+    DSScreenCastGraph(ISampleGrabberCB* callback, HRESULT *hr);
+    virtual ~DSScreenCastGraph();
+
+    std::vector<DSCaptureFormat> *getFormats() { return &this->supportedFormats; };
+
+    virtual HRESULT setSource(const std::string &devicePath) { return S_OK; }
+    HRESULT setParameters(DSCaptureFormat *format, int framerate);
+
+    HRESULT connect();
+    HRESULT disconnect();
+
+    HRESULT start();
+    HRESULT stop();
+    HRESULT pause();
+    bool isRunning();
+    bool isPaused();
+
+    HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
+
+    virtual std::string getDeviceId() const { return std::string("screencast"); }
+
+private:
+    HRESULT createCaptureGraph();
+
+private:
+    ISampleGrabberCB *grabberCallback;
+
+    ICaptureGraphBuilder2 *captureGraphBuilder;
+    IGraphBuilder *graphBuilder;
+
+    CPushSourceDesktop *sourceFilter;
+    IBaseFilter *nullRendererFilter;
+    IBaseFilter *sampleGrabberFilter;
+
+    ISampleGrabber *grabberController;
+
+    IMediaControl *mediaController;
+    IMediaEventEx *mediaEventController;
+
+    std::vector<DSCaptureFormat> supportedFormats;
+    DSCaptureFormat *captureFormat;
+
+    bool running;
+    bool paused;
+};
+
+#endif /* PLUGIN_DSHOW_DSSCREENCAST_H */
diff --git a/plugins/pluginDirectShow/internals/DSUtils.cxx b/plugins/pluginDirectShow/internals/DSUtils.cxx
new file mode 100644
index 0000000..913c081
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSUtils.cxx
@@ -0,0 +1,365 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSUtils.h"
+
+#if defined (_WIN32_WCE)
+#include <atlbase.h>
+#include <atlstr.h>
+#else
+#include <atlconv.h>
+#include <d3d9.h>
+#endif
+
+#include "tsk_debug.h"
+
+// Best-effort lookup of the application's "main" window: the active
+// window first, then the foreground window, and finally (desktop builds
+// only) the console window. Returns NULL when none is found.
+HWND GetMainWindow()
+{
+    HWND hWnd = GetActiveWindow();
+    if (hWnd) {
+        return hWnd;
+    }
+    hWnd = GetForegroundWindow();
+#if !defined(_WIN32_WCE)
+    if (!hWnd) {
+        hWnd = GetConsoleWindow();
+    }
+#endif
+    return hWnd;
+}
+
+// True when the calling thread owns the application's main window
+// (see GetMainWindow()); false when no such window can be located.
+bool IsMainThread()
+{
+    const HWND hWnd = GetMainWindow();
+    if (!hWnd) {
+        return false;
+    }
+    return GetWindowThreadProcessId(hWnd, NULL) == GetCurrentThreadId();
+}
+
+// Probes once (result cached in function-local statics) whether a
+// hardware D3D9 device can be created for the current display mode.
+// NOTE(review): the check itself is not thread-safe; concurrent first
+// calls could race on g_bChecked/g_bSupported — confirm callers
+// serialize initialization.
+bool IsD3D9Supported()
+{
+#if defined(_WIN32_WCE)
+    return false;
+#else
+    static bool g_bChecked = false;
+    static bool g_bSupported = false;
+
+    if (g_bChecked) {
+        return g_bSupported;
+    }
+    g_bChecked = true;
+    HRESULT hr = S_OK;
+    IDirect3D9* pD3D = NULL;
+    D3DDISPLAYMODE mode = { 0 };
+    D3DPRESENT_PARAMETERS pp = {0};
+    IDirect3DDevice9* pDevice = NULL;
+
+    if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION))) {
+        hr = E_OUTOFMEMORY;
+        goto bail;
+    }
+
+    // Query the current adapter display mode...
+    hr = pD3D->GetAdapterDisplayMode(
+        D3DADAPTER_DEFAULT,
+        &mode
+        );
+    if (FAILED(hr)) {
+        goto bail;
+    }
+
+    // ...check a windowed HAL device can present X8R8G8B8 over it...
+    hr = pD3D->CheckDeviceType(
+        D3DADAPTER_DEFAULT,
+        D3DDEVTYPE_HAL,
+        mode.Format,
+        D3DFMT_X8R8G8B8,
+        TRUE // windowed
+        );
+    if (FAILED(hr)) {
+        goto bail;
+    }
+    // ...and actually create such a device to be sure.
+    pp.BackBufferFormat = D3DFMT_X8R8G8B8;
+    pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+    pp.Windowed = TRUE;
+    pp.hDeviceWindow = GetDesktopWindow();
+    hr = pD3D->CreateDevice(
+        D3DADAPTER_DEFAULT,
+        D3DDEVTYPE_HAL,
+        pp.hDeviceWindow,
+        D3DCREATE_HARDWARE_VERTEXPROCESSING,
+        &pp,
+        &pDevice
+        );
+    if (FAILED(hr)) {
+        goto bail;
+    }
+
+    // Everythings is OK
+    g_bSupported = true;
+    TSK_DEBUG_INFO("D3D9 supported");
+
+bail:
+    if (!g_bSupported) {
+        TSK_DEBUG_WARN("D3D9 not supported");
+    }
+    SAFE_RELEASE(pDevice);
+    SAFE_RELEASE(pD3D);
+    return g_bSupported;
+#endif /* _WIN32_WCE */
+}
+
+// Returns the first pin on "filter" matching "direction", with a
+// reference the caller must Release(), or NULL when the filter exposes
+// no such pin.
+// Fix: IEnumPins::Next() returns S_FALSE (not a FAILED code) once the
+// enumeration is exhausted, so the original "if (FAILED(...))" exit test
+// never fired and the loop spun forever when no pin matched.
+IPin *GetPin(IBaseFilter *filter, PIN_DIRECTION direction)
+{
+    IEnumPins *enumPins = NULL;
+    IPin *pin = NULL;
+
+    if (FAILED(filter->EnumPins(&enumPins)) || !enumPins){
+        return NULL;
+    }
+
+    for(;;){
+        ULONG fetched = 0;
+        PIN_DIRECTION pinDir = PIN_DIRECTION(-1);
+        pin = NULL;
+
+        // Next() == S_OK guarantees one pin was fetched; S_FALSE or a
+        // failure means the enumeration is over.
+        if (enumPins->Next(1, &pin, &fetched) != S_OK || fetched != 1 || !pin){
+            pin = NULL; // nothing matched
+            break;
+        }
+
+        pin->QueryDirection(&pinDir);
+        if(pinDir == direction){
+            break; // found: keep this reference for the caller
+        }
+        pin->Release();
+    }
+
+    enumPins->Release();
+    return pin;
+}
+
+// Connects source's output pin to destination's input pin, either
+// directly with the given media type or letting the graph negotiate
+// (mediaType == NULL). NOTE(review): if either filter lacks a matching
+// pin, a NULL pin is passed to Connect/ConnectDirect and the graph
+// builder reports the error — confirm that is the intended failure path.
+HRESULT ConnectFilters(IGraphBuilder *graphBuilder, IBaseFilter *source, IBaseFilter *destination, AM_MEDIA_TYPE *mediaType)
+{
+    HRESULT hr;
+
+    IPin *outPin = GetPin(source, PINDIR_OUTPUT);
+    IPin *inPin = GetPin(destination, PINDIR_INPUT);
+
+    if (mediaType != NULL){
+        hr = graphBuilder->ConnectDirect(outPin, inPin, mediaType);
+    }
+    else{
+        hr = graphBuilder->Connect(outPin, inPin);
+    }
+
+    SAFE_RELEASE(outPin);
+    SAFE_RELEASE(inPin);
+
+    return hr;
+}
+
+// Breaks the connection on source's output pin and destination's input
+// pin. Returns the result of the last Disconnect() performed, or S_OK
+// when neither filter exposes a matching pin.
+// Fix: hr was returned uninitialized when both pins were NULL.
+HRESULT DisconnectFilters(IGraphBuilder *graphBuilder, IBaseFilter *source, IBaseFilter *destination)
+{
+    HRESULT hr = S_OK;
+
+    IPin *outPin = GetPin(source, PINDIR_OUTPUT);
+    IPin *inPin = GetPin(destination, PINDIR_INPUT);
+
+    if (inPin){
+        hr = graphBuilder->Disconnect(inPin);
+    }
+
+    if (outPin){
+        hr = graphBuilder->Disconnect(outPin);
+    }
+
+    SAFE_RELEASE(outPin);
+    SAFE_RELEASE(inPin);
+
+    return hr;
+}
+
+// Disconnects every filter in the graph by disconnecting each filter's
+// own input and output pins (DisconnectFilters() is called with the same
+// filter as both source and destination on purpose).
+bool DisconnectAllFilters(IGraphBuilder *graphBuilder)
+{
+    IEnumFilters* filterEnum = NULL;
+    IBaseFilter* currentFilter = NULL;
+    ULONG fetched;
+    HRESULT hr;
+
+    hr = graphBuilder->EnumFilters(&filterEnum);
+    if (FAILED(hr)) {
+        SAFE_RELEASE(filterEnum);
+        return false;
+    }
+
+    while(filterEnum->Next(1, &currentFilter, &fetched) == S_OK){
+        hr = DisconnectFilters(graphBuilder, currentFilter, currentFilter);
+        SAFE_RELEASE(currentFilter);
+    }
+    SAFE_RELEASE(filterEnum);
+    SAFE_RELEASE(currentFilter);
+    return true;
+}
+
+// Removes every filter from the graph. The enumerator is Reset() after
+// each removal because modifying the graph invalidates it (per the
+// IEnumFilters contract).
+bool RemoveAllFilters(IGraphBuilder *graphBuilder)
+{
+    IEnumFilters* filterEnum = NULL;
+    IBaseFilter* currentFilter = NULL;
+    ULONG fetched;
+    HRESULT hr;
+
+    hr = graphBuilder->EnumFilters(&filterEnum);
+    if (FAILED(hr)) return false;
+
+    while(filterEnum->Next(1, &currentFilter, &fetched) == S_OK){
+        hr = graphBuilder->RemoveFilter(currentFilter);
+        if (FAILED(hr)){
+            SAFE_RELEASE(filterEnum);
+            return false;
+        }
+        SAFE_RELEASE(currentFilter);
+        filterEnum->Reset();
+    }
+
+    SAFE_RELEASE(filterEnum);
+    SAFE_RELEASE(currentFilter);
+    return true;
+}
+
+
+#include "internals/DSDisplay.h"
+#include "internals/DSGrabber.h"
+
+#define WM_CREATE_DISPLAY_ON_UI_THREAD (WM_USER + 101)
+#define WM_CREATE_GRABBER_ON_UI_THREAD (WM_CREATE_DISPLAY_ON_UI_THREAD + 1)
+#define WM_CREATE_ON_UI_THREAD_TIMEOUT 1000
+
+// C Callback that dispatch event to create display on UI thread
+// Window-proc hook that creates a DSDisplay/DSGrabber on the UI thread.
+// wParam carries the event HANDLE value itself (see the PostMessage in
+// createOnUIThead), lParam the address of the output pointer.
+// Fix: the original reinterpreted wParam as HANDLE* and passed that
+// HANDLE* to SetEvent(); it only worked because HANDLE is itself a
+// pointer type. Also guards *isScreenCast against a missing window prop.
+static LRESULT CALLBACK __create__WndProcWindow(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
+{
+    HANDLE event = reinterpret_cast<HANDLE>(wParam);
+    BOOL* isScreenCast = reinterpret_cast<BOOL*>(GetProp(hWnd, TEXT("screnCast")));
+
+    if(event && lParam){
+        switch(uMsg){
+            case WM_CREATE_DISPLAY_ON_UI_THREAD:
+            {
+                HRESULT hr;
+                DSDisplay** ppDisplay = reinterpret_cast<DSDisplay**>(lParam);
+                *ppDisplay = new DSDisplay(&hr);
+                SetEvent(event);
+                break;
+            }
+            case WM_CREATE_GRABBER_ON_UI_THREAD:
+            {
+                HRESULT hr;
+                DSGrabber** ppGrabber = reinterpret_cast<DSGrabber**>(lParam);
+                // Default to "not screen cast" when the prop is absent
+                // instead of dereferencing NULL.
+                *ppGrabber = new DSGrabber(&hr, isScreenCast ? *isScreenCast : FALSE);
+                SetEvent(event);
+                break;
+            }
+        }
+    }
+    return DefWindowProc(hWnd, uMsg, wParam, lParam);
+}
+
+// Creates a DSDisplay (display==TRUE) or DSGrabber on the calling thread.
+// Returns 0 on success, -2 when the constructor reported failure through
+// hr (presumably the ctor always writes hr — verify in DSDisplay/DSGrabber).
+// Name keeps the original "Thead" spelling: it is the public identifier.
+int createOnCurrentThead(HWND hWnd, void** ppRet, BOOL display, BOOL screnCast)
+{
+    HRESULT hr;
+    if(display) *ppRet = new DSDisplay(&hr);
+    else *ppRet = new DSGrabber(&hr, screnCast);
+    if(FAILED(hr)){
+        TSK_DEBUG_ERROR("Failed to created DirectShow %s", display ? "Display" : "Grabber");
+        SAFE_DELETE_PTR(*ppRet);
+        return -2;
+    }
+    return 0;
+}
+
+// Creates a DSDisplay/DSGrabber on the UI thread when called from a
+// worker thread: temporarily subclasses a host window's wndproc with
+// __create__WndProcWindow, posts a creation message carrying an event
+// HANDLE (wParam) and the output pointer (lParam), and waits for the
+// event. Falls back to creation on the current thread when already on
+// the main thread or when subclassing fails.
+int createOnUIThead(HWND hWnd, void** ppRet, BOOL display, BOOL screnCast)
+{
+    static BOOL __isScreenCastFalse = FALSE;
+    static BOOL __isScreenCastTrue = TRUE;
+    if(!ppRet){
+        TSK_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+
+    if (IsMainThread()) {
+        return createOnCurrentThead(hWnd, ppRet, display, screnCast);
+    }
+    else{
+        TSK_DEBUG_INFO("Create DirectShow element on worker thread");
+        HANDLE event = NULL;
+        int ret = 0;
+        DWORD retWait, retryCount = 3;
+
+        if(!hWnd){
+            if (!(hWnd = FindWindow(NULL, TEXT("Boghe - IMS/RCS Client")))) {
+                if(!(hWnd = GetMainWindow())){
+                    TSK_DEBUG_ERROR("No Window handle could be used");
+                    return -2;
+                }
+            }
+        }
+#if defined(_WIN32_WCE)
+        WNDPROC wndProc = (WNDPROC) SetWindowLong(hWnd, GWL_WNDPROC, (LONG) __create__WndProcWindow);
+#else
+        WNDPROC wndProc = (WNDPROC) SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR) __create__WndProcWindow);
+#endif
+        if (!wndProc) {
+            TSK_DEBUG_ERROR("SetWindowLongPtr() failed with errcode=%d", GetLastError());
+            return createOnCurrentThead(hWnd, ppRet, display, screnCast);
+        }
+
+        if (!(event = CreateEvent(NULL, TRUE, FALSE, NULL))) {
+            TSK_DEBUG_ERROR("Failed to create new event");
+            ret = -4; goto bail;
+        }
+        // NOTE(review): the "screnCast" prop (spelling must match the
+        // GetProp in __create__WndProcWindow) is never RemoveProp'd — a
+        // small per-window leak; also, if every WaitForSingleObject times
+        // out below, ret stays 0 while *ppRet may never have been written —
+        // TODO confirm callers tolerate that.
+        SetProp(hWnd, TEXT("screnCast"), screnCast ? &__isScreenCastTrue : &__isScreenCastFalse);
+        if (!PostMessage(hWnd, display ? WM_CREATE_DISPLAY_ON_UI_THREAD : WM_CREATE_GRABBER_ON_UI_THREAD, reinterpret_cast<WPARAM>(event), reinterpret_cast<LPARAM>(ppRet))) {
+            TSK_DEBUG_ERROR("PostMessageA() failed");
+            ret = -5; goto bail;
+        }
+
+        do {
+            retWait = WaitForSingleObject(event, WM_CREATE_ON_UI_THREAD_TIMEOUT);
+        }
+        while (retryCount-- > 0 && (retWait == WAIT_TIMEOUT));
+
+    bail:
+        // restore the original window procedure
+        if (hWnd && wndProc) {
+#if defined(_WIN32_WCE)
+            SetWindowLong(hWnd, GWL_WNDPROC, (LONG)wndProc);
+#else
+            SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)wndProc);
+#endif
+        }
+        if (event) {
+            CloseHandle(event);
+        }
+
+        return ret;
+    }
+}
diff --git a/plugins/pluginDirectShow/internals/DSUtils.h b/plugins/pluginDirectShow/internals/DSUtils.h
new file mode 100644
index 0000000..09690ea
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/DSUtils.h
@@ -0,0 +1,82 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_DUTILS_H
+#define PLUGIN_DSHOW_DUTILS_H
+
+#include "plugin_dshow_config.h"
+
+#include <strmif.h>
+
+// --------------------------------------------------------------------------------
+
+// COM/heap cleanup helpers: release/delete and NULL the pointer in one step.
+#define SAFE_RELEASE(x) if ((x)) { (x)->Release(); (x) = NULL; }
+#define SAFE_DELETE_PTR(x) if ((x)) { delete (x); (x) = NULL; }
+#define SAFE_DELETE_ARRAY(x) if ((x)) { delete[] (x); (x) = NULL; }
+// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
+#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
+
+// Time-unit conversions to DirectShow reference time (100-nanosecond units).
+#define DS_NANOS_TO_100NS(NANOS) (((LONGLONG)(NANOS)) / 100ui64)
+#define DS_MICROS_TO_100NS(MICROS) (((LONGLONG)(MICROS)) * 10ui64)
+#define DS_MILLIS_TO_100NS(MILLIS) (((LONGLONG)(MILLIS)) * 10000ui64)
+#define DS_SECONDS_TO_100NS(SEC) (((LONGLONG)(SEC)) * 10000000ui64)
+// NOTE(review): this divides the constant by the argument, which is inverted
+// relative to the *_TO_100NS macros above — confirm intended semantics.
+#define DS_SECONDS_FROM_100NS(SEC) (10000000ui64/(SEC))
+
+// Shorthand for CoCreateInstance / QueryInterface with the usual casts.
+#define COCREATE(cls, iid, target) \
+ CoCreateInstance(cls, NULL, CLSCTX_INPROC_SERVER, iid, reinterpret_cast<void**>(&target))
+#define QUERY(source, iid, target) \
+ source->QueryInterface(iid, reinterpret_cast<void**>(&target))
+
+// --------------------------------------------------------------------------------
+
+// Well-known filter names used when adding/looking up filters in the graph.
+#define FILTER_WEBCAM _T("WEBCAM")
+#define FILTER_SCREENCAST _T("SCREENCAST")
+#define FILTER_FRAMERATE _T("TDSHOW_FRAMERATE")
+#define FILTER_OUTPUT _T("TDSHOW_OUTPUT")
+// NOTE(review): "FITLER" is a typo for "FILTER"; kept as-is because callers
+// reference this exact identifier.
+#define FITLER_SAMPLE_GRABBER _T("SAMPLE_GRABBER")
+#define FILTER_AVI_DECOMPRESSOR _T("AVI_DECOMPRESSOR")
+
+#define FILTER_COLORSPACE_CONVERTOR _T("COLORSPACE_CONVERTOR")
+#define FILTER_NULL_RENDERER _T("NULL_RENDERER")
+#define FILTER_VIDEO_RENDERER _T("VIDEO_RENDERER")
+#define FILTER_VIDEO_MIXING_RENDERER _T("VIDEO_MIXING_RENDERER")
+#define FILTER_COLOR_CONVERTOR_565 _T("COLOR_CONVERTOR_565")
+
+// --------------------------------------------------------------------------------
+
+HWND GetMainWindow();
+
+bool IsMainThread();
+
+bool IsD3D9Supported();
+
+IPin *GetPin(IBaseFilter *pFilter, PIN_DIRECTION dir);
+
+HRESULT ConnectFilters(IGraphBuilder *graphBuilder, IBaseFilter *source, IBaseFilter *destination, AM_MEDIA_TYPE *mediaType = NULL);
+
+HRESULT DisconnectFilters(IGraphBuilder *graphBuilder, IBaseFilter *source, IBaseFilter *destination);
+
+bool DisconnectAllFilters(IGraphBuilder *graphBuilder);
+
+bool RemoveAllFilters(IGraphBuilder *graphBuilder);
+
+int createOnCurrentThead(HWND hWnd, void** ppRet, BOOL display, BOOL screnCast);
+
+int createOnUIThead(HWND hWnd, void** ppRet, BOOL display, BOOL screnCast);
+
+#endif /* PLUGIN_DSHOW_DUTILS_H */
diff --git a/plugins/pluginDirectShow/internals/Resizer.cxx b/plugins/pluginDirectShow/internals/Resizer.cxx
new file mode 100644
index 0000000..32bcfac
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/Resizer.cxx
@@ -0,0 +1,1192 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ */
+#if !defined(RESIZER_DO_NOT_INCLUDE_HEADER)
+#include "internals/Resizer.h"
+#endif /* DO_NOT_INCLUDE_HEADER */
+
+
+/* stretch proportions */
+#define STRETCH_1_1 1
+#define STRETCH_1_2 2
+#define STRETCH_1_4 3
+#define STRETCH_1_N 4
+#define STRETCH_N_1 5
+#define STRETCH_4_1 6
+#define STRETCH_2_1 7
+
+void __stdcall StretchDIB(
+ LPBITMAPINFOHEADER biDst, // --> BITMAPINFO of destination
+ LPVOID lpvDst, // --> to destination bits
+ int DstX, // Destination origin - x coordinate
+ int DstY, // Destination origin - y coordinate
+ int DstXE, // x extent of the BLT
+ int DstYE, // y extent of the BLT
+ LPBITMAPINFOHEADER biSrc, // --> BITMAPINFO of source
+ LPVOID lpvSrc, // --> to source bits
+ int SrcX, // Source origin - x coordinate
+ int SrcY, // Source origin - y coordinate
+ int SrcXE, // x extent of the BLT
+ int SrcYE // y extent of the BLT
+ );
+
+/*
+* an X_FUNC is a function that copies one scanline, stretching or shrinking it
+* to fit a destination scanline. Pick an X_FUNC depending on
+* bitdepth and stretch ratio (1:1, 1:2, 1:4, 1:N, N:1, 4:1, 2:1)
+*
+* the x_fract argument is the delta fraction: it is a representation
+* of the smaller extent (whichever that is) as a fraction of the larger,
+* and is used when stretching or shrinking to advance the pointer to the
+* smaller scanline every (fract) pixels of the larger.
+* Thus if we are expanding 1:8, x_fract will be 1/8, we will advance the
+* source pointer once every 8 pixels, and thus copy each source pixel to
+* 8 dest pixels. Note that if shrinking 8:1, x_fract will still be 1/8
+* and we will use it to control advancement of the dest pointer.
+* the fraction is multiplied by 65536.
+*/
+typedef void (*X_FUNC) (LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract);
+
+
+void X_Stretch_1_1_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_1_2_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_1_4_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_1_N_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_N_1_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+
+void X_Stretch_1_1_16Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_1_2_16Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_1_N_16Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_N_1_16Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+
+void X_Stretch_1_1_24Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_1_N_24Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_N_1_24Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+
+void X_Stretch_1_1_32Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_1_N_32Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+void X_Stretch_N_1_32Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
+
+
+/*
+* Y_Stretch_* functions copy DstYE scanlines (using
+* an X_FUNC to copy each scanline) omitting or duplicating scanlines to
+* fit the destination extent. Pick a Y_ depending on the ratio
+* (1:N, N:1...)
+*/
+
+void Y_Stretch_1_N(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
+ int DstYE, int SrcWidth, int DstWidth, int x_fract,
+ X_FUNC x_func, int nBits);
+
+void Y_Stretch_N_1(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
+ int DstYE, int SrcWidth, int DstWidth, int x_fract,
+ X_FUNC x_func);
+
+/*
+* special case y-stretch functions for 1:2 in both dimensions for 8 and 16 bits
+* takes no X_FUNC arg. Will do entire stretch.
+*/
+void Stretch_1_2_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
+ int DstYE, int SrcWidth, int DstWidth, int x_fract);
+
+
+void Stretch_1_2_16Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
+ int DstYE, int SrcWidth, int DstWidth, int x_fract);
+
+/* straight copy of one scanline of count bytes */
+void X_CopyScanline(LPBYTE lpSrc, LPBYTE lpDst, int count);
+
+
+//
+// Resize function
+//
+/*
+* ResizeRGB: stretch/shrink an RGB DIB described by (pbiIn, dibBits) into the
+* destination buffer (pbiOut, pFrame) at iNewWidth x iNewHeight pixels.
+* Thin wrapper over StretchDIB with both origins at (0, 0).
+*/
+void ResizeRGB( BITMAPINFOHEADER *pbiIn, //Src's BITMAPINFOHEADER
+ const unsigned char * dibBits, //Src bits
+ BITMAPINFOHEADER *pbiOut,
+ unsigned char *pFrame, //Dst bits
+ int iNewWidth, //new W in pixel
+ int iNewHeight) //new H in pixel
+{
+ StretchDIB( pbiOut, // --> BITMAPINFO of destination
+ pFrame, // --> to destination bits
+ 0, // Destination origin - x coordinate
+ 0, // Destination origin - y coordinate
+ iNewWidth, // x extent of the BLT
+ iNewHeight, // y extent of the BLT
+ pbiIn, // --> BITMAPINFO of source
+ (void*) dibBits, // --> to source bits
+ 0, // Source origin - x coordinate
+ 0, // Source origin - y coordinate
+ pbiIn->biWidth, // x extent of the BLT
+ pbiIn->biHeight // y extent of the BLT
+ );
+
+ return;
+}
+
+
+/* -------------------------------------------------------------------- */
+
+/*
+* StretchFactor
+*
+* calculate the stretch factor (proportion of source extent to destination
+* extent: 1:1, 1:2, 1:4, 1:N, N:1, 4:1,or 2:1) and also the
+* delta fraction (see above comment on X_FUNC). This is the ratio of
+* the smaller extent to the larger extent, represented as a fraction
+* multiplied by 65536.
+*
+* returns: the stretch factor (stores the delta fraction in *pfract)
+*/
+
+/*
+* Classify the SrcE -> DstE mapping as one of the STRETCH_* proportions and,
+* when pfract is non-NULL, store the delta fraction in *pfract: the smaller
+* extent over the larger one, scaled by 65536 (low 16 bits kept).
+*
+* Fix: the 1:1 branch previously executed "pfract = 0;", nulling the local
+* pointer instead of writing through it, so the caller's fraction variable
+* (x_fract in StretchDIB) was left uninitialized for 1:1 stretches.
+*/
+int
+StretchFactor(int SrcE, int DstE, int *pfract)
+{
+
+
+ if (SrcE == DstE) {
+ if (pfract != NULL) {
+ *pfract = 0; /* 1:1 - no fractional stepping needed */
+ }
+
+ return(STRETCH_1_1);
+
+ }
+
+
+ if (SrcE > DstE) {
+ /* shrinking: fraction is dest/src */
+ if (pfract != NULL) {
+ *pfract = ( (DstE << 16) / SrcE) & 0xffff;
+ }
+
+ if (SrcE == (DstE * 2)) {
+ return(STRETCH_2_1);
+ } else if (SrcE == (DstE * 4)) {
+ return(STRETCH_4_1);
+ } else {
+ return(STRETCH_N_1);
+ }
+
+ } else {
+
+ /* calculate delta fraction based on smallest / largest */
+ if (pfract != NULL) {
+ *pfract = ( (SrcE << 16) / DstE) & 0xffff;
+ }
+
+ if (DstE == (SrcE * 2)) {
+ return(STRETCH_1_2);
+ } else if (DstE == (SrcE * 4)) {
+ return(STRETCH_1_4);
+ } else {
+ return(STRETCH_1_N);
+ }
+ }
+}
+
+
+/* -------------------------------------------------------------------- */
+
+/*
+* StretchDIB
+*
+*/
+
+/*
+* Stretch a DIB region of SrcXE x SrcYE pixels at (SrcX, SrcY) onto a
+* destination region of DstXE x DstYE pixels at (DstX, DstY). Source and
+* destination must share the same bit depth (8/16/24/32); no mirroring,
+* no pixel-format translation, and the regions must not overlap.
+*/
+void FAR PASCAL
+StretchDIB(
+ LPBITMAPINFOHEADER biDst, // --> BITMAPINFO of destination
+ LPVOID lpvDst, // --> to destination bits
+ int DstX, // Destination origin - x coordinate
+ int DstY, // Destination origin - y coordinate
+ int DstXE, // x extent of the BLT
+ int DstYE, // y extent of the BLT
+ LPBITMAPINFOHEADER biSrc, // --> BITMAPINFO of source
+ LPVOID lpvSrc, // --> to source bits
+ int SrcX, // Source origin - x coordinate
+ int SrcY, // Source origin - y coordinate
+ int SrcXE, // x extent of the BLT
+ int SrcYE // y extent of the BLT
+ )
+{
+
+ int nBits;
+ int SrcWidth, DstWidth;
+ LPBYTE lpDst = (LPBYTE)lpvDst, lpSrc = (LPBYTE)lpvSrc;
+ int x_fract;
+ int x_factor;
+ int y_factor;
+ X_FUNC xfunc; /* set below for every valid (nBits, x_factor) pair */
+
+
+ /*
+ * check that sizes are not same
+ */
+ /*if(DstXE == SrcXE && DstYE == SrcYE)
+ {
+ return;
+ }*/
+ /*
+ * check that bit depths are same and 8, 16 or 24
+ */
+
+ if ((nBits = biDst->biBitCount) != biSrc->biBitCount) {
+ return;
+ }
+
+ /* only 8/16/24/32 bpp supported; anything else is silently rejected */
+ if ( (nBits != 8 ) && (nBits != 16) && (nBits != 24) &&
+ (nBits != 32)) {
+ return;
+ }
+
+ /*
+ * check that extents are not bad
+ */
+ if ( (SrcXE <= 0) || (SrcYE <= 0) || (DstXE <= 0) || (DstYE <= 0)) {
+ return;
+ }
+
+ /*
+ * calculate width of one scan line in bytes, rounded up to
+ * DWORD boundary.
+ */
+ SrcWidth = (((biSrc->biWidth * nBits) + 31) & ~31) / 8;
+ DstWidth = (((biDst->biWidth * nBits) + 31) & ~31) / 8;
+
+ /*
+ * set initial source and dest pointers
+ */
+ lpSrc += (SrcY * SrcWidth) + ((SrcX * nBits) / 8);
+ lpDst += (DstY * DstWidth) + ((DstX * nBits) / 8);
+
+
+ /*
+ * calculate stretch proportions (1:1, 1:2, 1:N, N:1 etc) and
+ * also the fractional stretch factor. (we are not interested in
+ * the y stretch fraction - this is only used in x stretching.
+ */
+
+ y_factor = StretchFactor(SrcYE, DstYE, NULL);
+ x_factor = StretchFactor(SrcXE, DstXE, &x_fract);
+
+ /*
+ * we have special case routines for 1:2 in both dimensions
+ * for 8 and 16 bits
+ */
+ if ((y_factor == x_factor) && (y_factor == STRETCH_1_2)) {
+
+ if (nBits == 8) {
+ //StartCounting();
+ Stretch_1_2_8Bits(lpSrc, lpDst, SrcXE, SrcYE,
+ DstXE, DstYE, SrcWidth, DstWidth,
+ x_fract);
+ //EndCounting("8 bit");
+ return;
+
+ } else if (nBits == 16) {
+ //StartCounting();
+ Stretch_1_2_16Bits(lpSrc, lpDst, SrcXE, SrcYE,
+ DstXE, DstYE, SrcWidth, DstWidth,
+ x_fract);
+ //EndCounting("16 bit");
+ return;
+ }
+ }
+
+
+ /* pick an X stretch function. nBits was validated to 8/16/24/32 above
+ * and x_factor is always one of the seven STRETCH_* values, so each
+ * switch below assigns xfunc on every reachable path. */
+ switch(nBits) {
+
+ case 8:
+ switch(x_factor) {
+ case STRETCH_1_1:
+ xfunc = X_Stretch_1_1_8Bits;
+ break;
+
+ case STRETCH_1_2:
+ xfunc = X_Stretch_1_2_8Bits;
+ break;
+
+ case STRETCH_1_4:
+ xfunc = X_Stretch_1_4_8Bits;
+ break;
+
+ case STRETCH_1_N:
+ xfunc = X_Stretch_1_N_8Bits;
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ xfunc = X_Stretch_N_1_8Bits;
+ break;
+
+ }
+ break;
+
+ case 16:
+ switch(x_factor) {
+ case STRETCH_1_1:
+ xfunc = X_Stretch_1_1_16Bits;
+ break;
+
+ case STRETCH_1_2:
+ xfunc = X_Stretch_1_2_16Bits;
+ break;
+
+ case STRETCH_1_4:
+ case STRETCH_1_N:
+ xfunc = X_Stretch_1_N_16Bits;
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ xfunc = X_Stretch_N_1_16Bits;
+ break;
+
+ }
+ break;
+
+ case 24:
+ switch(x_factor) {
+ case STRETCH_1_1:
+ xfunc = X_Stretch_1_1_24Bits;
+ break;
+
+ case STRETCH_1_2:
+ case STRETCH_1_4:
+ case STRETCH_1_N:
+ xfunc = X_Stretch_1_N_24Bits;
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ xfunc = X_Stretch_N_1_24Bits;
+ break;
+
+ }
+ break;
+
+ case 32:
+ switch(x_factor) {
+ case STRETCH_1_1:
+ xfunc = X_Stretch_1_1_32Bits;
+ break;
+
+ case STRETCH_1_2:
+ case STRETCH_1_4:
+ case STRETCH_1_N:
+ xfunc = X_Stretch_1_N_32Bits;
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ xfunc = X_Stretch_N_1_32Bits;
+ break;
+
+ }
+ break;
+
+ }
+
+
+ /*
+ * now call appropriate stretching function depending
+ * on the y stretch factor
+ */
+ switch (y_factor) {
+ case STRETCH_1_1:
+ case STRETCH_1_2:
+ case STRETCH_1_4:
+ case STRETCH_1_N:
+ Y_Stretch_1_N(lpSrc, lpDst, SrcXE, SrcYE,
+ DstXE, DstYE, SrcWidth, DstWidth, x_fract, xfunc, nBits);
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ Y_Stretch_N_1(lpSrc, lpDst, SrcXE, SrcYE,
+ DstXE, DstYE, SrcWidth, DstWidth, x_fract, xfunc);
+ break;
+
+ }
+ return;
+}
+
+
+/* ---- y stretching -------------------------------------------- */
+
+/*
+* call an X_FUNC to copy scanlines from lpSrc to lpDst. Duplicate or
+* omit scanlines to stretch SrcYE to DstYE.
+*/
+
+
+/*
+* Y_Stretch_1_N
+*
+* write DstYE scanlines based on SrcYE scanlines, DstYE > SrcYE
+*
+*/
+
+/*
+* Expand SrcYE source scanlines into DstYE destination scanlines
+* (DstYE >= SrcYE). Each new scanline is x-stretched once via x_func;
+* duplicated scanlines are then block-copied from the already-stretched
+* line (lpPrev) instead of being re-stretched.
+*/
+void
+Y_Stretch_1_N(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int SrcYE,
+ int DstXE,
+ int DstYE,
+ int SrcWidth,
+ int DstWidth,
+ int x_fract,
+ X_FUNC x_func,
+ int nBits)
+{
+
+ int ydelta;
+ register int i;
+ LPBYTE lpPrev = NULL;
+
+ /* Bresenham-style accumulator: step lpSrc once every DstYE/SrcYE rows */
+ ydelta = DstYE -1;
+
+ for (i = 0; i < DstYE; i++) {
+
+ /* have we already stretched this scanline ? */
+ if (lpPrev == NULL) {
+ /* no - copy one scanline */
+ (*x_func)(lpSrc, lpDst, SrcXE, DstXE, x_fract);
+ lpPrev = lpDst;
+ } else {
+ /* yes - this is a duplicate scanline. do
+ * a straight copy of one that has already
+ * been stretched/shrunk
+ */
+ X_CopyScanline(lpPrev, lpDst, DstXE * nBits / 8);
+ }
+
+ /* advance dest pointer */
+ lpDst += DstWidth;
+
+ /* should we advance source pointer this time ? */
+ if ( (ydelta -= SrcYE) < 0) {
+ ydelta += DstYE;
+ lpSrc += SrcWidth;
+ lpPrev = NULL;
+ }
+ }
+}
+
+
+/*
+* Y_Stretch_N_1
+*
+* write DstYE scanlines based on SrcYE scanlines, DstYE < SrcYE
+*
+*/
+/*
+* Shrink SrcYE source scanlines into DstYE destination scanlines
+* (DstYE <= SrcYE). Each destination scanline is x-stretched via x_func;
+* the accumulator skips the surplus source scanlines between outputs.
+*/
+void
+Y_Stretch_N_1(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int SrcYE,
+ int DstXE,
+ int DstYE,
+ int SrcWidth,
+ int DstWidth,
+ int x_fract,
+ X_FUNC x_func)
+{
+
+ int ydelta;
+ register int i;
+
+ ydelta = SrcYE -1;
+
+ for (i = 0; i < DstYE; i++) {
+
+ /* copy one scanline */
+ (*x_func)(lpSrc, lpDst, SrcXE, DstXE, x_fract);
+
+ /* advance dest pointer */
+ lpDst += DstWidth;
+
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc += SrcWidth;
+ ydelta -= DstYE;
+ } while (ydelta >= 0);
+
+ ydelta += SrcYE;
+ }
+}
+
+/* ---8-bit X stretching -------------------------------------------------- */
+
+/*
+* X_Stretch_1_N_8Bits
+*
+* copy one scan line, stretching 1:N (DstXE > SrcXE). For 8-bit depth.
+*/
+/*
+* X_Stretch_1_N_8Bits
+*
+* Expand one 8bpp scanline from SrcXE to DstXE pixels (DstXE > SrcXE),
+* duplicating source pixels under control of a Bresenham-style accumulator.
+*/
+void
+X_Stretch_1_N_8Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ register int remaining = DstXE;
+ int acc = DstXE - 1;
+
+ while (remaining-- > 0) {
+
+ /* emit the current source pixel */
+ *lpDst++ = *lpSrc;
+
+ /* step the source only once every DstXE/SrcXE outputs */
+ acc -= SrcXE;
+ if (acc < 0) {
+ acc += DstXE;
+ lpSrc++;
+ }
+ }
+}
+
+
+/*
+* X_Stretch_N_1_8Bits
+*
+* copy one scan line, shrinking N:1 (DstXE < SrcXE). For 8-bit depth.
+*/
+/*
+* Shrink one 8bpp scanline from SrcXE to DstXE pixels (DstXE < SrcXE),
+* dropping surplus source pixels between successive outputs.
+*/
+void
+X_Stretch_N_1_8Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ int xdelta;
+ register int i;
+
+ xdelta = SrcXE -1;
+
+ for (i = 0; i < DstXE; i++) {
+
+ /* copy one byte and advance dest */
+ *lpDst++ = *lpSrc;
+
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc++;
+ xdelta -= DstXE;
+ } while (xdelta >= 0);
+
+ xdelta += SrcXE;
+ }
+}
+
+/*
+* copy one scanline of count bytes from lpSrc to lpDst. used by 1:1
+* scanline functions for all bit depths
+*/
+/*
+* Copy count bytes from lpSrc to lpDst. When both pointers share the same
+* 4-byte phase the bulk is copied as aligned DWORDs (after aligning on
+* WORD then DWORD boundaries); otherwise unaligned DWORD stores are used.
+* The trailing 0-3 bytes are copied one at a time in either case.
+*/
+void
+X_CopyScanline(LPBYTE lpSrc, LPBYTE lpDst, int count)
+{
+ register int i;
+
+ /*
+ * if the alignment of lpSrc and lpDst is the same, then
+ * we can get them aligned and do a faster copy
+ */
+ if (((DWORD_PTR) lpSrc & 0x3) == ( (DWORD_PTR) lpDst & 0x3)) {
+
+ /* align on WORD boundary */
+ if ( (DWORD_PTR) lpSrc & 0x1) {
+ *lpDst++ = *lpSrc++;
+ count--;
+ }
+
+ /* align on DWORD boundary */
+ if ((DWORD_PTR) lpSrc & 0x2) {
+ * ((LPWORD) lpDst) = *((LPWORD) lpSrc);
+ lpDst += sizeof(WORD);
+ lpSrc += sizeof(WORD);
+ count -= sizeof(WORD);
+ }
+
+ /* copy whole DWORDS */
+ for ( i = (count / 4); i > 0; i--) {
+ *((LPDWORD) lpDst) = *((LPDWORD) lpSrc);
+ lpSrc += sizeof(DWORD);
+ lpDst += sizeof(DWORD);
+ }
+ } else {
+ /* the lpSrc and lpDst pointers are different
+ * alignment, so leave them unaligned and
+ * copy all the whole DWORDs
+ */
+ for (i = (count / 4); i> 0; i--) {
+ *( (DWORD UNALIGNED FAR *) lpDst) =
+ *((DWORD UNALIGNED FAR *) lpSrc);
+ lpSrc += sizeof(DWORD);
+ lpDst += sizeof(DWORD);
+ }
+ }
+
+ /* in either case, copy last (up to 3) bytes. */
+ for ( i = count % 4; i > 0; i--) {
+ *lpDst++ = *lpSrc++;
+ }
+}
+
+/*
+* X_Stretch_1_1_8Bits
+*
+* copy a scanline with no change (1:1)
+*/
+/*
+* 1:1 x-stretch for 8bpp: the destination scanline is a straight copy of
+* DstXE bytes (one byte per pixel; DstXE == SrcXE in this case).
+*/
+void
+X_Stretch_1_1_8Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+
+ X_CopyScanline(lpSrc, lpDst, DstXE);
+}
+
+
+/*
+* X_Stretch_1_2_8Bits
+*
+* copy a scanline, doubling all the pixels (1:2)
+*/
+/*
+* X_Stretch_1_2_8Bits
+*
+* Double one 8bpp scanline (1:2): each source byte is replicated into both
+* halves of a 16-bit value and written with one unaligned WORD store.
+*/
+void
+X_Stretch_1_2_8Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ WORD doubled;
+ register int n;
+
+ for (n = SrcXE; n > 0; n--) {
+
+ /* fetch one pixel and replicate it into both bytes */
+ doubled = *lpSrc++;
+ doubled |= (doubled << 8);
+
+ /* a single 16-bit store writes both copies */
+ * ((WORD UNALIGNED *) lpDst) = doubled;
+ lpDst += sizeof(WORD);
+ }
+}
+
+
+/*
+* X_Stretch_1_4_8Bits
+*
+* copy a scanline, quadrupling all the pixels (1:4)
+*/
+/*
+* Quadruple one 8bpp scanline (1:4): each source byte is replicated into
+* all four bytes of a DWORD and written with one unaligned 32-bit store.
+*/
+void
+X_Stretch_1_4_8Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ DWORD dwPix;
+ register int i;
+
+ for (i = 0; i < SrcXE; i++) {
+
+ /* get a pixel and make four copies of it */
+ dwPix = *lpSrc++;
+ dwPix |= (dwPix <<8);
+ dwPix |= (dwPix << 16);
+ * ((DWORD UNALIGNED *) lpDst) = dwPix;
+ lpDst += sizeof(DWORD);
+ }
+}
+
+
+/* -- 16-bit X functions -----------------------------------------------*/
+
+/*
+* copy one scan-line of 16 bits with no change (1:1)
+*/
+/*
+* 1:1 x-stretch for 16bpp: straight copy of DstXE pixels (2 bytes each).
+*/
+void
+X_Stretch_1_1_16Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+
+ X_CopyScanline(lpSrc, lpDst, DstXE * sizeof(WORD));
+
+}
+
+
+/*
+* copy one scanline of 16 bpp duplicating each pixel
+*/
+/*
+* Double one 16bpp scanline (1:2): each source WORD is replicated into both
+* halves of a DWORD and written with one unaligned 32-bit store.
+*/
+void
+X_Stretch_1_2_16Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+
+ DWORD dwPix;
+ register int i;
+
+ for (i = 0; i < SrcXE; i++) {
+
+ /* get a pixel and double it */
+ dwPix = * ((WORD *)lpSrc);
+ dwPix |= (dwPix << 16);
+ * ((DWORD UNALIGNED *) lpDst) = dwPix;
+
+ lpDst += sizeof(DWORD);
+ lpSrc += sizeof(WORD);
+ }
+
+}
+
+/*
+* copy one scanline of 16 bits, stretching 1:n (dest > source)
+*/
+/*
+* Expand one 16bpp scanline from SrcXE to DstXE pixels (DstXE > SrcXE),
+* duplicating source pixels under accumulator control.
+*/
+void
+X_Stretch_1_N_16Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ int xdelta;
+ register int i;
+
+ xdelta = DstXE -1;
+
+ for (i = 0; i < DstXE; i++) {
+
+ /* copy one pixel and advance dest */
+ *((WORD *) lpDst) = *((WORD *) lpSrc);
+
+ lpDst += sizeof(WORD);
+
+ /* should we advance source pointer this time ? */
+ if ( (xdelta -= SrcXE) < 0) {
+ xdelta += DstXE;
+ lpSrc += sizeof(WORD);
+ }
+ }
+}
+
+/*
+* copy one scanline of 16bits, shrinking n:1 (dest < source)
+*/
+/*
+* Shrink one 16bpp scanline from SrcXE to DstXE pixels (DstXE < SrcXE),
+* skipping surplus source pixels between outputs.
+*/
+void
+X_Stretch_N_1_16Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+
+ int xdelta;
+ register int i;
+
+ xdelta = SrcXE -1;
+
+ for (i = 0; i < DstXE; i++) {
+
+ /* copy one pixel and advance dest */
+ *((WORD *) lpDst) = *((WORD *)lpSrc);
+
+ lpDst += sizeof(WORD);
+
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc += sizeof(WORD);
+ xdelta -= DstXE;
+ } while (xdelta >= 0);
+
+ xdelta += SrcXE;
+ }
+
+}
+
+
+/* 24-bits ---------------------------------------------------------*/
+
+/*
+* copy one 24-bpp scanline as is (1:1)
+*/
+/*
+* 1:1 x-stretch for 24bpp: straight copy of DstXE pixels (3 bytes each).
+*/
+void
+X_Stretch_1_1_24Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ X_CopyScanline(lpSrc, lpDst, DstXE * 3);
+}
+
+/*
+* copy one 24-bpp scanline stretching 1:n (dest > source)
+*/
+/*
+* Expand one 24bpp scanline (DstXE > SrcXE). Each 3-byte pixel is copied
+* as a WORD plus one byte; source advances only when the accumulator wraps.
+*/
+void
+X_Stretch_1_N_24Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+
+ int xdelta;
+ register int i;
+
+ xdelta = DstXE -1;
+
+ for (i = 0; i < DstXE; i++) {
+ /* copy first word of pixel and advance dest */
+ *((WORD UNALIGNED *) lpDst) = *((WORD UNALIGNED *) lpSrc);
+
+ lpDst += sizeof(WORD);
+
+ /* copy third byte and advance dest */
+ *lpDst++ = lpSrc[sizeof(WORD)];
+
+ /* should we advance source pointer this time ? */
+ if ( (xdelta -= SrcXE) < 0) {
+ xdelta += DstXE;
+ lpSrc += 3;
+ }
+ }
+}
+
+/*
+* copy one scanline of 24 bits, shrinking n:1 (dest < source)
+*/
+/*
+* Shrink one 24bpp scanline (DstXE < SrcXE). Each output pixel is copied
+* as a WORD plus one byte; surplus 3-byte source pixels are skipped.
+*/
+void
+X_Stretch_N_1_24Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ int xdelta;
+ register int i;
+
+ xdelta = SrcXE -1;
+
+ for (i = 0; i < DstXE; i++) {
+
+ /* copy first word of pixel and advance dest */
+ *((WORD UNALIGNED *) lpDst) = *((WORD UNALIGNED *) lpSrc);
+
+ lpDst += sizeof(WORD);
+
+ /* copy third byte and advance dest */
+ *lpDst++ = lpSrc[sizeof(WORD)];
+
+
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc += 3;
+ xdelta -= DstXE;
+ } while (xdelta >= 0);
+
+ xdelta += SrcXE;
+ }
+}
+
+
+/* 32-bits ---------------------------------------------------------*/
+
+/*
+* copy one 32-bpp scanline as is (1:1)
+*/
+/*
+* 1:1 x-stretch for 32bpp: straight copy of DstXE RGBQUAD pixels.
+*/
+void
+X_Stretch_1_1_32Bits(LPBYTE lpSrc,
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ X_CopyScanline((BYTE*) lpSrc, (BYTE*) lpDst, DstXE * sizeof( RGBQUAD ) );
+}
+
+/*
+* copy one 32-bpp scanline stretching 1:n (dest > source)
+*/
+/*
+* Expand one 32bpp scanline (DstXE > SrcXE), copying whole RGBQUADs and
+* duplicating source pixels under accumulator control.
+*/
+void
+X_Stretch_1_N_32Bits(LPBYTE lpSrc0,
+ LPBYTE lpDst0,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+
+ int xdelta;
+ register int i;
+
+ /* treat the byte pointers as RGBQUAD pointers for whole-pixel copies */
+ RGBQUAD *lpSrc=(RGBQUAD *)lpSrc0;
+ RGBQUAD *lpDst=(RGBQUAD *)lpDst0;
+
+
+ xdelta = DstXE -1;
+
+ for (i = 0; i < DstXE; i++)
+ {
+ /* copy first word of pixel and advance dest */
+ *lpDst = *lpSrc;
+ lpDst++;
+
+ /* should we advance source pointer this time ? */
+ if ( (xdelta -= SrcXE) < 0)
+ {
+ xdelta += DstXE;
+ lpSrc++;
+ }
+ }
+}
+
+/*
+* copy one scanline of 32 bits, shrinking n:1 (dest < source)
+*/
+/*
+* Shrink one 32bpp scanline (DstXE < SrcXE), copying whole RGBQUADs and
+* skipping surplus source pixels between outputs.
+*/
+void
+X_Stretch_N_1_32Bits(LPBYTE lpSrc0,
+ LPBYTE lpDst0,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
+{
+ int xdelta;
+ register int i;
+
+ /* treat the byte pointers as RGBQUAD pointers for whole-pixel copies */
+ RGBQUAD *lpSrc=(RGBQUAD *)lpSrc0;
+ RGBQUAD *lpDst=(RGBQUAD *)lpDst0;
+
+ xdelta = SrcXE -1;
+
+ for (i = 0; i < DstXE; i++)
+ {
+ *lpDst = *lpSrc;
+ lpDst++;
+
+ /* how many times do we advance source pointer this time ? */
+ do
+ {
+ lpSrc++;
+ xdelta -= DstXE;
+ } while (xdelta >= 0);
+
+ xdelta += SrcXE;
+ }
+}
+
+
+
+
+/* -- special-case 1:2 -------------------------------------------*/
+
+/*
+* stretch 1:2 in both directions, for 8 bits.
+*
+* An experiment was done on x86 to only write every other line during
+* the stretch and when the whole frame was done to use memcpy to fill
+* in the gaps. This is slower than doing the stretch in a single pass.
+*/
+/*
+* Special-case 2x stretch (both axes) for 8bpp. Writes two destination
+* scanlines per source scanline; within each scanline every pixel is
+* doubled. Uses aligned DWORD stores when the destination can be brought
+* to 4-byte alignment, otherwise falls back to unaligned WORD stores.
+*/
+void
+Stretch_1_2_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
+ int DstYE, int SrcWidth, int DstWidth, int x_fract)
+{
+
+ int SrcInc, DstInc;
+ register int i, j;
+ WORD wPix;
+ DWORD dwPix4;
+
+ /* amount to advance source by at the end of each scan */
+ SrcInc = SrcWidth - SrcXE;
+
+
+ /* amount to advance dest by at the end of each scan - note
+ * that we write two scans at once, so advance past the next
+ * scan line
+ */
+ DstInc = (DstWidth * 2) - DstXE;
+
+ /*
+ * we would like to copy the pixels DWORD at a time. this means
+ * being aligned. if we are currently aligned on a WORD boundary,
+ * then copy one pixel to get aligned. If we are on a byte
+ * boundary, we can never get aligned, so use the slower loop.
+ */
+ if ( ((DWORD_PTR)lpDst) & 1) {
+
+ /*
+ * dest is byte aligned - so we can never align it
+ * by writing WORDs - use slow loop.
+ */
+ for (i = 0; i < SrcYE; i++) {
+
+ for (j = 0; j < SrcXE; j++) {
+
+ /* get a pixel and double it */
+
+ wPix = *lpSrc++;
+ wPix |= (wPix<<8);
+
+
+ /* write doubled pixel to this scanline */
+
+ *( (WORD UNALIGNED *) lpDst) = wPix;
+
+ /* write double pixel to next scanline */
+ *( (WORD UNALIGNED *) (lpDst + DstWidth)) = wPix;
+
+ lpDst += sizeof(WORD);
+ }
+ lpSrc += SrcInc;
+ lpDst += DstInc;
+ }
+ return;
+ }
+
+ /*
+ * this will be the aligned version. align each scan line
+ */
+ for ( i = 0; i < SrcYE; i++) {
+
+ /* count of pixels remaining */
+ j = SrcXE;
+
+ /* align this scan line */
+ if (((DWORD_PTR)lpDst) & 2) {
+
+ /* word aligned - copy one doubled pixel and we are ok */
+ wPix = *lpSrc++;
+ wPix |= (wPix << 8);
+
+ *( (WORD *) lpDst) = wPix;
+ *( (WORD *) (lpDst + DstWidth)) = wPix;
+ lpDst += sizeof(WORD);
+
+ j -= 1;
+ }
+
+
+ /* now dest is aligned - so loop eating two pixels at a time
+ * until there is at most one left
+ */
+ for ( ; j > 1; j -= 2) {
+
+ /* read two pixels and double them */
+ wPix = * ((WORD UNALIGNED *) lpSrc);
+ lpSrc += sizeof(WORD);
+
+ dwPix4 = (wPix & 0xff) | ((wPix & 0xff) << 8);
+ dwPix4 |= ((wPix & 0xff00) << 8) | ((wPix & 0xff00) << 16);
+ *((DWORD *) lpDst) = dwPix4;
+ *((DWORD *) (lpDst + DstWidth)) = dwPix4;
+
+ lpDst += sizeof(DWORD);
+ }
+
+ /* odd byte remaining ? */
+ if (j > 0) {
+ /* word aligned - copy one doubled pixel and we are ok */
+ wPix = *lpSrc++;
+ wPix |= (wPix << 8);
+
+ *( (WORD *) lpDst) = wPix;
+ *( (WORD *) (lpDst + DstWidth)) = wPix;
+ lpDst += sizeof(WORD);
+
+ j -= 1;
+ }
+ lpSrc += SrcInc;
+ lpDst += DstInc;
+ }
+}
+
+
+
+/* ----------------------------------------------------------------*/
+
+/*
+* stretch 1:2 in both directions, for 16-bits
+*/
+
+/*
+* Special-case 2x stretch (both axes) for 16bpp. Each source WORD pixel is
+* replicated into a DWORD and written to two adjacent destination scanlines
+* with unaligned 32-bit stores.
+*/
+void
+Stretch_1_2_16Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
+ int DstYE, int SrcWidth, int DstWidth, int x_fract)
+
+{
+ int SrcInc, DstInc;
+ register int i, j;
+ DWORD dwPix;
+
+ /* amount to advance source by at the end of each scan */
+ SrcInc = SrcWidth - (SrcXE * sizeof(WORD));
+
+
+ /* amount to advance dest by at the end of each scan - note
+ * that we write two scans at once, so advance past the next
+ * scan line
+ */
+ DstInc = (DstWidth * 2) - (DstXE * sizeof(WORD));
+
+ for (i = 0; i < SrcYE; i++) {
+
+ for (j = 0; j < SrcXE; j++) {
+
+ /* get a pixel and double it */
+
+ dwPix = *((WORD *)lpSrc);
+ dwPix |= (dwPix<<16);
+
+ lpSrc += sizeof(WORD);
+
+ /* write doubled pixel to this scanline */
+
+ *( (DWORD UNALIGNED *) lpDst) = dwPix;
+
+ /* write double pixel to next scanline */
+ *( (DWORD UNALIGNED *) (lpDst + DstWidth)) = dwPix;
+
+ lpDst += sizeof(DWORD);
+ }
+ lpSrc += SrcInc;
+ lpDst += DstInc;
+
+ }
+}
diff --git a/plugins/pluginDirectShow/internals/Resizer.h b/plugins/pluginDirectShow/internals/Resizer.h
new file mode 100644
index 0000000..6c76970
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/Resizer.h
@@ -0,0 +1,76 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ */
+#ifndef PLUGIN_DSHOW_RESIZER_H
+#define PLUGIN_DSHOW_RESIZER_H
+
+#include "plugin_dshow_config.h"
+
+/*
+* StretchC.C
+*
+* StretchBlt for DIBs
+*
+* C version of stretch.asm: StretchDIB optimised for AVI.
+*
+* NOTES
+* - does not handle mirroring in x or y
+* - does not handle pixel translation
+* - will not work in place.
+*/
+
+
+/* Outline:
+*
+* we select a y-stretching function depending on the ratio (eg 1:N or N:1).
+* it copies scanlines from source to destination, duplicating or omitting
+* scanlines as necessary to fit the destination. It copies each scanline
+* via the X_FUNC function we passed as an argument: this copies one scanline
+* duplicating or omitting pixels to fit the destination: we select an X_FUNC
+* depending on the bit-depth as well as the x-stretching ratio.
+*
+* both x and y stretching functions use the following basic model for deciding
+* when to insert/omit elements:
+*
+* delta = <larger extent> -1;
+*
+* for (number of destination elements) {
+*
+* copy one element
+* advance pointer to larger region
+* delta -= <smaller extent>
+* if (delta < 0) {
+* delta += <larger extent>;
+* advance pointer to smaller region
+* }
+* }
+*/
+
+#include <streams.h>
+
+
+/* Stretch/shrink an RGB DIB (see Resizer.cxx for implementation). */
+void ResizeRGB( BITMAPINFOHEADER *pbiIn, //Src's BITMAPINFOHEADER
+ const unsigned char * dibBits, //Src bits
+ BITMAPINFOHEADER *pbiOut,
+ unsigned char *pFrame, //Dst bits
+ int iNewWidth, //new W in pixel
+ int iNewHeight); //new H in pixel
+
+#endif /* PLUGIN_DSHOW_RESIZER_H */
diff --git a/plugins/pluginDirectShow/internals/VideoDisplayName.cxx b/plugins/pluginDirectShow/internals/VideoDisplayName.cxx
new file mode 100644
index 0000000..6b4d0b0
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/VideoDisplayName.cxx
@@ -0,0 +1,37 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/VideoDisplayName.h"
+
+VideoDisplayName::VideoDisplayName(std::string name_, std::string descr) : name(name_), description(descr) // value object: (unique name, human-readable description) of a display device
+{
+}
+
+std::string VideoDisplayName::getName() const // device identifier; the sole key used by operator==
+{
+ return this->name;
+}
+
+std::string VideoDisplayName::getDescription() const // human-readable label; NOT part of equality
+{
+ return this->description;
+}
+
+int VideoDisplayName::operator==(const VideoDisplayName &dev) const // equality compares by name only
+{
+ return this->name == dev.name;
+}
diff --git a/plugins/pluginDirectShow/internals/VideoDisplayName.h b/plugins/pluginDirectShow/internals/VideoDisplayName.h
new file mode 100644
index 0000000..82dc0d0
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/VideoDisplayName.h
@@ -0,0 +1,43 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_VIDEODISPLAYNAME_H
+#define PLUGIN_DSHOW_VIDEODISPLAYNAME_H
+
+#include "plugin_dshow_config.h"
+
+#include <string>
+
+class VideoDisplayName // value type pairing a display device's unique name with a description
+{
+public:
+ VideoDisplayName() {} // default-constructed instances have empty name/description
+
+ VideoDisplayName(std::string name, std::string description);
+
+ std::string getName() const; // unique identifier; used for equality
+
+ std::string getDescription() const; // human-readable label; ignored by operator==
+
+ int operator==( const VideoDisplayName &dev ) const; // non-zero when names match
+
+private:
+ std::string name;
+ std::string description;
+};
+
+#endif /* PLUGIN_DSHOW_VIDEODISPLAYNAME_H */
diff --git a/plugins/pluginDirectShow/internals/VideoFrame.h b/plugins/pluginDirectShow/internals/VideoFrame.h
new file mode 100644
index 0000000..2c910a6
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/VideoFrame.h
@@ -0,0 +1,107 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_VIDEOFRAME_H
+#define PLUGIN_DSHOW_VIDEOFRAME_H
+
+// Define supported video formats
+typedef enum _VIDEOFORMAT
+{
+ VIDEOFORMAT_NULL = 0, // 0 x 0 : Null
+ VIDEOFORMAT_SQCIF, // 128 x 96 : SQCIF
+ VIDEOFORMAT_QCIF, // 176 x 144 : QCIF
+ VIDEOFORMAT_QVGA, // 320 x 240 : QVGA
+ VIDEOFORMAT_CIF, // 352 x 288 : CIF
+ VIDEOFORMAT_IOS_MEDIUM, // 480 x 360 : IOS_MEDIUM
+ VIDEOFORMAT_VGA, // 640 x 480 : VGA
+ VIDEOFORMAT_4CIF, // 704 x 576 : 4CIF
+ VIDEOFORMAT_SVGA, // 800 x 600 : SVGA
+ VIDEOFORMAT_XGA, // 1024 x 768 : XGA
+ VIDEOFORMAT_SXGA, // 1280 x 1024 : SXGA
+ VIDEOFORMAT_16CIF, // 1408 x 1152 : 16CIF
+} VIDEOFORMAT;
+
+
+// Macro to convert a video format to its size (format is evaluated once by switch)
+#define VIDEOFORMAT_TO_SIZE(format, width, height) \
+ switch(format) \
+ { \
+ case VIDEOFORMAT_SQCIF: width = 128; height = 96; break; \
+ case VIDEOFORMAT_QCIF: width = 176; height = 144; break; \
+ case VIDEOFORMAT_QVGA: width = 320; height = 240; break; \
+ case VIDEOFORMAT_CIF: width = 352; height = 288; break; \
+ case VIDEOFORMAT_IOS_MEDIUM: width = 480; height = 360; break; \
+ case VIDEOFORMAT_VGA: width = 640; height = 480; break; \
+ case VIDEOFORMAT_4CIF: width = 704; height = 576; break; \
+ case VIDEOFORMAT_SVGA: width = 800; height = 600; break; \
+ case VIDEOFORMAT_XGA: width = 1024; height = 768; break; \
+ case VIDEOFORMAT_SXGA: width = 1280; height = 1024; break; \
+ case VIDEOFORMAT_16CIF: width = 1408; height = 1152; break; \
+ case VIDEOFORMAT_NULL: \
+ default: width = 0; height = 0; break; \
+ } \
+
+
+// Macro to get a video format from its size (fixed: the height tests used assignment '=' instead of '==')
+#define SIZE_TO_VIDEOFORMAT(width, height, format) \
+ if ((width == 128) && (height == 96)) format = VIDEOFORMAT_SQCIF; \
+ else if ((width == 176) && (height == 144)) format = VIDEOFORMAT_QCIF; \
+ else if ((width == 320) && (height == 240)) format = VIDEOFORMAT_QVGA; \
+ else if ((width == 352) && (height == 288)) format = VIDEOFORMAT_CIF; \
+ else if ((width == 480) && (height == 360)) format = VIDEOFORMAT_IOS_MEDIUM; \
+ else if ((width == 640) && (height == 480)) format = VIDEOFORMAT_VGA; \
+ else if ((width == 704) && (height == 576)) format = VIDEOFORMAT_4CIF; \
+ else if ((width == 800) && (height == 600)) format = VIDEOFORMAT_SVGA; \
+ else if ((width == 1024) && (height == 768)) format = VIDEOFORMAT_XGA; \
+ else if ((width == 1280) && (height == 1024)) format = VIDEOFORMAT_SXGA; \
+ else if ((width == 1408) && (height == 1152)) format = VIDEOFORMAT_16CIF; \
+ else format = VIDEOFORMAT_NULL; \
+
+
+// Constants for consumer and producer Ids
+#define GRABBER_VIDEO_ID 0x1FFFFFFF
+#define REMOTE_VIDEO_ID 0x2FFFFFFF
+
+
+class VideoFrame
+{
+public:
+ VideoFrame() { this->data = NULL; this->width = 0; this->height = 0; this->bpp = 0; }; // zero-init all members so getters are safe before the setters run
+ virtual ~VideoFrame() { if(this->data) { this->data = NULL;} }; // non-owning: the frame never frees 'data'
+
+ int getWidth() { return this->width; };
+ int getHeight() { return this->height; };
+ int getBitsPerPixel() { return this->bpp; };
+ int getTotalBits () { return this->width * this->height * (this->bpp/8); }; // bytes-per-pixel granularity: bpp is divided by 8 first
+ void* getData() { return this->data; };
+
+ void setWidth(int width_) { this->width = width_; };
+ void setHeight(int height_) { this->height = height_; };
+ void setBitsPerPixel( int bpp_) { this->bpp = bpp_; };
+ void setData( void* data_) { this->data = data_; }; // caller retains ownership of the buffer
+
+ VIDEOFORMAT getSize();
+ void setSize(VIDEOFORMAT format);
+
+private:
+ void *data; // borrowed pixel buffer (see setData)
+ int width;
+ int height;
+ int bpp; // bits per pixel
+};
+
+#endif /* PLUGIN_DSHOW_VIDEOFRAME_H */
diff --git a/plugins/pluginDirectShow/internals/VideoGrabberName.cxx b/plugins/pluginDirectShow/internals/VideoGrabberName.cxx
new file mode 100644
index 0000000..4b418cf
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/VideoGrabberName.cxx
@@ -0,0 +1,37 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/VideoGrabberName.h"
+
+VideoGrabberName::VideoGrabberName(std::string name_, std::string descr) : name(name_), description(descr) // value object: (unique name, human-readable description) of a capture device
+{
+}
+
+std::string VideoGrabberName::getName() const // device identifier; the sole key used by operator==
+{
+ return this->name;
+}
+
+std::string VideoGrabberName::getDescription() const // human-readable label; NOT part of equality
+{
+ return this->description;
+}
+
+int VideoGrabberName::operator==(const VideoGrabberName &dev) const // equality compares by name only
+{
+ return this->name == dev.name;
+} \ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/VideoGrabberName.h b/plugins/pluginDirectShow/internals/VideoGrabberName.h
new file mode 100644
index 0000000..0bb45b5
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/VideoGrabberName.h
@@ -0,0 +1,43 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_DSHOW_VIDEOGRABBERNAME_H
+#define PLUGIN_DSHOW_VIDEOGRABBERNAME_H
+
+#include "plugin_dshow_config.h"
+
+#include<string>
+
+class VideoGrabberName // value type pairing a capture device's unique name with a description
+{
+public:
+ VideoGrabberName() {} // default-constructed instances have empty name/description
+
+ VideoGrabberName(std::string name, std::string description);
+
+ std::string getName() const; // unique identifier; used for equality
+
+ std::string getDescription() const; // human-readable label; ignored by operator==
+
+ int operator==( const VideoGrabberName &dev ) const; // non-zero when names match
+
+private:
+ std::string name;
+ std::string description;
+};
+
+#endif /* PLUGIN_DSHOW_VIDEOGRABBERNAME_H */
diff --git a/plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx b/plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx
new file mode 100644
index 0000000..a6b436a
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx
@@ -0,0 +1,108 @@
+#if defined(_WIN32_WCE)
+//
+// Copyright (c) Microsoft Corporation.  All rights reserved.
+//
+//
+// Use of this source code is subject to the terms of the Microsoft end-user
+// license agreement (EULA) under which you licensed this SOFTWARE PRODUCT.
+// If you did not accept the terms of the EULA, you are not authorized to use
+// this source code. For a copy of the EULA, please see the LICENSE.RTF on your
+// install media.
+//
+#include <windows.h>
+#include <Ocidl.h>
+#include <oleauto.h>
+
+#include "internals/wince/CPropertyBag.h"
+
+CPropertyBag::CPropertyBag() : _refCount(1), pVar(0) // new bag starts with one COM reference and an empty property list
+{
+}
+
+CPropertyBag::~CPropertyBag()
+{
+ VAR_LIST *pTemp = pVar;
+ // Walk the singly-linked list, freeing each node's VARIANT, its name BSTR, and the node itself.
+
+ while(pTemp) {
+ VAR_LIST *pDel = pTemp;
+ VariantClear(&pTemp->var);
+ SysFreeString(pTemp->pBSTRName);
+ pTemp = pTemp->pNext;
+ delete pDel;
+ }
+
+}
+
+HRESULT STDMETHODCALLTYPE
+CPropertyBag::Read(LPCOLESTR pszPropName,
+ VARIANT *_pVar,
+ IErrorLog *pErrorLog)
+{
+ VAR_LIST *pTemp = pVar;
+ HRESULT hr = E_INVALIDARG; // per IPropertyBag::Read: fail if the property is unknown (was S_OK, which returned success with an untouched VARIANT)
+
+ while (pTemp) {
+ if (0 == wcscmp(pszPropName, pTemp->pBSTRName)) {
+ hr = VariantCopy(_pVar, &pTemp->var);
+ break;
+ }
+ pTemp = pTemp->pNext;
+ }
+ return hr;
+}
+
+
+HRESULT STDMETHODCALLTYPE
+CPropertyBag::Write(LPCOLESTR pszPropName,
+ VARIANT *_pVar)
+{
+ HRESULT hr = S_OK;
+ VAR_LIST *pTemp = new VAR_LIST();
+ ASSERT(pTemp);
+
+ if ( !pTemp ) {
+ return E_OUTOFMEMORY;
+ }
+
+ VariantInit(&pTemp->var);
+ pTemp->pBSTRName = SysAllocString(pszPropName);
+ pTemp->pNext = pVar;
+ pVar = pTemp; // push onto the head of the list: a later Write for the same name shadows earlier ones, since Read returns the first match
+ return VariantCopy(&pTemp->var, _pVar);
+}
+
+ULONG STDMETHODCALLTYPE
+CPropertyBag::AddRef()
+{
+ return InterlockedIncrement((LONG *)&_refCount); // thread-safe; _refCount starts at 1 in the ctor
+}
+
+ULONG STDMETHODCALLTYPE
+CPropertyBag::Release()
+{
+ ASSERT(_refCount != 0xFFFFFFFF);
+ ULONG ret = InterlockedDecrement((LONG *)&_refCount);
+ if (!ret) {
+ delete this; // last reference released: standard COM self-destruct
+ }
+ return ret;
+}
+
+HRESULT STDMETHODCALLTYPE
+CPropertyBag::QueryInterface(REFIID riid, void** ppv)
+{
+ if (!ppv) {
+ return E_POINTER;
+ }
+ if (riid == IID_IPropertyBag || riid == IID_IUnknown) { // COM rules require QI to succeed for IID_IUnknown as well
+ *ppv = static_cast<IPropertyBag*>(this);
+ }
+ else {
+ return *ppv = 0, E_NOINTERFACE;
+ }
+
+ return AddRef(), S_OK;
+}
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/CPropertyBag.h b/plugins/pluginDirectShow/internals/wince/CPropertyBag.h
new file mode 100644
index 0000000..20ce779
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/wince/CPropertyBag.h
@@ -0,0 +1,43 @@
+#pragma once
+
+#if defined(_WIN32_WCE)
+
+#include "plugin_dshow_config.h"
+
+struct VAR_LIST // node of the bag's singly-linked (name, VARIANT) property list
+{
+ VARIANT var;
+ VAR_LIST *pNext;
+ BSTR pBSTRName; // property name; freed with SysFreeString in the dtor
+};
+
+class CPropertyBag : public IPropertyBag // minimal in-memory IPropertyBag for Windows CE
+{
+public:
+ CPropertyBag();
+ ~CPropertyBag();
+
+ HRESULT STDMETHODCALLTYPE
+ Read(
+ LPCOLESTR pszPropName,
+ VARIANT *pVar,
+ IErrorLog *pErrorLog
+ );
+
+
+ HRESULT STDMETHODCALLTYPE
+ Write(
+ LPCOLESTR pszPropName,
+ VARIANT *pVar
+ );
+
+ ULONG STDMETHODCALLTYPE AddRef();
+ ULONG STDMETHODCALLTYPE Release();
+ HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void** ppv);
+
+private:
+ ULONG _refCount; // COM reference count; starts at 1
+ VAR_LIST *pVar; // head of the property list (most recently written first)
+};
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h b/plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h
new file mode 100644
index 0000000..89d8909
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h
@@ -0,0 +1,30 @@
+/* Copyright (C) 2014-2015 Mamadou DIOP
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#pragma once
+
+#if defined(_WIN32_WCE)
+
+#include "plugin_dshow_config.h"
+
+interface DSISampleGrabberCB // abstract sample-grabber callback ('interface' is the Windows struct macro)
+{
+ virtual HRESULT STDMETHODCALLTYPE SampleCB(double SampleTime, IMediaSample *pSample) = 0;
+ virtual HRESULT STDMETHODCALLTYPE BufferCB(double SampleTime, BYTE *pBuffer, long BufferLen) = 0; // receives the converted RGB24 buffer from DSSampleGrabber::Transform
+};
+
+#endif /* _WIN32_WCE */ \ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx b/plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx
new file mode 100644
index 0000000..76d713c
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx
@@ -0,0 +1,56 @@
+#if defined(_WIN32_WCE)
+
+/* Copyright (C) 2014-2015 Mamadou DIOP
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+
+#include "internals/wince/DSNullFilter.h"
+
+// {7F9F08CF-139F-40b2-A283-01C4EC26A452}
+TDSHOW_DEFINE_GUID(CLSID_DSNullFilter,
+0x7f9f08cf, 0x139f, 0x40b2, 0xa2, 0x83, 0x1, 0xc4, 0xec, 0x26, 0xa4, 0x52);
+
+DSNullFilter::DSNullFilter(LPUNKNOWN punk,HRESULT *phr)
+ : CTransInPlaceFilter(TEXT("NullRenderer"), punk, CLSID_DSNullFilter, phr) // in-place transform used as a sink: accepts frames and does nothing
+{
+}
+
+HRESULT DSNullFilter::CheckInputType(const CMediaType *mtIn) // accept only VideoInfo-formatted RGB24 video
+{
+ CheckPointer(mtIn,E_POINTER);
+
+ if (*mtIn->FormatType() != FORMAT_VideoInfo) {
+ return E_INVALIDARG;
+ }
+
+ if ( *mtIn->Type( ) != MEDIATYPE_Video ) {
+ return E_INVALIDARG;
+ }
+
+ if ( *mtIn->Subtype( ) != MEDIASUBTYPE_RGB24 ) {
+ return E_INVALIDARG;
+ }
+
+ return NOERROR;
+}
+
+HRESULT DSNullFilter::Transform(IMediaSample *pSample)
+{
+ return NOERROR; // pass-through: the sample is deliberately left untouched
+}
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/DSNullFilter.h b/plugins/pluginDirectShow/internals/wince/DSNullFilter.h
new file mode 100644
index 0000000..fc9b76a
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/wince/DSNullFilter.h
@@ -0,0 +1,40 @@
+/* Copyright (C) 2014-2015 Mamadou DIOP
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#pragma once
+
+#if defined(_WIN32_WCE)
+
+#include "plugin_dshow_config.h"
+
+#include <streams.h>
+#include <math.h>
+
+class DSNullFilter : public CTransInPlaceFilter // no-op renderer: accepts RGB24 video and discards it (see DSNullFilter.cxx)
+{
+public:
+
+ DECLARE_IUNKNOWN;
+
+ HRESULT Transform(IMediaSample *pSample); // no-op pass-through
+ HRESULT CheckInputType(const CMediaType *mtIn); // accepts only VideoInfo RGB24
+
+ // Constructor
+ DSNullFilter( LPUNKNOWN punk, HRESULT *phr );
+};
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx b/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx
new file mode 100644
index 0000000..d33d105
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx
@@ -0,0 +1,197 @@
+#if defined(_WIN32_WCE)
+
+#include <ddraw.h>
+#include <internals/wince/DSSampleGrabber.h>
+#include <initguid.h>
+
+
+// {38589364-71FD-4641-B426-E443DB023568}
+TDSHOW_DEFINE_GUID(CLSID_SampleGrabber,
+0x38589364, 0x71fd, 0x4641, 0xb4, 0x26, 0xe4, 0x43, 0xdb, 0x2, 0x35, 0x68);
+
+#define RGB565_MASK_RED 0xF800
+#define RGB565_MASK_GREEN 0x07E0
+#define RGB565_MASK_BLUE 0x001F
+
+DSSampleGrabber::DSSampleGrabber(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr)
+:CTransInPlaceFilter (tszName, punk, CLSID_SampleGrabber, phr)
+{
+#define FPS_INPUT 30
+#define FPS_OUTPUT 5
+
+ this->m_rtFrameLength = (10000000)/FPS_OUTPUT; // 10,000,000 REFERENCE_TIME units (100ns) per second
+
+ this->m_inputFps = FPS_INPUT;
+ this->m_outputFps = FPS_OUTPUT;
+
+ this->m_iFrameNumber = 0;
+ this->m_progress = 0; // frame-dropping accumulator consumed by Transform()
+ this->m_bProcessFrame = true;
+
+ this->callback = NULL; // no consumer until SetCallback() is called
+ this->m_rgb24 = NULL; // conversion buffer allocated later by SetSize()
+
+ m_cRef = 0;
+}
+
+DSSampleGrabber::~DSSampleGrabber() {
+ this->callback = NULL; // not owned; never deleted here
+ if(this->m_rgb24)
+ {
+ delete[]this->m_rgb24;
+ this->m_rgb24 = NULL;
+ }
+}
+
+HRESULT DSSampleGrabber::SetFps(int inputFps, int outputFps) // reconfigure the input/output frame rates used for decimation
+{
+ if (inputFps <= 0 || outputFps <= 0) {
+ return E_FAIL;
+ }
+
+ // Pause processing while rates are swapped. NOTE(review): m_bProcessFrame is a plain bool read by the streaming thread in Transform() — not atomic; confirm this is tolerable.
+ this->m_bProcessFrame = false;
+
+ if (inputFps < outputFps) { // cannot output faster than the input delivers: clamp both to inputFps
+ this->m_inputFps = this->m_outputFps = inputFps;
+ }
+ else {
+ this->m_outputFps = outputFps;
+ this->m_inputFps = inputFps;
+ }
+
+ // Restart processing from a clean decimation state
+ this->m_iFrameNumber = 0;
+ this->m_progress = 0;
+ this->m_bProcessFrame = true;
+
+ return S_OK;
+}
+
+HRESULT DSSampleGrabber::Transform(IMediaSample *pSample)
+{
+ BYTE *pData = NULL;
+ // Frame-rate decimation + RGB565 -> RGB24 conversion; accepted frames are handed to this->callback.
+ HRESULT ret = S_FALSE;
+
+ if (!this->m_bProcessFrame) {
+ return S_FALSE;
+ }
+
+ // Get pointer to the video buffer data
+ if ( FAILED(pSample->GetPointer(&pData)) ) {
+ ret = E_FAIL;
+ goto bail;
+ }
+
+ pSample->SetTime(NULL, NULL);
+
+ // Drop frame?
+ if (this->m_iFrameNumber == 0) {
+ ret = S_OK;
+ }
+ else if (this->m_progress >= this->m_inputFps) {
+ this->m_progress -= this->m_inputFps;
+ ret = S_OK;
+ }
+
+ // Mark frame as accepted
+ if (ret == S_OK) {
+ // Set TRUE on every sample for uncompressed frames
+ pSample->SetSyncPoint(TRUE);
+
+ long Size = pSample->GetSize();
+ if ( this->callback && this->m_rgb24 ) { // m_rgb24 is only allocated by SetSize(): skip conversion until then
+ LONGLONG start = 0, end = 0; // zero-init: GetMediaTime may fail and leave them untouched
+ WORD *rgb565 = (WORD*)pData;
+
+ for(int i = 0, i24 = 0, i565 = 0; i< (Size/2); i++, i24+=3, i565+=1) {
+ BYTE *p24 = (this->m_rgb24+i24);
+ WORD val565 = *(rgb565 + i565);
+
+ // extract RGB
+ p24[2] = (val565 & RGB565_MASK_RED) >> 11;
+ p24[1] = (val565 & RGB565_MASK_GREEN) >> 5;
+ p24[0] = (val565 & RGB565_MASK_BLUE);
+
+ // amplify the image
+ p24[2] <<= 3;
+ p24[1] <<= 2;
+ p24[0] <<= 3;
+ }
+
+ pSample->GetMediaTime(&start, &end);
+ this->callback->BufferCB( (double)start, this->m_rgb24, ((Size >> 1) * 3));
+ }
+ }
+
+ this->m_progress += this->m_outputFps;
+ this->m_iFrameNumber++;
+
+bail:
+ // Do NOT free pData or release pSample here: pData points into the sample's own
+ // buffer (IMediaSample::GetPointer) and the caller owns the sample's reference.
+
+ return ret;
+}
+
+HRESULT DSSampleGrabber::CheckInputType(const CMediaType* mtIn)
+{
+ VIDEOINFO *video;
+ if ( !IsEqualGUID( *mtIn->Subtype(), MEDIASUBTYPE_RGB565 ) || !(video=(VIDEOINFO *)mtIn->Format()) ) { // accept only RGB565 input with a format block present
+ return E_FAIL;
+ }
+
+ return S_OK;
+}
+
+STDMETHODIMP DSSampleGrabber::SetCallback( DSISampleGrabberCB* callback_ ) // register the consumer that receives converted RGB24 frames; not owned by this filter
+{
+ if (!callback_) {
+ return E_FAIL;
+ }
+
+ this->callback = callback_;
+ return S_OK;
+}
+
+HRESULT DSSampleGrabber::SetSize(int width, int height) // (re)build the RGB24 output media type and conversion buffer
+{
+ ZeroMemory(&this->mt, sizeof(CMediaType)); // NOTE(review): raw-zeroing a CMediaType discards any previously allocated format block — confirm SetSize is only meant to run once per connection
+
+ VIDEOINFO *pvi = (VIDEOINFO *)this->mt.AllocFormatBuffer(sizeof(VIDEOINFO));
+ if (NULL == pvi)
+ {
+ return E_OUTOFMEMORY;
+ }
+
+ ZeroMemory(pvi, sizeof(VIDEOINFO));
+
+ pvi->bmiHeader.biCompression = BI_RGB;
+ pvi->bmiHeader.biBitCount = 24;
+ pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+ pvi->bmiHeader.biWidth = width;
+ pvi->bmiHeader.biHeight = height;
+ pvi->bmiHeader.biPlanes = 1;
+ pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
+ pvi->bmiHeader.biClrImportant = 0;
+
+ // Frame rate
+ pvi->AvgTimePerFrame = 10000000/this->m_outputFps;
+
+ SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
+ SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
+
+ this->mt.SetType(&MEDIATYPE_Video);
+ this->mt.SetFormatType(&FORMAT_VideoInfo);
+ this->mt.SetTemporalCompression(FALSE);
+
+ this->mt.SetSubtype(&MEDIASUBTYPE_RGB24);
+ this->mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
+
+ delete[] this->m_rgb24; this->m_rgb24 = new BYTE[pvi->bmiHeader.biSizeImage]; // delete[] NULL is a no-op; frees the old buffer so repeated SetSize() calls no longer leak
+
+ return S_OK;
+}
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h b/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h
new file mode 100644
index 0000000..39ee5c6
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h
@@ -0,0 +1,73 @@
+/* Copyright (C) 2014-2015 Mamadou DIOP
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+
+#pragma once
+
+#if defined(_WIN32_WCE)
+
+#include "plugin_dshow_config.h"
+
+#include <streams.h>
+#include <math.h>
+
+#include <internals/DSUtils.h>
+#include <internals/wince/DSISampleGrabberCB.h>
+#include "DSSampleGrabberUtils.h"
+
+class DSSampleGrabber : public CTransInPlaceFilter // WinCE sample grabber: decimates frames and converts RGB565 -> RGB24 for a callback
+{
+public:
+ // instantiation
+ DSSampleGrabber( TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr );
+ ~DSSampleGrabber(void);
+
+public:
+ HRESULT Transform(IMediaSample *pSample); // decimation + RGB565->RGB24 conversion
+ HRESULT CheckInputType(const CMediaType* mtIn); // accepts only RGB565
+
+ HRESULT SetFps(int inputFps, int outputFps);
+
+ // DECLARE_IUNKNOWN;
+ STDMETHODIMP QueryInterface(REFIID riid, void **ppv) {
+ return GetOwner()->QueryInterface(riid,ppv); // delegate to the aggregating owner
+ };
+ STDMETHODIMP_(ULONG) AddRef() {
+ return InterlockedIncrement(&m_cRef); // NOTE(review): AddRef bumps m_cRef but Release() delegates to the owner — asymmetric refcounting; confirm intended
+ };
+ STDMETHODIMP_(ULONG) Release() {
+ return GetOwner()->Release();
+ };
+
+ STDMETHODIMP SetCallback(DSISampleGrabberCB* callback_); // consumer of converted frames; not owned
+ HRESULT SetSize(int width, int height); // builds 'mt' and allocates m_rgb24
+
+ inline AM_MEDIA_TYPE GetMediaType() { return (AM_MEDIA_TYPE)this->mt; }
+
+private:
+ int m_progress; // decimation accumulator (see Transform)
+ int m_inputFps, m_outputFps;
+ bool m_bProcessFrame; // gate flag read by Transform; toggled by SetFps
+ REFERENCE_TIME m_rtFrameLength; // UNITS/fps
+ LONGLONG m_iFrameNumber;
+
+ DSISampleGrabberCB* callback; // borrowed pointer, set via SetCallback
+ CMediaType mt; // RGB24 output type built by SetSize
+ BYTE *m_rgb24; // conversion buffer owned by this filter (freed in dtor)
+};
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h b/plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h
new file mode 100644
index 0000000..01e1728
--- /dev/null
+++ b/plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h
@@ -0,0 +1,38 @@
+/* Copyright (C) 2014-2015 Mamadou DIOP
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#pragma once
+
+#if defined(_WIN32_WCE)
+
+// callback definition (raw-buffer delivery: pointer + length)
+typedef void (CALLBACK *MANAGEDCALLBACKPROC)(BYTE* pdata, long len);
+
+// ISampleGrabber interface definition (local declaration: the desktop DirectShow header is unavailable on WinCE)
+
+// {04951BFF-696A-4ade-828D-42A5F1EDB631}
+DEFINE_GUID(IID_ISampleGrabber,
+ 0x4951bff, 0x696a, 0x4ade, 0x82, 0x8d, 0x42, 0xa5, 0xf1, 0xed, 0xb6, 0x31);
+
+DECLARE_INTERFACE_(ISampleGrabber, IUnknown) {
+ STDMETHOD(SetCallback)(MANAGEDCALLBACKPROC callback) PURE;};
+
+ // {D11DFE19-8864-4a60-B26C-552F9AA472E1}
+DEFINE_GUID(CLSID_NullRenderer,
+ 0xd11dfe19, 0x8864, 0x4a60, 0xb2, 0x6c, 0x55, 0x2f, 0x9a, 0xa4, 0x72, 0xe1);
+
+#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/pluginDirectShow.vcproj b/plugins/pluginDirectShow/pluginDirectShow.vcproj
new file mode 100644
index 0000000..38642ad
--- /dev/null
+++ b/plugins/pluginDirectShow/pluginDirectShow.vcproj
@@ -0,0 +1,379 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9.00"
+ Name="pluginDirectShow"
+ ProjectGUID="{6A69773C-0C70-4BD4-8362-C274CAFD58F2}"
+ RootNamespace="pluginDirectShow"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="196613"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories=".;..\..\thirdparties\win32\include;..\..\thirdparties\win32\include\directshow;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;PLUGIN_DSHOW_EXPORTS;_WIN32_WINNT=0x0501"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="..\..\thirdparties\win32\lib\directshow\strmbasd.lib dmoguids.lib strmiids.lib uuid.lib Winmm.lib $(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="3"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories=".;..\..\thirdparties\win32\include;..\..\thirdparties\win32\include\directshow;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;PLUGIN_DSHOW_EXPORTS;_WIN32_WINNT=0x0501"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ DebugInformationFormat="0"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="..\..\thirdparties\win32\lib\directshow\strmbase.lib dmoguids.lib strmiids.lib uuid.lib Winmm.lib $(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="1"
+ GenerateDebugInformation="false"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\dllmain_dshow.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_screencast_dshow_producer.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_video_dshow_consumer.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_video_dshow_producer.cxx"
+ >
+ </File>
+ <Filter
+ Name="internals"
+ >
+ <File
+ RelativePath=".\internals\DSCaptureFormat.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSCaptureGraph.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSCaptureUtils.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDibHelper.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDisplay.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDisplayGraph.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDisplayOverlay.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDisplayOverlay.VMR.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDisplayOverlay.VMR9.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSFrameRateFilter.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSGrabber.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSOutputFilter.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSOutputStream.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSPushSourceDesktop.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSScreenCastGraph.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSUtils.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\Resizer.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\VideoDisplayName.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\VideoGrabberName.cxx"
+ >
+ </File>
+ </Filter>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\plugin_dshow_config.h"
+ >
+ </File>
+ <Filter
+ Name="internals"
+ >
+ <File
+ RelativePath=".\internals\DSBaseCaptureGraph.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSBufferWriter.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSCaptureFormat.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSCaptureGraph.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSCaptureUtils.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDibHelper.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDisplay.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDisplayGraph.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSDisplayOverlay.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSFrameRateFilter.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSGrabber.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSOutputFilter.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSOutputStream.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSPushSource.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSScreenCastGraph.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\DSUtils.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\Resizer.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\VideoDisplayName.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\VideoFrame.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\VideoGrabberName.h"
+ >
+ </File>
+ </Filter>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ >
+ <File
+ RelativePath=".\version.rc"
+ >
+ </File>
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/pluginDirectShow/plugin_dshow_config.h b/plugins/pluginDirectShow/plugin_dshow_config.h
new file mode 100644
index 0000000..f58e4b2
--- /dev/null
+++ b/plugins/pluginDirectShow/plugin_dshow_config.h
@@ -0,0 +1,103 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+
+#ifndef PLUGIN_DSHOW_CONFIG_H
+#define PLUGIN_DSHOW_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+# define PLUGIN_DSHOW_UNDER_WINDOWS 1
+# if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP || WINAPI_FAMILY == WINAPI_FAMILY_APP)
+# define PLUGIN_DSHOW_UNDER_WINDOWS_RT 1
+# endif
+#endif
+
+#if (PLUGIN_DSHOW_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(PLUGIN_DSHOW_EXPORTS)
+# define PLUGIN_DSHOW_API __declspec(dllexport)
+# define PLUGIN_DSHOW_GEXTERN extern __declspec(dllexport)
+#elif (PLUGIN_DSHOW_UNDER_WINDOWS || defined(__SYMBIAN32__)) && !defined(PLUGIN_DSHOW_IMPORTS_IGNORE)
+# define PLUGIN_DSHOW_API __declspec(dllimport)
+# define PLUGIN_DSHOW_GEXTERN __declspec(dllimport)
+#else
+# define PLUGIN_DSHOW_API
+# define PLUGIN_DSHOW_GEXTERN extern
+#endif
+
+/* Guards against C++ name mangling
+*/
+#ifdef __cplusplus
+# define PLUGIN_DSHOW_BEGIN_DECLS extern "C" {
+# define PLUGIN_DSHOW_END_DECLS }
+#else
+# define PLUGIN_DSHOW_BEGIN_DECLS
+# define PLUGIN_DSHOW_END_DECLS
+#endif
+
+/* Disable some well-known warnings
+*/
+#ifdef _MSC_VER
+# pragma warning (disable:4995 4996)
+# define _CRT_SECURE_NO_WARNINGS
+#endif
+
+/* Detecting C99 compilers
+ */
+#if (__STDC_VERSION__ == 199901L) && !defined(__C99__)
+# define __C99__
+#endif
+
+#if PLUGIN_DSHOW_UNDER_WINDOWS
+#include <windows.h>
+#endif
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <malloc.h>
+#include <memory.h>
+#include <tchar.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+ #include <config.h>
+#endif
+
+#if !defined(TDSHOW_DEFINE_GUID) && !defined(_WIN32_WCE)
+#define TDSHOW_DEFINE_GUID(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
+ EXTERN_C const GUID DECLSPEC_SELECTANY name \
+ = { l, w1, w2, { b1, b2, b3, b4, b5, b6, b7, b8 } }
+#elif !defined(TDSHOW_DEFINE_GUID) && defined(_WIN32_WCE)
+#define TDSHOW_DEFINE_GUID(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
+ EXTERN_C const GUID __declspec(selectany) name \
+ = { l, w1, w2, { b1, b2, b3, b4, b5, b6, b7, b8 } }
+#endif
+
+#if 1 // workaround for "Cannot open include file: 'dxtrans.h': No such file or directory"
+#pragma include_alias( "dxtrans.h", "qedit.h" )
+#define __IDxtCompositor_INTERFACE_DEFINED__
+#define __IDxtAlphaSetter_INTERFACE_DEFINED__
+#define __IDxtJpeg_INTERFACE_DEFINED__
+#define __IDxtKey_INTERFACE_DEFINED__
+#endif
+
+#endif // PLUGIN_DSHOW_CONFIG_H
diff --git a/plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx b/plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx
new file mode 100644
index 0000000..af09c4e
--- /dev/null
+++ b/plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx
@@ -0,0 +1,273 @@
+/* Copyright (C) 2014 Mamadou DIOP
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSGrabber.h"
+#include "internals/DSDisplay.h"
+#include "internals/DSUtils.h"
+
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_string.h"
+#include "tsk_debug.h"
+
+#define DSPRODUCER(self) ((plugin_screencast_dshow_producer_t*)(self))
+
+// Context for the DirectShow screen-cast (desktop capture) producer.
+// Extends the base tmedia producer via TMEDIA_DECLARE_PRODUCER.
+typedef struct plugin_screencast_dshow_producer_s
+{
+	TMEDIA_DECLARE_PRODUCER;
+
+	DSGrabber* grabber; // DirectShow capture graph wrapper (owned; deleted in the dtor)
+	INT64 previewHwnd; // HWND of the local preview window, stored as INT64 (0 = no preview)
+
+	tsk_bool_t started; // set by start(), cleared by stop()
+	tsk_bool_t mute; // when true, grabbing is paused instead of delivering frames
+	tsk_bool_t create_on_ui_thread; // create the grabber on the UI thread (default) or the calling thread
+}
+plugin_screencast_dshow_producer_t;
+
+// Producer callback (From DirectShow Grabber to our plugin)
+// "buffer"/"size" hold one captured frame; it is forwarded unchanged to the
+// encoder callback registered on the base producer. Always returns 0.
+static int plugin_video_dshow_plugin_cb(const void* callback_data, const void* buffer, tsk_size_t size)
+{
+	const plugin_screencast_dshow_producer_t* producer = (const plugin_screencast_dshow_producer_t*)callback_data;
+
+	if (producer && TMEDIA_PRODUCER(producer)->enc_cb.callback) {
+		TMEDIA_PRODUCER(producer)->enc_cb.callback(TMEDIA_PRODUCER(producer)->enc_cb.callback_data, buffer, size);
+	}
+
+	return 0;
+}
+
+
+/* ============ Media Producer Interface ================= */
+// Runtime option setter. Supported keys:
+//   "local-hwnd" (int64): HWND of the preview window; re-attached live if the grabber exists.
+//   "mute" (int32): pause/resume grabbing while started.
+//   "create-on-current-thead" (int32): non-zero => create the grabber on the calling
+//       thread (key spelling is a legacy wire constant and must NOT be corrected).
+//   "plugin-firefox" (int32): ignored for screencast.
+// Returns 0 on success, -1 on invalid arguments.
+// FIX(consistency): the original mixed DSPRODUCER(self) and DSPRODUCER(producer);
+// both aliases point to the same object, now accessed uniformly through "producer".
+static int plugin_screencast_dshow_producer_set(tmedia_producer_t *self, const tmedia_param_t* param)
+{
+	int ret = 0;
+	plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
+
+	if(!producer || !param){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if(param->value_type == tmedia_pvt_int64){
+		if(tsk_striequals(param->key, "local-hwnd")){
+			producer->previewHwnd = (INT64)*((int64_t*)param->value);
+			// If the capture graph already exists, attach the preview to the new window now.
+			if(producer->grabber && producer->grabber->preview){
+				producer->grabber->preview->attach(producer->previewHwnd);
+			}
+		}
+	}
+	else if(param->value_type == tmedia_pvt_int32){
+		if(tsk_striequals(param->key, "mute")){
+			producer->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+			// Only act when running; otherwise start() will honor the flag later.
+			if(producer->started){
+				if(producer->mute){
+					producer->grabber->pause();
+				}
+				else{
+					producer->grabber->start();
+				}
+			}
+		}
+		else if(tsk_striequals(param->key, "create-on-current-thead")){
+			producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+		}
+		else if(tsk_striequals(param->key, "plugin-firefox")){
+			TSK_DEBUG_INFO("'plugin-firefox' ignored for screencast");
+		}
+	}
+
+	return ret;
+}
+
+// Negotiates capture settings from the codec: the producer will deliver frames
+// at the codec's outgoing fps/width/height. Returns 0 on success, -1 on error.
+static int plugin_screencast_dshow_producer_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
+{
+	plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
+
+	// FIX: was "!producer || !codec && codec->plugin". Because "&&" binds tighter
+	// than "||", a NULL "codec" was dereferenced (codec->plugin) instead of being
+	// rejected. Validate each pointer explicitly.
+	if(!producer || !codec || !codec->plugin){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	TMEDIA_PRODUCER(producer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+	TMEDIA_PRODUCER(producer)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+	TMEDIA_PRODUCER(producer)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+
+	return 0;
+}
+
+// Starts screen capture: lazily creates the DSGrabber (on the UI thread or the
+// current thread depending on "create-on-current-thead"), applies the negotiated
+// capture parameters, wires the frame callback and the optional preview window,
+// then starts grabbing unless muted. Returns 0, -1 (bad arg) or -2 (no grabber).
+static int plugin_screencast_dshow_producer_start(tmedia_producer_t* self)
+{
+	plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
+	HRESULT hr = S_OK; // NOTE(review): currently unused
+
+	if (!producer) {
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	// Idempotent: a second start() is a no-op.
+	if (producer->started) {
+		return 0;
+	}
+
+	// create grabber on ALWAYS current thread
+	if (!producer->grabber) {
+		// static so a stable lvalue can be handed to the creation helpers
+		// (presumably taken by reference — TODO confirm createOnUIThead signature)
+		static BOOL __isDisplayFalse = FALSE;
+		static BOOL __isScreenCastTrue = TRUE;
+		if(producer->create_on_ui_thread) createOnUIThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastTrue);
+		else createOnCurrentThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastTrue);
+		if (!producer->grabber) {
+			TSK_DEBUG_ERROR("Failed to create grabber");
+			return -2;
+		}
+	}
+
+	// set parameters (negotiated in prepare())
+	producer->grabber->setCaptureParameters((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height, TMEDIA_PRODUCER(producer)->video.fps);
+
+	// set callback function (frames flow to plugin_video_dshow_plugin_cb)
+	producer->grabber->setCallback(plugin_video_dshow_plugin_cb, producer);
+
+	// attach preview
+	if (producer->grabber->preview) {
+		if (producer->previewHwnd) {
+			producer->grabber->preview->attach(producer->previewHwnd);
+		}
+		producer->grabber->preview->setSize((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height);
+	}
+
+	// start grabber (skipped while muted; "mute" toggling in set() resumes it)
+	if (!producer->mute) {
+		producer->grabber->start();
+	}
+
+	producer->started = tsk_true;
+
+	return 0;
+}
+
+// Temporarily suspends frame delivery; start() (or un-muting) resumes it.
+// Returns 0 on success, -1 on bad argument, -2 when no grabber exists.
+static int plugin_screencast_dshow_producer_pause(tmedia_producer_t* self)
+{
+	plugin_screencast_dshow_producer_t* p = (plugin_screencast_dshow_producer_t*)self;
+
+	if (!p) {
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (!p->grabber) {
+		TSK_DEBUG_ERROR("Invalid internal grabber");
+		return -2;
+	}
+
+	p->grabber->pause();
+	return 0;
+}
+
+// Stops screen capture. The grabber object is kept alive for a later restart;
+// it is only destroyed by the destructor. Returns 0, -1 (bad arg) or -2 (no grabber).
+static int plugin_screencast_dshow_producer_stop(tmedia_producer_t* self)
+{
+	plugin_screencast_dshow_producer_t* p = (plugin_screencast_dshow_producer_t*)self;
+
+	if (!p) {
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	// Never started (or already stopped): nothing to do.
+	if (!p->started) {
+		return 0;
+	}
+	if (!p->grabber) {
+		TSK_DEBUG_ERROR("Invalid internal grabber");
+		return -2;
+	}
+
+	p->grabber->stop();
+	p->started = tsk_false;
+	return 0;
+}
+
+
+//
+// DirectShow screen-cast producer object definition
+// (the original banner wrongly said "WaveAPI")
+//
+/* constructor: runs when the tsk_object is allocated */
+static tsk_object_t* plugin_screencast_dshow_producer_ctor(tsk_object_t * self, va_list * app)
+{
+	// COM must be initialized on this thread before any DirectShow call.
+	CoInitializeEx(NULL, COINIT_MULTITHREADED);
+
+	plugin_screencast_dshow_producer_t *producer = (plugin_screencast_dshow_producer_t *)self;
+	if(producer){
+		/* init base */
+		tmedia_producer_init(TMEDIA_PRODUCER(producer));
+		TMEDIA_PRODUCER(producer)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
+		/* init self with default values (CIF @ 15fps until prepare() renegotiates) */
+		producer->create_on_ui_thread = tsk_true;
+		TMEDIA_PRODUCER(producer)->video.fps = 15;
+		TMEDIA_PRODUCER(producer)->video.width = 352;
+		TMEDIA_PRODUCER(producer)->video.height = 288;
+	}
+	return self;
+}
+/* destructor: stops capture, detaches the callback and frees the grabber */
+static tsk_object_t* plugin_screencast_dshow_producer_dtor(tsk_object_t * self)
+{
+	plugin_screencast_dshow_producer_t *producer = (plugin_screencast_dshow_producer_t *)self;
+	if(producer){
+		/* stop (no-op when never started) */
+		if(producer->started){
+			plugin_screencast_dshow_producer_stop((tmedia_producer_t*)self);
+		}
+
+		/* for safety: detach the frame callback so the graph cannot call into a dying object */
+		if(producer->grabber){
+			producer->grabber->setCallback(tsk_null, tsk_null);
+		}
+
+		/* deinit base */
+		tmedia_producer_deinit(TMEDIA_PRODUCER(producer));
+		/* deinit self */
+		SAFE_DELETE_PTR(producer->grabber);
+	}
+
+	return self;
+}
+/* tsk_object definition: size, constructor, destructor, comparator */
+static const tsk_object_def_t plugin_screencast_dshow_producer_def_s =
+{
+	sizeof(plugin_screencast_dshow_producer_t),
+	plugin_screencast_dshow_producer_ctor,
+	plugin_screencast_dshow_producer_dtor,
+	tsk_null, // no comparator
+};
+/* plugin definition: binds the tsk_object to the tmedia producer interface
+   (set/prepare/start/pause/stop), registered as a BFCP video producer */
+static const tmedia_producer_plugin_def_t plugin_screencast_dshow_producer_plugin_def_s =
+{
+	&plugin_screencast_dshow_producer_def_s,
+
+	tmedia_bfcp_video,
+	"Microsoft DirectShow producer (ScreenCast)", // FIX: was misspelled "ScrenCast"
+
+	plugin_screencast_dshow_producer_set,
+	plugin_screencast_dshow_producer_prepare,
+	plugin_screencast_dshow_producer_start,
+	plugin_screencast_dshow_producer_pause,
+	plugin_screencast_dshow_producer_stop
+};
+const tmedia_producer_plugin_def_t *plugin_screencast_dshow_producer_plugin_def_t = &plugin_screencast_dshow_producer_plugin_def_s;
diff --git a/plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx b/plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx
new file mode 100644
index 0000000..dfd7e72
--- /dev/null
+++ b/plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx
@@ -0,0 +1,1319 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSDisplay.h"
+#include "internals/DSUtils.h"
+
+#include "tinymedia/tmedia_consumer.h"
+
+#include "tsk_safeobj.h"
+#include "tsk_string.h"
+#include "tsk_debug.h"
+
+
+#define DSCONSUMER(self) ((plugin_video_dshow_consumer_t*)(self))
+
+// Whether to use Direct3D device for direct rendering or DirectShow graph and custom source
+// Using DirectShow (DS) introduce delay when the input fps is different than the one in the custom src.
+// It's very hard to have something accurate when using DS because the input FPS changes depending on the congestion control. D3D is the best choice as frames are displayed as they arrive
+#if !defined(PLUGIN_DS_CV_USE_D3D9) && !defined(_WIN32_WCE)
+# define PLUGIN_DS_CV_USE_D3D9 1
+#endif
+
+/******* ********/
+
+#if PLUGIN_DS_CV_USE_D3D9
+
+#include <d3d9.h>
+#include <dxva2api.h>
+
+#ifdef _MSC_VER
+#pragma comment(lib, "d3d9")
+#endif
+
+const DWORD NUM_BACK_BUFFERS = 2;
+
+#undef SafeRelease
+#define SafeRelease(ppT) \
+{ \
+ if (*ppT) \
+ { \
+ (*ppT)->Release(); \
+ *ppT = NULL; \
+ } \
+}
+
+#undef CHECK_HR
+// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
+#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
+
+// Simple rational value (used here for the pixel aspect ratio).
+typedef struct _DSRatio
+	{
+		DWORD Numerator;
+		DWORD Denominator;
+	} DSRatio;
+
+static HRESULT CreateDeviceD3D9(
+ HWND hWnd,
+ IDirect3DDevice9** ppDevice,
+ IDirect3D9 **ppD3D,
+ D3DPRESENT_PARAMETERS &d3dpp
+ );
+static HRESULT TestCooperativeLevel(
+ struct plugin_video_dshow_consumer_s *pSelf
+ );
+static HRESULT CreateSwapChain(
+ HWND hWnd,
+ UINT32 nFrameWidth,
+ UINT32 nFrameHeight,
+ IDirect3DDevice9* pDevice,
+ IDirect3DSwapChain9 **ppSwapChain);
+
+static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+
+static inline HWND Window(struct plugin_video_dshow_consumer_s *pSelf);
+static inline LONG Width(const RECT& r);
+static inline LONG Height(const RECT& r);
+static inline RECT CorrectAspectRatio(const RECT& src, const DSRatio& srcPAR);
+static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst);
+static inline HRESULT UpdateDestinationRect(struct plugin_video_dshow_consumer_s *pSelf, BOOL bForce = FALSE);
+static HRESULT ResetDevice(struct plugin_video_dshow_consumer_s *pSelf, BOOL bUpdateDestinationRect = FALSE);
+static HRESULT SetFullscreen(struct plugin_video_dshow_consumer_s *pSelf, BOOL bFullScreen);
+static HWND CreateFullScreenWindow(struct plugin_video_dshow_consumer_s *pSelf);
+static HRESULT HookWindow(struct plugin_video_dshow_consumer_s *pSelf, HWND hWnd);
+static HRESULT UnhookWindow(struct plugin_video_dshow_consumer_s *pSelf);
+
+
+// Context for the D3D9-based video consumer (renderer).
+// Extends the base tmedia consumer via TMEDIA_DECLARE_CONSUMER.
+typedef struct plugin_video_dshow_consumer_s
+{
+	TMEDIA_DECLARE_CONSUMER;
+
+	BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked; // lifecycle / display state flags
+	BOOL bPluginFireFox, bPluginWebRTC4All; // hosting browser-plugin hints (affect device creation timing)
+	HWND hWindow; // application-provided render window ("remote-hwnd")
+	WNDPROC wndProc; // original window proc saved by HookWindow()
+	HWND hWindowFullScreen; // borderless window created on demand for fullscreen
+	RECT rcWindow; // last known client rect of the render window
+	RECT rcDest; // letter-boxed destination rect inside rcWindow
+	DSRatio pixelAR; // pixel aspect ratio (1:1 by default)
+
+	UINT32 nNegWidth; // negotiated frame width (pixels)
+	UINT32 nNegHeight; // negotiated frame height (pixels)
+	UINT32 nNegFps; // negotiated frame rate
+
+	D3DLOCKED_RECT rcLock; // scratch for back-buffer Lock/Unlock
+	IDirect3DDevice9* pDevice; // D3D9 device (all three D3D pointers are all-NULL or all-valid)
+	IDirect3D9 *pD3D; // D3D9 factory
+	IDirect3DSwapChain9 *pSwapChain; // swap chain sized to the incoming frames
+	D3DPRESENT_PARAMETERS d3dpp; // parameters used to (re)create the device
+
+	TSK_DECLARE_SAFEOBJ; // mutex guarding consume() against set()/stop()
+}
+plugin_video_dshow_consumer_t;
+
+static int _plugin_video_dshow_consumer_unprepare(plugin_video_dshow_consumer_t* pSelf);
+
+/* ============ Media Consumer Interface ================= */
+// Runtime option setter. Keys: "remote-hwnd" (int64 HWND to render into,
+// triggers a device reset when already prepared), "fullscreen" (int32 bool),
+// "plugin-firefox" / "plugin-webrtc4all" (int32 bools describing the hosting
+// browser plugin), "create-on-current-thead" (accepted, currently ignored).
+// Returns 0 on success, -1 on failure.
+static int plugin_video_dshow_consumer_set(tmedia_consumer_t *self, const tmedia_param_t* param)
+{
+	int ret = 0;
+	HRESULT hr = S_OK;
+	plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+
+	if(!self || !param)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(param->value_type == tmedia_pvt_int64)
+	{
+		if(tsk_striequals(param->key, "remote-hwnd"))
+		{
+			HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+			if(hWnd != pSelf->hWindow)
+			{
+				tsk_safeobj_lock(pSelf); // block consumer thread
+				pSelf->hWindow = hWnd;
+				if(pSelf->bPrepared)
+				{
+					// device already exists: rebuild it against the new window
+					hr = ResetDevice(pSelf);
+				}
+				tsk_safeobj_unlock(pSelf); // unblock consumer thread
+			}
+		}
+	}
+	else if(param->value_type == tmedia_pvt_int32)
+	{
+		if(tsk_striequals(param->key, "fullscreen"))
+		{
+			BOOL bFullScreen = !!*((int32_t*)param->value);
+			TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
+			CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
+		}
+		else if(tsk_striequals(param->key, "create-on-current-thead"))
+		{
+			// DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+		}
+		else if(tsk_striequals(param->key, "plugin-firefox"))
+		{
+			pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
+		}
+		else if(tsk_striequals(param->key, "plugin-webrtc4all"))
+		{
+			pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
+		}
+	}
+
+	CHECK_HR(hr);
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+
+// Negotiates display settings from the codec (incoming fps/size), requests
+// RGB32 raw frames from the decoder, and — when a render HWND is already known
+// and we are not hosted by webrtc4all — creates the D3D9 device and swap chain.
+// Device creation is otherwise deferred to the first consume(). Returns 0/-1.
+static int plugin_video_dshow_consumer_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+	plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+
+	// FIX: was "!pSelf || !codec && codec->plugin". Because "&&" binds tighter
+	// than "||", a NULL "codec" was dereferenced (codec->plugin) instead of
+	// being rejected. Validate each pointer explicitly.
+	if(!pSelf || !codec || !codec->plugin){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if(pSelf->bPrepared){
+		TSK_DEBUG_WARN("D3D9 video consumer already prepared");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+	HWND hWnd = Window(pSelf);
+
+	TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+	TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+	TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+	// Default the display size to the incoming frame size when not set by the app.
+	if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
+		TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+	}
+	if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
+		TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+	}
+
+	pSelf->nNegFps = (UINT32)TMEDIA_CONSUMER(pSelf)->video.fps;
+	pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
+	pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+	TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
+		pSelf->nNegFps,
+		pSelf->nNegWidth,
+		pSelf->nNegHeight);
+
+	TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+	TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
+
+	// The window handle is not created until the call is connect (incoming only) - At least on Internet Explorer 10
+	if(hWnd && !pSelf->bPluginWebRTC4All)
+	{
+		CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+		CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+	}
+	else
+	{
+		if(hWnd && pSelf->bPluginWebRTC4All)
+		{
+			TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creating until session get connected");
+		}
+		else
+		{
+			TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
+		}
+	}
+
+bail:
+	pSelf->bPrepared = SUCCEEDED(hr);
+	return pSelf->bPrepared ? 0 : -1;
+}
+
+// Marks the consumer as running; prepare() must have succeeded first.
+// Idempotent: a second start() only logs and returns 0.
+static int plugin_video_dshow_consumer_start(tmedia_consumer_t* self)
+{
+	plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+
+	if (!consumer) {
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (consumer->bStarted) {
+		TSK_DEBUG_INFO("D3D9 video consumer already started");
+		return 0;
+	}
+	if (!consumer->bPrepared) {
+		TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
+		return -1;
+	}
+
+	consumer->bPaused = false;
+	consumer->bStarted = true;
+	return 0;
+}
+
+// Renders one decoded RGB32 frame: (lazily) creates the D3D9 device/swap chain
+// once an HWND is available, renegotiates the swap chain when the incoming
+// frame size changes, copies the frame into the locked back buffer, letter-boxes
+// it into the destination rect and presents it. Serialized with set()/stop()
+// through the object's safeobj mutex. Returns 0 on success, -1 on failure.
+static int plugin_video_dshow_consumer_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+	plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+
+	HRESULT hr = S_OK;
+	HWND hWnd = Window(pSelf);
+
+	IDirect3DSurface9 *pSurf = NULL;
+	IDirect3DSurface9 *pBB = NULL;
+
+	if(!pSelf)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1; // because of the mutex lock do it here
+	}
+
+	tsk_safeobj_lock(pSelf);
+
+	if(!buffer || !size)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+
+	if(!pSelf->bStarted)
+	{
+		TSK_DEBUG_INFO("D3D9 video consumer not started");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	if(!hWnd)
+	{
+		TSK_DEBUG_INFO("Do not draw frame because HWND not set");
+		goto bail; // not an error as the application can decide to set the HWND at any time
+	}
+
+	if (!pSelf->bWindowHooked)
+	{
+		// Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
+		CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
+	}
+
+	if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain)
+	{
+		if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain)
+		{
+			CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
+		}
+
+		if(hWnd)
+		{
+			// means HWND was not set but defined now
+			pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+			pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+			CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+			CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+		}
+	}
+
+	// Incoming frame size changed (e.g. remote renegotiation): rebuild the swap chain.
+	if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
+		TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+			pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+			pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+		// Update media type
+
+		SafeRelease(&pSelf->pSwapChain);
+		CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
+
+		pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+		pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+		// Update Destination will do noting if the window size haven't changed.
+		// Force updating the destination rect if negotiated size change
+		CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
+	}
+
+	// Expect exactly width*height*4 bytes (RGB32).
+	if(((pSelf->nNegWidth * pSelf->nNegHeight) << 2) != size)
+	{
+		TSK_DEBUG_ERROR("%u not valid as input size", size);
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	CHECK_HR(hr = TestCooperativeLevel(pSelf));
+
+	CHECK_HR(hr = UpdateDestinationRect(pSelf));
+
+	CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
+	CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
+
+	// Fast copy() using MMX, SSE, or SSE2
+	// Only available on Vista or later: Use LoadLibrary() to get a pointer to the function
+	/*hr = MFCopyImage(
+		(BYTE*)pSelf->rcLock.pBits,
+		pSelf->rcLock.Pitch,
+		(BYTE*)buffer,
+		(pSelf->nNegWidth << 2),
+		(pSelf->nNegWidth << 2),
+		pSelf->nNegHeight
+		);*/
+
+	// Single memcpy when the surface pitch equals the frame stride; otherwise copy row by row.
+	if(pSelf->rcLock.Pitch == (pSelf->nNegWidth << 2))
+	{
+		memcpy(pSelf->rcLock.pBits, buffer, size);
+	}
+	else
+	{
+		const BYTE* pSrcPtr = (const BYTE*)buffer;
+		BYTE* pDstPtr = (BYTE*)pSelf->rcLock.pBits;
+		UINT32 nDstPitch = pSelf->rcLock.Pitch;
+		UINT32 nSrcPitch = (pSelf->nNegWidth << 2);
+		for(UINT32 i = 0; i < pSelf->nNegHeight; ++i)
+		{
+			memcpy(pDstPtr, pSrcPtr, nSrcPitch);
+			pDstPtr += nDstPitch;
+			pSrcPtr += nSrcPitch;
+		}
+	}
+
+	if(FAILED(hr))
+	{
+		// unlock() before leaving
+		pSurf->UnlockRect();
+		CHECK_HR(hr);
+	}
+
+	CHECK_HR(hr = pSurf->UnlockRect());
+
+	// Color fill the back buffer (white borders around the letter-boxed frame)
+	CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
+	CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
+
+	// Resize keeping aspect ratio and Blit the frame (required)
+	hr = pSelf->pDevice->StretchRect(
+		pSurf,
+		NULL,
+		pBB,
+		&pSelf->rcDest/*NULL*/,
+		D3DTEXF_LINEAR
+	); // could fail when display is being resized
+	if(SUCCEEDED(hr))
+	{
+		// Present the frame
+		CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
+	}
+	else
+	{
+		// Not fatal: skip this frame and keep the pipeline running.
+		TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
+	}
+
+bail:
+	SafeRelease(&pSurf);
+	SafeRelease(&pBB);
+
+	tsk_safeobj_unlock(pSelf);
+
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Pauses rendering. The flag is informational only: consume() keeps drawing
+// while bStarted is set. Returns 0 on success, -1 on bad argument.
+static int plugin_video_dshow_consumer_pause(tmedia_consumer_t* self)
+{
+	plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+
+	if (!pSelf) {
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (!pSelf->bStarted)
+	{
+		// FIX: the message wrongly referred to the "MF video producer"; this is
+		// the D3D9 video consumer (wording now matches start()/consume()).
+		TSK_DEBUG_INFO("D3D9 video consumer not started");
+		return 0;
+	}
+
+	pSelf->bPaused = true;
+	return 0;
+}
+
+// Stops rendering, hides the fullscreen window if one was created, and releases
+// all D3D9 resources; a new prepare() is required before the next start().
+static int plugin_video_dshow_consumer_stop(tmedia_consumer_t* self)
+{
+	plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+
+	if (!consumer) {
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	consumer->bStarted = false;
+	consumer->bPaused = false;
+
+	// Hide the borderless fullscreen window (if any) so it does not linger on screen.
+	if (consumer->hWindowFullScreen) {
+		::InvalidateRect(consumer->hWindowFullScreen, NULL, FALSE);
+		::ShowWindow(consumer->hWindowFullScreen, SW_HIDE);
+	}
+
+	// next start() will be called after prepare()
+	return _plugin_video_dshow_consumer_unprepare(consumer);
+}
+
+// Releases the window hook and every D3D9 object. Must only be called once the
+// consumer is stopped; refuses to run otherwise. Returns 0 on success, -1 on error.
+static int _plugin_video_dshow_consumer_unprepare(plugin_video_dshow_consumer_t* pSelf)
+{
+	if (!pSelf) {
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (pSelf->bStarted) {
+		TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+		return -1;
+	}
+
+	// Restore the original window procedure before dropping the device.
+	UnhookWindow(pSelf);
+
+	SafeRelease(&pSelf->pDevice);
+	SafeRelease(&pSelf->pD3D);
+	SafeRelease(&pSelf->pSwapChain);
+
+	pSelf->bPrepared = false;
+	return 0;
+}
+
+
+//
+// D3D9 video consumer object definition
+//
+/* constructor: runs when the tsk_object is allocated */
+static tsk_object_t* plugin_video_dshow_consumer_ctor(tsk_object_t * self, va_list * app)
+{
+	plugin_video_dshow_consumer_t *pSelf = (plugin_video_dshow_consumer_t *)self;
+	if(pSelf)
+	{
+		/* init base */
+		tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+		TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+		TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
+
+		/* init self: mutex + defaults (display size 0 => take the codec's size) */
+		tsk_safeobj_init(pSelf);
+		TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+		TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+		TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+		TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+		// square pixels (1:1) by default
+		pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1;
+	}
+	return self;
+}
+/* destructor: stops rendering, releases D3D9 resources and the mutex */
+static tsk_object_t* plugin_video_dshow_consumer_dtor(tsk_object_t * self)
+{
+	plugin_video_dshow_consumer_t *pSelf = (plugin_video_dshow_consumer_t *)self;
+	if (pSelf) {
+		/* stop (no-op when never started) */
+		if (pSelf->bStarted)
+		{
+			plugin_video_dshow_consumer_stop(TMEDIA_CONSUMER(pSelf));
+		}
+
+		/* deinit base */
+		tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+		/* deinit self (unprepare is safe to call twice; stop() already did it) */
+		_plugin_video_dshow_consumer_unprepare(pSelf);
+		tsk_safeobj_deinit(pSelf);
+	}
+
+	return self;
+}
+/* tsk_object definition: size, constructor, destructor, comparator */
+static const tsk_object_def_t plugin_video_dshow_consumer_def_s =
+{
+	sizeof(plugin_video_dshow_consumer_t),
+	plugin_video_dshow_consumer_ctor,
+	plugin_video_dshow_consumer_dtor,
+	tsk_null, // no comparator
+};
+/* plugin definition: binds the tsk_object to the tmedia consumer interface
+   (set/prepare/start/consume/pause/stop) */
+static const tmedia_consumer_plugin_def_t plugin_video_dshow_consumer_plugin_def_s =
+{
+	&plugin_video_dshow_consumer_def_s,
+
+	tmedia_video,
+	"Microsoft DirectShow consumer (D3D9)",
+
+	plugin_video_dshow_consumer_set,
+	plugin_video_dshow_consumer_prepare,
+	plugin_video_dshow_consumer_start,
+	plugin_video_dshow_consumer_consume,
+	plugin_video_dshow_consumer_pause,
+	plugin_video_dshow_consumer_stop
+};
+const tmedia_consumer_plugin_def_t *plugin_video_dshow_consumer_plugin_def_t = &plugin_video_dshow_consumer_plugin_def_s;
+
+// Helper functions
+
+// Creates a windowed-mode D3D9 device (hardware vertex processing, X8R8G8B8
+// back buffer, immediate presentation) bound to hWnd. On success *ppD3D and
+// *ppDevice are set and d3dpp receives the parameters used (needed later for
+// device reset); on failure both outputs are released and left NULL.
+// Both output pointers must come in NULL (all-or-nothing invariant).
+static HRESULT CreateDeviceD3D9(
+	HWND hWnd,
+	IDirect3DDevice9** ppDevice,
+	IDirect3D9 **ppD3D,
+	D3DPRESENT_PARAMETERS &d3dpp
+	)
+{
+	HRESULT hr = S_OK;
+
+	D3DDISPLAYMODE mode = { 0 };
+	D3DPRESENT_PARAMETERS pp = {0};
+
+	if(!ppDevice || *ppDevice || !ppD3D || *ppD3D)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION)))
+	{
+		CHECK_HR(hr = E_OUTOFMEMORY);
+	}
+
+	CHECK_HR(hr = (*ppD3D)->GetAdapterDisplayMode(
+		D3DADAPTER_DEFAULT,
+		&mode
+		));
+
+	// Verify the adapter supports X8R8G8B8 rendering in windowed mode.
+	CHECK_HR(hr = (*ppD3D)->CheckDeviceType(
+		D3DADAPTER_DEFAULT,
+		D3DDEVTYPE_HAL,
+		mode.Format,
+		D3DFMT_X8R8G8B8,
+		TRUE // windowed
+		));
+	pp.BackBufferFormat = D3DFMT_X8R8G8B8;
+	pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+	pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+	pp.Windowed = TRUE;
+	pp.hDeviceWindow = hWnd;
+	CHECK_HR(hr = (*ppD3D)->CreateDevice(
+		D3DADAPTER_DEFAULT,
+		D3DDEVTYPE_HAL,
+		hWnd,
+		D3DCREATE_HARDWARE_VERTEXPROCESSING,
+		&pp,
+		ppDevice
+		));
+
+	// Hand the caller the parameters actually used (for later Reset()).
+	d3dpp = pp;
+
+bail:
+	if(FAILED(hr))
+	{
+		SafeRelease(ppD3D);
+		SafeRelease(ppDevice);
+	}
+	return hr;
+}
+
+// Standard D3D9 device-lost handling:
+//   D3D_OK             -> device usable, proceed.
+//   D3DERR_DEVICELOST  -> device cannot be reset yet; report S_OK so the caller
+//                         simply skips rendering this frame.
+//   D3DERR_DEVICENOTRESET -> device can be restored now; perform the reset.
+// Any other HRESULT is propagated as-is.
+static HRESULT TestCooperativeLevel(
+	struct plugin_video_dshow_consumer_s *pSelf
+	)
+{
+	HRESULT hr = S_OK;
+
+	if (!pSelf || !pSelf->pDevice)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	switch((hr = pSelf->pDevice->TestCooperativeLevel()))
+	{
+		case D3D_OK:
+		{
+			break;
+		}
+
+		case D3DERR_DEVICELOST:
+		{
+			// not resettable yet: swallow the error and try again next frame
+			hr = S_OK;
+			break;
+		}
+
+		case D3DERR_DEVICENOTRESET:
+		{
+			hr = ResetDevice(pSelf, TRUE);
+			break;
+		}
+
+		default:
+		{
+			break;
+		}
+	}
+
+	CHECK_HR(hr);
+
+bail:
+	return hr;
+}
+
+// Creates an additional swap chain sized exactly to the incoming frame
+// (nFrameWidth x nFrameHeight) with a lockable back buffer so consume() can
+// memcpy decoded pixels into it. *ppSwapChain must come in NULL.
+static HRESULT CreateSwapChain(
+	HWND hWnd,
+	UINT32 nFrameWidth,
+	UINT32 nFrameHeight,
+	IDirect3DDevice9* pDevice,
+	IDirect3DSwapChain9 **ppSwapChain
+	)
+{
+	HRESULT hr = S_OK;
+
+	D3DPRESENT_PARAMETERS pp = { 0 };
+
+	if(!pDevice || !ppSwapChain || *ppSwapChain)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	pp.BackBufferWidth  = nFrameWidth;
+	pp.BackBufferHeight = nFrameHeight;
+	pp.Windowed = TRUE;
+	pp.SwapEffect = D3DSWAPEFFECT_FLIP;
+	pp.hDeviceWindow = hWnd;
+	pp.BackBufferFormat = D3DFMT_X8R8G8B8;
+	pp.Flags =
+		D3DPRESENTFLAG_VIDEO | D3DPRESENTFLAG_DEVICECLIP |
+		D3DPRESENTFLAG_LOCKABLE_BACKBUFFER; // lockable: CPU writes frames directly
+	pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+	pp.BackBufferCount = NUM_BACK_BUFFERS;
+
+	CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
+
+bail:
+	return hr;
+}
+
+// Returns the HWND rendering should target: the fullscreen window while
+// fullscreen mode is active, otherwise the application-provided one.
+static inline HWND Window(struct plugin_video_dshow_consumer_s *pSelf)
+{
+	if (!pSelf) {
+		return NULL;
+	}
+	return pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow;
+}
+
+// Width of a RECT in pixels.
+static inline LONG Width(const RECT& r)
+{
+	return r.right - r.left;
+}
+
+// Height of a RECT in pixels.
+static inline LONG Height(const RECT& r)
+{
+	return r.bottom - r.top;
+}
+
+//-----------------------------------------------------------------------------
+// CorrectAspectRatio
+//
+// Converts a rectangle from the source's pixel aspect ratio (PAR) to 1:1 PAR.
+// Returns the corrected rectangle.
+//
+// For example, a 720 x 486 rect with a PAR of 9:10, when converted to 1x1 PAR,
+// is stretched to 720 x 540.
+// Copyright (C) Microsoft
+//-----------------------------------------------------------------------------
+
+static inline RECT CorrectAspectRatio(const RECT& src, const DSRatio& srcPAR)
+{
+	// Start with a rectangle the same size as src, but offset to the origin (0,0).
+	RECT rc = {0, 0, src.right - src.left, src.bottom - src.top};
+
+	if ((srcPAR.Numerator != 1) || (srcPAR.Denominator != 1))
+	{
+		// Correct for the source's PAR. MulDiv rounds the 32-bit intermediate result.
+
+		if (srcPAR.Numerator > srcPAR.Denominator)
+		{
+			// The source has "wide" pixels, so stretch the width.
+			rc.right = MulDiv(rc.right, srcPAR.Numerator, srcPAR.Denominator);
+		}
+		else if (srcPAR.Numerator < srcPAR.Denominator)
+		{
+			// The source has "tall" pixels, so stretch the height.
+			rc.bottom = MulDiv(rc.bottom, srcPAR.Denominator, srcPAR.Numerator);
+		}
+		// else: PAR is 1:1, which is a no-op.
+	}
+	return rc;
+}
+
//-------------------------------------------------------------------
// LetterBoxRect
//
// Takes a src rectangle and constructs the largest possible
// destination rectangle within the specified destination rectangle
// such that the video maintains its current shape.
//
// This function assumes that pels are the same shape within both the
// source and destination rectangles.
// Copyright (C) Microsoft
//-------------------------------------------------------------------

static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst)
{
	// figure out src/dest scale ratios
	int iSrcWidth = Width(rcSrc);
	int iSrcHeight = Height(rcSrc);

	int iDstWidth = Width(rcDst);
	int iDstHeight = Height(rcDst);

	int iDstLBWidth;
	int iDstLBHeight;

	if (MulDiv(iSrcWidth, iDstHeight, iSrcHeight) <= iDstWidth) {

		// Column letter boxing ("pillar box")

		iDstLBWidth = MulDiv(iDstHeight, iSrcWidth, iSrcHeight);
		iDstLBHeight = iDstHeight;
	}
	else {

		// Row letter boxing.

		iDstLBWidth = iDstWidth;
		iDstLBHeight = MulDiv(iDstWidth, iSrcHeight, iSrcWidth);
	}


	// Create a centered rectangle within the current destination rect

	RECT rc;

	LONG left = rcDst.left + ((iDstWidth - iDstLBWidth) >> 1);
	LONG top = rcDst.top + ((iDstHeight - iDstLBHeight) >> 1);

	SetRect(&rc, left, top, left + iDstLBWidth, top + iDstLBHeight);

	return rc;
}
+
// Recomputes the letter-boxed destination rectangle (pSelf->rcDest) when the
// window's client area changed since the last call, or unconditionally when
// bForce is TRUE. Resets the D3D9 device first so the swap chain matches the
// new window size.
static inline HRESULT UpdateDestinationRect(plugin_video_dshow_consumer_t *pSelf, BOOL bForce /*= FALSE*/)
{
	HRESULT hr = S_OK;
	HWND hwnd = Window(pSelf); // safe before the NULL check: Window(NULL) returns NULL

	if(!pSelf)
	{
		CHECK_HR(hr = E_POINTER);
	}

	if(!hwnd)
	{
		CHECK_HR(hr = E_HANDLE);
	}
	RECT rcClient;
	GetClientRect(hwnd, &rcClient);

	// only update destination if window size changed
	if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top))
	{
		CHECK_HR(hr = ResetDevice(pSelf));

		pSelf->rcWindow = rcClient;
#if 1
		// Scale the source to square pixels, then center the largest
		// aspect-preserving rectangle inside the client area.
		RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
		rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
		pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
#else
		// Older float-based letterboxing, kept for reference.
		long w = rcClient.right - rcClient.left;
		long h = rcClient.bottom - rcClient.top;
		float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
		// (w/h)=ratio =>
		// 1) h=w/ratio
		// and
		// 2) w=h*ratio
		pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
		pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
		pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
		pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
#endif

		//::InvalidateRect(hwnd, NULL, FALSE);
	}

bail:
	return hr;
}
+
// (Re)creates the D3D9 device and swap chain after a device-lost condition
// or a window-size change. If an existing device fails Reset(), everything
// is released and rebuilt from scratch against the current target window.
// NOTE(review): pSelf is not NULL-checked here; confirm all callers check it.
static HRESULT ResetDevice(plugin_video_dshow_consumer_t *pSelf, BOOL bUpdateDestinationRect /*= FALSE*/)
{
	HRESULT hr = S_OK;

	tsk_safeobj_lock(pSelf);

	HWND hWnd = Window(pSelf);

	if (pSelf->pDevice)
	{
		D3DPRESENT_PARAMETERS d3dpp = pSelf->d3dpp;

		hr = pSelf->pDevice->Reset(&d3dpp);

		if (FAILED(hr))
		{
			// Reset() failed: release everything; recreated below.
			SafeRelease(&pSelf->pDevice);
			SafeRelease(&pSelf->pD3D);
			SafeRelease(&pSelf->pSwapChain);
		}
	}

	if (pSelf->pDevice == NULL && hWnd)
	{
		CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
		CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
	}

	if(bUpdateDestinationRect) // endless loop guard: UpdateDestinationRect() calls back into ResetDevice() with this flag unset
	{
		CHECK_HR(hr = UpdateDestinationRect(pSelf));
	}

bail:
	tsk_safeobj_unlock(pSelf);

	return hr;
}
+
// Toggles fullscreen rendering: shows (lazily creating) the dedicated
// fullscreen window, or hides it, then resets the device so the swap chain
// targets the newly selected window. No-op when the state is unchanged.
static HRESULT SetFullscreen(struct plugin_video_dshow_consumer_s *pSelf, BOOL bFullScreen)
{
	HRESULT hr = S_OK;
	if(!pSelf)
	{
		CHECK_HR(hr = E_POINTER);
	}

	if(pSelf->bFullScreen != bFullScreen)
	{
		tsk_safeobj_lock(pSelf);
		if(bFullScreen)
		{
			HWND hWnd = CreateFullScreenWindow(pSelf);
			if(hWnd)
			{
				::ShowWindow(hWnd, SW_SHOWDEFAULT);
				::UpdateWindow(hWnd);
			}
		}
		else if(pSelf->hWindowFullScreen)
		{
			::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
		}
		pSelf->bFullScreen = bFullScreen;
		if(pSelf->bPrepared)
		{
			hr = ResetDevice(pSelf);
		}
		tsk_safeobj_unlock(pSelf);

		CHECK_HR(hr); // checked only after unlock so the "bail" jump never skips the unlock
	}

bail:
	return hr;
}
+
// Window procedure shared by the consumer's windows. Exits fullscreen on any
// key press and suppresses background erasing to avoid flicker; everything
// else falls through to DefWindowProc(). The owning consumer is retrieved
// from the "Self" window property set by CreateFullScreenWindow().
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
	switch(uMsg)
	{
		case WM_CREATE:
		case WM_SIZE:
		case WM_MOVE:
		{
			// NOTE(review): dynamic_cast to the same pointer type is a no-op;
			// the plain C cast from GetPropA() already yields the consumer.
			struct plugin_video_dshow_consumer_s* pSelf = dynamic_cast<struct plugin_video_dshow_consumer_s*>((struct plugin_video_dshow_consumer_s*)GetPropA(hWnd, "Self"));
			if (pSelf)
			{
				// placeholder: no resize/move handling implemented yet
			}
			break;
		}
#if 0
		case WM_PAINT:
		{
			PAINTSTRUCT ps;
			HDC hdc = BeginPaint(hWnd, &ps);
			ps.fErase = FALSE;

			RECT rc;
			GetBoundsRect(hdc, &rc, 0);
			FillRect(hdc, &rc, (HBRUSH)GetStockObject(BLACK_BRUSH));

			EndPaint(hWnd, &ps);
			break;
		}
#endif

		case WM_ERASEBKGND:
		{
			return TRUE; // avoid background erasing.
		}


		case WM_CHAR:
		case WM_KEYUP:
		{
			struct plugin_video_dshow_consumer_s* pSelf = dynamic_cast<struct plugin_video_dshow_consumer_s*>((struct plugin_video_dshow_consumer_s*)GetPropA(hWnd, "Self"));
			if (pSelf)
			{
				// Any key leaves fullscreen mode.
				SetFullscreen(pSelf, FALSE);
			}

			break;
		}
	}

	return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
+
+static HWND CreateFullScreenWindow(struct plugin_video_dshow_consumer_s *pSelf)
+{
+ HRESULT hr = S_OK;
+
+ if(!pSelf)
+ {
+ return NULL;
+ }
+
+ if(!pSelf->hWindowFullScreen)
+ {
+ WNDCLASS wc = {0};
+
+ wc.lpfnWndProc = WndProc;
+ wc.hInstance = GetModuleHandle(NULL);
+ wc.hCursor = LoadCursor(NULL, IDC_ARROW);
+ wc.lpszClassName = L"WindowClass";
+ RegisterClass(&wc);
+ pSelf->hWindowFullScreen = ::CreateWindowEx(
+ NULL,
+ wc.lpszClassName,
+ L"Doubango's Video Consumer Fullscreen",
+ WS_EX_TOPMOST | WS_POPUP,
+ 0, 0,
+ GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
+ NULL,
+ NULL,
+ GetModuleHandle(NULL),
+ NULL);
+
+ SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
+ }
+ return pSelf->hWindowFullScreen;
+}
+
+static HRESULT HookWindow(plugin_video_dshow_consumer_t *pSelf, HWND hWnd)
+{
+ HRESULT hr = S_OK;
+
+ tsk_safeobj_lock(pSelf);
+
+ CHECK_HR(hr = UnhookWindow(pSelf));
+
+ if ((pSelf->hWindow = hWnd)) {
+ pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
+ if (!pSelf->wndProc) {
+ TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
+ CHECK_HR(hr = E_FAIL);
+ }
+ pSelf->bWindowHooked = TRUE;
+ }
+bail:
+ tsk_safeobj_unlock(pSelf);
+ return S_OK;
+}
+
+static HRESULT UnhookWindow(struct plugin_video_dshow_consumer_s *pSelf)
+{
+ tsk_safeobj_lock(pSelf);
+ if (pSelf->hWindow && pSelf->wndProc) {
+ SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
+ pSelf->wndProc = NULL;
+ }
+ if(pSelf->hWindow)
+ {
+ ::InvalidateRect(pSelf->hWindow, NULL, FALSE);
+ }
+ pSelf->bWindowHooked = FALSE;
+ tsk_safeobj_unlock(pSelf);
+ return S_OK;
+}
+
+#else /* !PLUGIN_DS_CV_USE_D3D9 */
+
// Consumer context for the non-D3D9 build: all rendering is delegated to a
// DSDisplay instance.
typedef struct plugin_video_dshow_consumer_s
{
	TMEDIA_DECLARE_CONSUMER;

	DSDisplay* display;   // DirectShow display (owned; deleted in the dtor)
	INT64 window;         // target window handle (HWND stored as an integer)

	tsk_bool_t plugin_firefox;      // running inside the Firefox plugin host
	tsk_bool_t started;             // display is running
	tsk_bool_t create_on_ui_thread; // create the display on the UI thread instead of the caller's
}
plugin_video_dshow_consumer_t;
+
+
+
+/* ============ Media Consumer Interface ================= */
// Runtime parameter setter.
// int64 "remote-hwnd": window to render into; attaches/detaches the display
//   immediately when it already exists.
// int32 "fullscreen": toggles fullscreen on the display.
// int32 "create-on-current-thead" (sic — established key name, do not "fix"):
//   non-zero means create the display on the calling thread, not the UI thread.
// int32 "plugin-firefox": toggles Firefox-plugin specific behavior.
static int plugin_video_dshow_consumer_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
	int ret = 0;

	if(!self || !param){
		TSK_DEBUG_ERROR("Invalid parameter");
		return -1;
	}

	if(param->value_type == tmedia_pvt_int64){
		if(tsk_striequals(param->key, "remote-hwnd")){
			DSCONSUMER(self)->window = (INT64)*((int64_t*)param->value);
			if(DSCONSUMER(self)->display){
				if(DSCONSUMER(self)->window){
					DSCONSUMER(self)->display->attach(DSCONSUMER(self)->window);
				}
				else{
					// NULL handle means "stop rendering into any window".
					DSCONSUMER(self)->display->detach();
				}
			}
		}
	}
	else if(param->value_type == tmedia_pvt_int32){
		if(tsk_striequals(param->key, "fullscreen")){
			if(DSCONSUMER(self)->display){
				DSCONSUMER(self)->display->setFullscreen(*((int32_t*)param->value) != 0);
			}
		}
		else if(tsk_striequals(param->key, "create-on-current-thead")){
			DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
		}
		else if(tsk_striequals(param->key, "plugin-firefox")){
			DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
			if(DSCONSUMER(self)->display){
				DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
			}
		}
	}

	return ret;
}
+
+
+static int plugin_video_dshow_consumer_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+
+ if(!consumer || !codec && codec->plugin){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_CONSUMER(consumer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(consumer)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(consumer)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(consumer)->video.display.width){
+ TMEDIA_CONSUMER(consumer)->video.display.width = TMEDIA_CONSUMER(consumer)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(consumer)->video.display.height){
+ TMEDIA_CONSUMER(consumer)->video.display.height = TMEDIA_CONSUMER(consumer)->video.in.height;
+ }
+
+ return 0;
+}
+
// Starts the consumer: lazily creates the DSDisplay (on the UI thread when
// configured), applies the pending settings, attaches the target window and
// begins rendering. Idempotent when already started.
static int plugin_video_dshow_consumer_start(tmedia_consumer_t* self)
{
	plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;

	if(!consumer){
		TSK_DEBUG_ERROR("Invalid parameter");
		return -1;
	}

	if(consumer->started){
		return 0; // already running
	}

	// create display on UI thread
	if(!consumer->display){
		if (consumer->create_on_ui_thread) createOnUIThead(reinterpret_cast<HWND>((void*)consumer->window), (void**)&consumer->display, true, false);
		else createOnCurrentThead(reinterpret_cast<HWND>((void*)consumer->window), (void**)&consumer->display, true, false);

		if(!consumer->display){
			TSK_DEBUG_ERROR("Failed to create display");
			return -2;
		}
	}

	// Set parameters
	consumer->display->setPluginFirefox((consumer->plugin_firefox == tsk_true));
	consumer->display->setFps(TMEDIA_CONSUMER(consumer)->video.fps);
	// do not change the display size: see hook()
	// consumer->display->setSize(TMEDIA_CONSUMER(consumer)->video.display.width, TMEDIA_CONSUMER(consumer)->video.display.height);
	if(consumer->window){
		consumer->display->attach(consumer->window);
	}

	// Start display
	consumer->display->start();
	consumer->started = tsk_true;

	return 0;
}
+
+static int plugin_video_dshow_consumer_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+ if(consumer && consumer->display && buffer){
+ consumer->display->handleVideoFrame(buffer, TMEDIA_CONSUMER(consumer)->video.display.width, TMEDIA_CONSUMER(consumer)->video.display.height);
+ return 0;
+ }
+ else{
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+}
+
+static int plugin_video_dshow_consumer_pause(tmedia_consumer_t* self)
+{
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+
+ if(!consumer){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(!consumer->display){
+ TSK_DEBUG_ERROR("Invalid internal grabber");
+ return -2;
+ }
+
+ //consumer->display->pause();
+
+ return 0;
+}
+
+static int plugin_video_dshow_consumer_stop(tmedia_consumer_t* self)
+{
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+
+ if(!self){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(!consumer->started){
+ return 0;
+ }
+
+ if(!consumer->display){
+ TSK_DEBUG_ERROR("Invalid internal display");
+ return -2;
+ }
+
+ TSK_DEBUG_INFO("Before stopping DirectShow consumer");
+ consumer->display->stop();
+ consumer->started = tsk_false;
+ TSK_DEBUG_INFO("After stopping DirectShow consumer");
+
+ return 0;
+}
+
+
+//
+// DirectShow consumer object definition
+//
+/* constructor */
// Constructor: initializes COM for the calling thread and sets the default
// negotiation values (BGR24 chroma, 15 fps, CIF 352x288, auto-resize on).
static tsk_object_t* plugin_video_dshow_consumer_ctor(tsk_object_t * self, va_list * app)
{
	CoInitializeEx(NULL, COINIT_MULTITHREADED);

	plugin_video_dshow_consumer_t *consumer = (plugin_video_dshow_consumer_t *)self;
	if(consumer){
		/* init base */
		tmedia_consumer_init(TMEDIA_CONSUMER(consumer));
		TMEDIA_CONSUMER(consumer)->video.display.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24

		/* init self */
		consumer->create_on_ui_thread = tsk_true;
		TMEDIA_CONSUMER(consumer)->video.fps = 15;
		TMEDIA_CONSUMER(consumer)->video.display.width = 352;
		TMEDIA_CONSUMER(consumer)->video.display.height = 288;
		TMEDIA_CONSUMER(consumer)->video.display.auto_resize = tsk_true;
	}
	return self;
}
+/* destructor */
// Destructor: stops the display if still running, deinits the base consumer
// and deletes the owned DSDisplay.
static tsk_object_t* plugin_video_dshow_consumer_dtor(tsk_object_t * self)
{
	plugin_video_dshow_consumer_t *consumer = (plugin_video_dshow_consumer_t *)self;
	if(consumer){

		/* stop */
		if(consumer->started){
			plugin_video_dshow_consumer_stop((tmedia_consumer_t*)self);
		}

		/* deinit base */
		tmedia_consumer_deinit(TMEDIA_CONSUMER(consumer));
		/* deinit self */
		SAFE_DELETE_PTR(consumer->display);

	}

	return self;
}
+/* object definition */
/* object definition: tsk runtime object descriptor (size + ctor/dtor) */
static const tsk_object_def_t plugin_video_dshow_consumer_def_s =
{
	sizeof(plugin_video_dshow_consumer_t),
	plugin_video_dshow_consumer_ctor,
	plugin_video_dshow_consumer_dtor,
	tsk_null,
};
/* plugin definition: vtable exported to tinyMEDIA */
static const tmedia_consumer_plugin_def_t plugin_video_dshow_consumer_plugin_def_s =
{
	&plugin_video_dshow_consumer_def_s,

	tmedia_video,
	"Microsoft DirectShow consumer (using custom source)",

	plugin_video_dshow_consumer_set,
	plugin_video_dshow_consumer_prepare,
	plugin_video_dshow_consumer_start,
	plugin_video_dshow_consumer_consume,
	plugin_video_dshow_consumer_pause,
	plugin_video_dshow_consumer_stop
};
const tmedia_consumer_plugin_def_t *plugin_video_dshow_consumer_plugin_def_t = &plugin_video_dshow_consumer_plugin_def_s;
+
+#endif /* PLUGIN_DS_CV_USE_D3D9 */
+
diff --git a/plugins/pluginDirectShow/plugin_video_dshow_producer.cxx b/plugins/pluginDirectShow/plugin_video_dshow_producer.cxx
new file mode 100644
index 0000000..61aa929
--- /dev/null
+++ b/plugins/pluginDirectShow/plugin_video_dshow_producer.cxx
@@ -0,0 +1,276 @@
+/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/DSGrabber.h"
+#include "internals/DSDisplay.h"
+#include "internals/DSUtils.h"
+
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_string.h"
+#include "tsk_debug.h"
+
+#define DSPRODUCER(self) ((plugin_video_dshow_producer_t*)(self))
+
// Producer context: wraps a DSGrabber capture pipeline and forwards captured
// frames to the registered encode callback.
typedef struct plugin_video_dshow_producer_s
{
	TMEDIA_DECLARE_PRODUCER;

	DSGrabber* grabber;   // DirectShow grabber (owned; deleted in the dtor)
	INT64 previewHwnd;    // local preview window handle (HWND as integer)

	tsk_bool_t plugin_firefox;      // running inside the Firefox plugin host
	tsk_bool_t started;             // grabber session is active
	tsk_bool_t mute;                // capture paused while session stays alive
	tsk_bool_t create_on_ui_thread; // create the grabber on the UI thread instead of the caller's
}
plugin_video_dshow_producer_t;
+
+// Producer callback (From DirectShow Grabber to our plugin)
+static int plugin_video_dshow_plugin_cb(const void* callback_data, const void* buffer, tsk_size_t size)
+{
+ const plugin_video_dshow_producer_t* producer = (const plugin_video_dshow_producer_t*)callback_data;
+
+ if(producer && TMEDIA_PRODUCER(producer)->enc_cb.callback){
+ TMEDIA_PRODUCER(producer)->enc_cb.callback(TMEDIA_PRODUCER(producer)->enc_cb.callback_data, buffer, size);
+ }
+
+ return 0;
+}
+
+
+/* ============ Media Producer Interface ================= */
// Runtime parameter setter.
// int64 "local-hwnd": preview window handle; attached immediately when the
//   grabber and its preview already exist.
// int32 "mute": pauses/restarts the grabber while keeping the session alive.
// int32 "create-on-current-thead" (sic — established key name, do not "fix"):
//   non-zero means create the grabber on the calling thread, not the UI thread.
// int32 "plugin-firefox": toggles Firefox-plugin specific behavior.
static int plugin_video_dshow_producer_set(tmedia_producer_t *self, const tmedia_param_t* param)
{
	int ret = 0;
	plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;

	if(!producer || !param){
		TSK_DEBUG_ERROR("Invalid parameter");
		return -1;
	}

	if(param->value_type == tmedia_pvt_int64){
		if(tsk_striequals(param->key, "local-hwnd")){
			DSPRODUCER(producer)->previewHwnd = (INT64)*((int64_t*)param->value);
			if(DSPRODUCER(producer)->grabber && DSPRODUCER(self)->grabber->preview){
				DSPRODUCER(producer)->grabber->preview->attach(DSPRODUCER(producer)->previewHwnd);
			}
		}
	}
	else if(param->value_type == tmedia_pvt_int32){
		if(tsk_striequals(param->key, "mute")){
			// NOTE(review): reads the value via TSK_TO_INT32 while the other
			// int32 keys dereference directly — confirm both forms agree.
			producer->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
			if(producer->started){
				if (producer->mute) {
					producer->grabber->pause();
				}
				else{
					producer->grabber->start();
				}
			}
		}
		else if(tsk_striequals(param->key, "create-on-current-thead")){
			producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
		}
		else if(tsk_striequals(param->key, "plugin-firefox")){
			producer->plugin_firefox = (*((int32_t*)param->value) != 0);
			if(producer->grabber){
				producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
			}
		}
	}

	return ret;
}
+
+static int plugin_video_dshow_producer_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
+{
+ plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
+
+ if(!producer || !codec && codec->plugin){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_PRODUCER(producer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+ TMEDIA_PRODUCER(producer)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+ TMEDIA_PRODUCER(producer)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+
+ return 0;
+}
+
// Starts capture: lazily creates the DSGrabber (on the UI thread when
// configured), selects the capture device, applies the negotiated size/fps,
// registers the frame callback, attaches the preview window and starts
// grabbing (unless muted). Idempotent when already started.
static int plugin_video_dshow_producer_start(tmedia_producer_t* self)
{
	plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;

	if(!producer){
		TSK_DEBUG_ERROR("Invalid parameter");
		return -1;
	}

	if (producer->started) {
		return 0; // already running
	}

	// create grabber on UI thread
	if (!producer->grabber) {
		static BOOL __isDisplayFalse = FALSE;
		static BOOL __isScreenCastFalse = FALSE;
		if(producer->create_on_ui_thread) createOnUIThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastFalse);
		else createOnCurrentThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastFalse);
		if (!producer->grabber) {
			TSK_DEBUG_ERROR("Failed to create grabber");
			return -2;
		}
	}
	producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));

	//set Source device ("Null" selects the default/null capture device here)
	producer->grabber->setCaptureDevice("Null");

	// set parameters
	producer->grabber->setCaptureParameters((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height, TMEDIA_PRODUCER(producer)->video.fps);

	// set callback function
	producer->grabber->setCallback(plugin_video_dshow_plugin_cb, producer);

	// attach preview
	if(producer->grabber->preview){
		if(producer->previewHwnd){
			producer->grabber->preview->attach(producer->previewHwnd);
		}
		producer->grabber->preview->setSize((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height);
	}

	// start grabber (muted producers stay prepared but do not capture)
	if(!producer->mute){
		producer->grabber->start();
	}
	producer->started = tsk_true;

	return 0;
}
+
+static int plugin_video_dshow_producer_pause(tmedia_producer_t* self)
+{
+ plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
+
+ if(!producer){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(!producer->grabber){
+ TSK_DEBUG_ERROR("Invalid internal grabber");
+ return -2;
+ }
+
+ producer->grabber->pause();
+
+ return 0;
+}
+
+static int plugin_video_dshow_producer_stop(tmedia_producer_t* self)
+{
+ plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
+
+ if(!self){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(!producer->started){
+ return 0;
+ }
+
+ if(!producer->grabber){
+ TSK_DEBUG_ERROR("Invalid internal grabber");
+ return -2;
+ }
+
+ producer->grabber->stop();
+ producer->started = tsk_false;
+
+ return 0;
+}
+
+
+//
+// WaveAPI producer object definition
+//
+/* constructor */
+static tsk_object_t* plugin_video_dshow_producer_ctor(tsk_object_t * self, va_list * app)
+{ plugin_video_dshow_producer_t *producer = (plugin_video_dshow_producer_t *)self;
+ if (producer) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(producer));
+ TMEDIA_PRODUCER(producer)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
+ /* init self with default values*/
+ producer->create_on_ui_thread = tsk_true;
+ TMEDIA_PRODUCER(producer)->video.fps = 15;
+ TMEDIA_PRODUCER(producer)->video.width = 352;
+ TMEDIA_PRODUCER(producer)->video.height = 288;
+ }
+ return self;
+}
+/* destructor */
// Destructor: stops the grabber if still running, detaches the frame
// callback so no frame can arrive during teardown, deinits the base
// producer and deletes the owned grabber.
static tsk_object_t* plugin_video_dshow_producer_dtor(tsk_object_t * self)
{
	plugin_video_dshow_producer_t *producer = (plugin_video_dshow_producer_t *)self;
	if(producer){
		/* stop */
		if(producer->started){
			plugin_video_dshow_producer_stop((tmedia_producer_t*)self);
		}

		/* for safety */
		if(producer->grabber){
			producer->grabber->setCallback(tsk_null, tsk_null);
		}

		/* deinit base */
		tmedia_producer_deinit(TMEDIA_PRODUCER(producer));
		/* deinit self */
		SAFE_DELETE_PTR(producer->grabber);
	}

	return self;
}
+/* object definition */
/* object definition: tsk runtime object descriptor (size + ctor/dtor) */
static const tsk_object_def_t plugin_video_dshow_producer_def_s =
{
	sizeof(plugin_video_dshow_producer_t),
	plugin_video_dshow_producer_ctor,
	plugin_video_dshow_producer_dtor,
	tsk_null,
};
/* plugin definition: vtable exported to tinyMEDIA */
static const tmedia_producer_plugin_def_t plugin_video_dshow_producer_plugin_def_s =
{
	&plugin_video_dshow_producer_def_s,

	tmedia_video,
	"Microsoft DirectShow producer",

	plugin_video_dshow_producer_set,
	plugin_video_dshow_producer_prepare,
	plugin_video_dshow_producer_start,
	plugin_video_dshow_producer_pause,
	plugin_video_dshow_producer_stop
};
const tmedia_producer_plugin_def_t *plugin_video_dshow_producer_plugin_def_t = &plugin_video_dshow_producer_plugin_def_s;
diff --git a/plugins/pluginDirectShow/version.rc b/plugins/pluginDirectShow/version.rc
new file mode 100644
index 0000000..fc0f489
--- /dev/null
+++ b/plugins/pluginDirectShow/version.rc
@@ -0,0 +1,102 @@
+// Microsoft Visual C++ generated resource script.
+//
+// #include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""afxres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 2.0.0.1156
+ PRODUCTVERSION 2.0.0.1156
+ FILEFLAGSMASK 0x17L
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904b0"
+ BEGIN
+ VALUE "CompanyName", "Doubango Telecom"
+ VALUE "FileDescription", "Doubango IMS Framework DirectShow Plugin"
+ VALUE "FileVersion", "2.0.0.1156"
+ VALUE "InternalName", "pluginDirectShow.dll"
+ VALUE "LegalCopyright", "(c) 2010-2013 Doubango Telecom. All rights reserved."
+ VALUE "OriginalFilename", "pluginDirectShow.dll"
+ VALUE "ProductName", "Doubango IMS Framework DirectShow Plugin"
+ VALUE "ProductVersion", "2.0.0.1156"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
diff --git a/plugins/pluginWASAPI/dllmain_wasapi.cxx b/plugins/pluginWASAPI/dllmain_wasapi.cxx
new file mode 100644
index 0000000..ff13977
--- /dev/null
+++ b/plugins/pluginWASAPI/dllmain_wasapi.cxx
@@ -0,0 +1,134 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_wasapi_config.h"
+
+#include "tinymedia/tmedia_producer.h"
+#include "tinymedia/tmedia_consumer.h"
+
+#include "tsk_plugin.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+
+#if defined(_MSC_VER)
+
+#endif
+
+
+#if !defined(PLUGIN_WASAPI_ENABLE)
+# define PLUGIN_WASAPI_ENABLE 1
+#endif
+
+extern const tmedia_producer_plugin_def_t *plugin_wasapi_producer_audio_plugin_def_t;
+extern const tmedia_consumer_plugin_def_t *plugin_wasapi_consumer_audio_plugin_def_t;
+
+PLUGIN_WASAPI_BEGIN_DECLS /* BEGIN */
+PLUGIN_WASAPI_API int __plugin_get_def_count();
+PLUGIN_WASAPI_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+PLUGIN_WASAPI_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+PLUGIN_WASAPI_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+PLUGIN_WASAPI_END_DECLS /* END */
+
+BOOL APIENTRY DllMain( HMODULE hModule,
+ DWORD ul_reason_for_call,
+ LPVOID lpReserved
+ )
+{
+ switch (ul_reason_for_call)
+ {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
+}
+
+
// Index of each exported plugin definition; PLUGIN_INDEX_COUNT doubles as
// the number of plugins reported by __plugin_get_def_count().
typedef enum PLUGIN_INDEX_E
{
#if PLUGIN_WASAPI_ENABLE
	PLUGIN_INDEX_CONSUMER,
	PLUGIN_INDEX_PRODUCER,
#endif

	PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
+
+
// Number of plugin definitions exported by this DLL (0 when WASAPI disabled).
int __plugin_get_def_count()
{
	return PLUGIN_INDEX_COUNT;
}
+
// Maps a plugin index to its definition type (consumer or producer);
// logs and returns tsk_plugin_def_type_none for unknown indexes.
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
#if PLUGIN_WASAPI_ENABLE
	switch(index){
		case PLUGIN_INDEX_CONSUMER:
		{
			return tsk_plugin_def_type_consumer;
		}
		case PLUGIN_INDEX_PRODUCER:
		{
			return tsk_plugin_def_type_producer;
		}
	}
#endif
	TSK_DEBUG_ERROR("No plugin at index %d", index);
	return tsk_plugin_def_type_none;
}
+
// Both exported plugins handle audio; logs and returns "none" for
// unknown indexes.
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
#if PLUGIN_WASAPI_ENABLE
	switch(index){
		case PLUGIN_INDEX_CONSUMER:
		case PLUGIN_INDEX_PRODUCER:
		{
			return tsk_plugin_def_media_type_audio;
		}
	}
#endif
	TSK_DEBUG_ERROR("No plugin at index %d", index);
	return tsk_plugin_def_media_type_none;
}
+
// Returns the plugin definition pointer for the given index (definitions are
// provided by the consumer/producer translation units); tsk_null when the
// index is out of range.
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
#if PLUGIN_WASAPI_ENABLE
	switch(index){
		case PLUGIN_INDEX_CONSUMER:
		{
			return plugin_wasapi_consumer_audio_plugin_def_t;
		}
		case PLUGIN_INDEX_PRODUCER:
		{
			return plugin_wasapi_producer_audio_plugin_def_t;
		}
	}
#endif
	TSK_DEBUG_ERROR("No plugin at index %d", index);
	return tsk_null;
}
diff --git a/plugins/pluginWASAPI/pluginWASAPI.vcproj b/plugins/pluginWASAPI/pluginWASAPI.vcproj
new file mode 100644
index 0000000..12ea812
--- /dev/null
+++ b/plugins/pluginWASAPI/pluginWASAPI.vcproj
@@ -0,0 +1,227 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9.00"
+ Name="pluginWASAPI"
+ ProjectGUID="{8D72C459-8B9D-4280-8331-8BAB02D735E5}"
+ RootNamespace="pluginWASAPI"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="196613"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories=".;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyDAV\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;PLUGIN_WASAPI_EXPORTS;TINYDAV_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="&quot;..\..\thirdparties\win32\lib\speex\libspeexdsp.a&quot; $(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="2"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories=".;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyDAV\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;PLUGIN_WASAPI_EXPORTS;TINYDAV_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="0"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="&quot;..\..\thirdparties\win32\lib\speex\libspeexdsp.a&quot; $(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="1"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\dllmain_wasapi.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_wasapi_consumer_audio.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_wasapi_producer_audio.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_wasapi_tdav.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_wasapi_utils.cxx"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\plugin_wasapi_config.h"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_wasapi_utils.h"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ <File
+ RelativePath=".\version.rc"
+ >
+ </File>
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/pluginWASAPI/plugin_wasapi_config.h b/plugins/pluginWASAPI/plugin_wasapi_config.h
new file mode 100644
index 0000000..d5f742f
--- /dev/null
+++ b/plugins/pluginWASAPI/plugin_wasapi_config.h
@@ -0,0 +1,78 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WASAPI_CONFIG_H
+#define PLUGIN_WASAPI_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+# define PLUGIN_WASAPI_UNDER_WINDOWS 1
+# if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP || WINAPI_FAMILY == WINAPI_FAMILY_APP)
+# define PLUGIN_WASAPI_UNDER_WINDOWS_RT 1
+# if WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP
+# define PLUGIN_WASAPI_UNDER_WINDOWS_PHONE 1
+# endif
+# endif
+#endif
+
+#if (PLUGIN_WASAPI_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(PLUGIN_WASAPI_EXPORTS)
+# define PLUGIN_WASAPI_API __declspec(dllexport)
+# define PLUGIN_WASAPI_GEXTERN extern __declspec(dllexport)
+#elif (PLUGIN_WASAPI_UNDER_WINDOWS || defined(__SYMBIAN32__)) && !defined(PLUGIN_WASAPI_IMPORTS_IGNORE)
+# define PLUGIN_WASAPI_API __declspec(dllimport)
+# define PLUGIN_WASAPI_GEXTERN __declspec(dllimport)
+#else
+# define PLUGIN_WASAPI_API
+# define PLUGIN_WASAPI_GEXTERN extern
+#endif
+
+// x86
+#if defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+# define PLUGIN_WASAPI_UNDER_X86 1
+#endif
+
+// Guards against C++ name mangling
+#ifdef __cplusplus
+# define PLUGIN_WASAPI_BEGIN_DECLS extern "C" {
+# define PLUGIN_WASAPI_END_DECLS }
+#else
+# define PLUGIN_WASAPI_BEGIN_DECLS
+# define PLUGIN_WASAPI_END_DECLS
+#endif
+
+#ifdef _MSC_VER
+# define inline __inline
+# define _CRT_SECURE_NO_WARNINGS
+# define _ALLOW_KEYWORD_MACROS
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+ #include <config.h>
+#endif
+
+#endif // PLUGIN_WASAPI_CONFIG_H
diff --git a/plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx b/plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx
new file mode 100644
index 0000000..97db2eb
--- /dev/null
+++ b/plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx
@@ -0,0 +1,700 @@
+/*Copyright (C) 2013 Mamadou Diop
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+/**@file plugin_wasapi_consumer_audio.cxx
+* @brief Microsoft Windows Audio Session API (WASAPI) consumer.
+* http://msdn.microsoft.com/en-us/library/windows/desktop/dd316551(v=vs.85).aspx
+*/
+#include "plugin_wasapi_utils.h"
+
+#include "tinydav/audio/tdav_consumer_audio.h"
+
+#include "tsk_thread.h"
+#include "tsk_memory.h"
+#include "tsk_string.h"
+#include "tsk_condwait.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+#include <audioclient.h>
+#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
+# include <phoneaudioclient.h>
+#else
+# include <Mmdeviceapi.h>
+#endif
+#include <initguid.h>
+
+#include <speex/speex_buffer.h>
+
+#if !defined(PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT)
+# define PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT 13
+#endif
+#define WASAPI_MILLIS_TO_100NS(MILLIS) (((LONGLONG)(MILLIS)) * 10000ui64)
+#define WASAPI_100NS_TO_MILLIS(NANOS) (((LONGLONG)(NANOS)) / 10000ui64)
+
+struct plugin_wasapi_consumer_audio_s;
+
+class AudioRender sealed
+{
+public:
+ AudioRender();
+ virtual ~AudioRender();
+
+ int Prepare(struct plugin_wasapi_consumer_audio_s* wasapi, const tmedia_codec_t* codec);
+ int UnPrepare();
+ int Start();
+ int Stop();
+ int Pause();
+ int Consume(const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr);
+private:
+ tsk_size_t Read(void* data, tsk_size_t size);
+ static void* TSK_STDCALL AsyncThread(void *pArg);
+
+private:
+ tsk_mutex_handle_t* m_hMutex;
+ const struct plugin_wasapi_consumer_audio_s* m_pWrappedConsumer; // Must not take ref() otherwise dtor() will be never called (circular reference)
+#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
+ IAudioClient2* m_pDevice;
+#else
+ IAudioClient* m_pDevice;
+#endif
+ IAudioRenderClient* m_pClient;
+ tsk_condwait_handle_t* m_hCondWait;
+ tsk_thread_handle_t* m_ppTread[1];
+ INT32 m_nBytesPerNotif;
+ INT32 m_nSourceFrameSizeInBytes;
+ UINT32 m_nMaxFrameCount;
+ UINT32 m_nPtime;
+ UINT32 m_nChannels;
+
+ struct
+ {
+ struct
+ {
+ void* buffer;
+ tsk_size_t size;
+ } chunck;
+ tsk_ssize_t leftBytes;
+ SpeexBuffer* buffer;
+ tsk_size_t size;
+ } m_ring;
+
+ bool m_bStarted;
+ bool m_bPrepared;
+ bool m_bPaused;
+};
+
+typedef struct plugin_wasapi_consumer_audio_s
+{
+ TDAV_DECLARE_CONSUMER_AUDIO;
+
+ AudioRender* pAudioRender;
+}
+plugin_wasapi_consumer_audio_t;
+
+
+/* ============ Media consumer Interface ================= */
+
+static int plugin_wasapi_consumer_audio_set(tmedia_consumer_t* self, const tmedia_param_t* param)
+{
+ return tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+}
+
+static int plugin_wasapi_consumer_audio_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+
+ if(!wasapi || !codec || !wasapi->pAudioRender)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_CONSUMER(wasapi)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_DECODING(codec);
+ TMEDIA_CONSUMER(wasapi)->audio.in.channels = TMEDIA_CODEC_CHANNELS_AUDIO_DECODING(codec);
+ TMEDIA_CONSUMER(wasapi)->audio.in.rate = TMEDIA_CODEC_RATE_DECODING(codec);
+
+ TSK_DEBUG_INFO("WASAPI consumer: in.channels=%d, out.channles=%d, in.rate=%d, out.rate=%d, ptime=%d",
+ TMEDIA_CONSUMER(wasapi)->audio.in.channels,
+ TMEDIA_CONSUMER(wasapi)->audio.out.channels,
+ TMEDIA_CONSUMER(wasapi)->audio.in.rate,
+ TMEDIA_CONSUMER(wasapi)->audio.out.rate,
+ TMEDIA_CONSUMER(wasapi)->audio.ptime);
+
+ return wasapi->pAudioRender->Prepare(wasapi, codec);
+}
+
+static int plugin_wasapi_consumer_audio_start(tmedia_consumer_t* self)
+{
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+
+ TSK_DEBUG_INFO("plugin_wasapi_consumer_audio_start()");
+
+ if(!wasapi || !wasapi->pAudioRender)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return wasapi->pAudioRender->Start();
+}
+
+
+static int plugin_wasapi_consumer_audio_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ if(!wasapi || !wasapi->pAudioRender || !buffer || !size)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return wasapi->pAudioRender->Consume(buffer, size, proto_hdr);
+}
+
+static int plugin_wasapi_consumer_audio_pause(tmedia_consumer_t* self)
+{
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+
+ if(!wasapi || !wasapi->pAudioRender)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return wasapi->pAudioRender->Pause();
+}
+
+static int plugin_wasapi_consumer_audio_stop(tmedia_consumer_t* self)
+{
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+
+ TSK_DEBUG_INFO("plugin_wasapi_consumer_audio_stop()");
+
+ if(!wasapi || !wasapi->pAudioRender)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return wasapi->pAudioRender->Stop();
+}
+
+
+
+
+
+
+
+AudioRender::AudioRender()
+: m_pDevice(NULL)
+, m_hMutex(NULL)
+, m_pClient(NULL)
+, m_hCondWait(NULL)
+, m_pWrappedConsumer(NULL)
+, m_nBytesPerNotif(0)
+, m_nSourceFrameSizeInBytes(0)
+, m_nMaxFrameCount(0)
+, m_nPtime(0)
+, m_nChannels(1)
+, m_bStarted(false)
+, m_bPrepared(false)
+, m_bPaused(false)
+{
+ m_ppTread[0] = NULL;
+ memset(&m_ring, 0, sizeof(m_ring));
+
+ if(!(m_hMutex = tsk_mutex_create()))
+ {
+ TSK_DEBUG_ERROR("Failed to create mutex");
+ }
+}
+
+AudioRender::~AudioRender()
+{
+ Stop();
+ UnPrepare();
+
+ tsk_mutex_destroy(&m_hMutex);
+}
+
+int AudioRender::Prepare(plugin_wasapi_consumer_audio_t* wasapi, const tmedia_codec_t* codec)
+{
+ HRESULT hr = E_FAIL;
+ int ret = 0;
+ WAVEFORMATEX wfx = {0};
+#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
+ AudioClientProperties properties = {0};
+#endif
+ LPCWSTR pwstrRenderId = NULL;
+ IMMDeviceEnumerator *pEnumerator = NULL;
+ IMMDevice *pDevice = NULL;
+
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bPrepared)
+ {
+ TSK_DEBUG_INFO("#WASAPI: Audio consumer already prepared");
+ goto bail;
+ }
+
+ if(!wasapi || !codec)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(m_pDevice || m_pClient)
+ {
+ TSK_DEBUG_ERROR("consumer already prepared");
+ CHECK_HR(hr = E_FAIL);
+ }
+#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
+ pwstrRenderId = GetDefaultAudioRenderId(AudioDeviceRole::Communications);
+
+ if (NULL == pwstrRenderId)
+ {
+ PLUGIN_WASAPI_ERROR("GetDefaultAudioRenderId", HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ CHECK_HR(hr = ActivateAudioInterface(pwstrRenderId, __uuidof(IAudioClient2), (void**)&m_pDevice));
+
+ // Win8 or WP8 only
+ properties.cbSize = sizeof AudioClientProperties;
+ properties.eCategory = AudioCategory_Communications;
+ CHECK_HR(hr = m_pDevice->SetClientProperties(&properties));
+#else
+ CHECK_HR(hr = CoCreateInstance(
+ CLSID_MMDeviceEnumerator, NULL,
+ CLSCTX_ALL, IID_IMMDeviceEnumerator,
+ (void**)&pEnumerator));
+
+ CHECK_HR(hr = pEnumerator->GetDefaultAudioEndpoint(
+ eRender, eCommunications, &pDevice));
+
+ CHECK_HR(hr = pDevice->Activate(
+ IID_IAudioClient, CLSCTX_ALL,
+ NULL, (void**)&m_pDevice));
+#endif
+
+
+
+
+ /* Set best format */
+ {
+ wfx.wFormatTag = WAVE_FORMAT_PCM;
+ wfx.nChannels = TMEDIA_CONSUMER(wasapi)->audio.in.channels;
+ wfx.nSamplesPerSec = TMEDIA_CONSUMER(wasapi)->audio.in.rate;
+ wfx.wBitsPerSample = TMEDIA_CONSUMER(wasapi)->audio.bits_per_sample;
+ wfx.nBlockAlign = (wfx.nChannels * wfx.wBitsPerSample/8);
+ wfx.nAvgBytesPerSec = (wfx.nSamplesPerSec * wfx.nBlockAlign);
+
+ PWAVEFORMATEX pwfxClosestMatch = NULL;
+ hr = m_pDevice->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &wfx, &pwfxClosestMatch);
+ if(hr != S_OK && hr != S_FALSE)
+ {
+ PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(hr == S_FALSE)
+ {
+ if(!pwfxClosestMatch)
+ {
+ TSK_DEBUG_ERROR("malloc(%d) failed", sizeof(WAVEFORMATEX));
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ wfx.nSamplesPerSec = pwfxClosestMatch->nSamplesPerSec;
+ wfx.nChannels = pwfxClosestMatch->nChannels;
+#if 0
+ wfx.wBitsPerSample = pwfxClosestMatch->wBitsPerSample;
+#endif
+ wfx.nBlockAlign = wfx.nChannels * (wfx.wBitsPerSample / 8);
+ wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
+ // Request resampler
+ TMEDIA_CONSUMER(wasapi)->audio.out.rate = (uint32_t)wfx.nSamplesPerSec;
+ TMEDIA_CONSUMER(wasapi)->audio.bits_per_sample = (uint8_t)wfx.wBitsPerSample;
+ TMEDIA_CONSUMER(wasapi)->audio.out.channels = (uint8_t)wfx.nChannels;
+
+ TSK_DEBUG_INFO("Audio device format fallback: rate=%d, bps=%d, channels=%d", wfx.nSamplesPerSec, wfx.wBitsPerSample, wfx.nChannels);
+ }
+ if(pwfxClosestMatch)
+ {
+ CoTaskMemFree(pwfxClosestMatch);
+ }
+ }
+
+ m_nSourceFrameSizeInBytes = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
+ m_nBytesPerNotif = ((wfx.nAvgBytesPerSec * TMEDIA_CONSUMER(wasapi)->audio.ptime)/1000) * wfx.nChannels;
+
+ // Initialize
+ CHECK_HR(hr = m_pDevice->Initialize(
+ AUDCLNT_SHAREMODE_SHARED,
+ 0x00000000,
+ (PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT * WASAPI_MILLIS_TO_100NS(TMEDIA_CONSUMER(wasapi)->audio.ptime)) ,
+ 0,
+ &wfx,
+ NULL));
+
+ REFERENCE_TIME DefaultDevicePeriod, MinimumDevicePeriod;
+ CHECK_HR(hr = m_pDevice->GetDevicePeriod(&DefaultDevicePeriod, &MinimumDevicePeriod));
+
+ CHECK_HR(hr = m_pDevice->GetBufferSize(&m_nMaxFrameCount));
+ TSK_DEBUG_INFO("#WASAPI (Playback): BufferSize=%u, DefaultDevicePeriod=%lld ms, MinimumDevicePeriod=%lldms", m_nMaxFrameCount, WASAPI_100NS_TO_MILLIS(DefaultDevicePeriod), WASAPI_100NS_TO_MILLIS(MinimumDevicePeriod));
+
+ if(!m_hCondWait)
+ {
+ if(!(m_hCondWait = tsk_condwait_create()))
+ {
+ PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+ }
+
+ CHECK_HR(hr = m_pDevice->GetService(__uuidof(IAudioRenderClient), (void**)&m_pClient));
+
+ int packetperbuffer = (1000 / TMEDIA_CONSUMER(wasapi)->audio.ptime);
+ m_ring.chunck.size = (wfx.nSamplesPerSec * (wfx.wBitsPerSample >> 3) / packetperbuffer) * wfx.nChannels;
+ m_ring.size = PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT * m_ring.chunck.size;
+ if(!(m_ring.chunck.buffer = tsk_realloc(m_ring.chunck.buffer, m_ring.chunck.size)))
+ {
+ m_ring.size = 0;
+ TSK_DEBUG_ERROR("Failed to allocate new buffer");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ if(!m_ring.buffer)
+ {
+ m_ring.buffer = speex_buffer_init(m_ring.size);
+ }
+ else
+ {
+ int sret;
+ if((sret = speex_buffer_resize(m_ring.buffer, m_ring.size)) < 0)
+ {
+ TSK_DEBUG_ERROR("speex_buffer_resize(%d) failed with error code=%d", m_ring.size, sret);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ }
+ if(!m_ring.buffer)
+ {
+ TSK_DEBUG_ERROR("Failed to create a new ring buffer with size = %d", m_ring.size);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+bail:
+ ret = SUCCEEDED(hr) ? 0 : -1;
+ if (pwstrRenderId)
+ {
+ CoTaskMemFree((LPVOID)pwstrRenderId);
+ }
+ if(ret != 0)
+ {
+ UnPrepare();
+ }
+
+ if((m_bPrepared = (ret == 0)))
+ {
+ m_pWrappedConsumer = wasapi;
+ m_nPtime = TMEDIA_CONSUMER(wasapi)->audio.ptime;
+ m_nChannels = TMEDIA_CONSUMER(wasapi)->audio.out.channels;
+ }
+
+ tsk_mutex_unlock(m_hMutex);
+
+ SafeRelease(&pEnumerator);
+ SafeRelease(&pDevice);
+
+ return ret;
+}
+
+int AudioRender::UnPrepare()
+{
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_hCondWait)
+ {
+ tsk_condwait_destroy(&m_hCondWait);
+ }
+ if(m_pDevice)
+ {
+ m_pDevice->Release(), m_pDevice = NULL;
+ }
+ if(m_pClient)
+ {
+ m_pClient->Release(), m_pClient = NULL;
+ }
+
+ TSK_FREE(m_ring.chunck.buffer);
+ if(m_ring.buffer)
+ {
+ speex_buffer_destroy(m_ring.buffer);
+ m_ring.buffer = NULL;
+ }
+
+ m_pWrappedConsumer = NULL;
+
+ m_bPrepared = false;
+
+ tsk_mutex_unlock(m_hMutex);
+
+ return 0;
+}
+
+int AudioRender::Start()
+{
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bStarted)
+ {
+ TSK_DEBUG_INFO("#WASAPI: Audio consumer already started");
+ goto bail;
+ }
+ if(!m_bPrepared)
+ {
+ TSK_DEBUG_ERROR("Audio consumer not prepared");
+ goto bail;
+ }
+
+ m_bStarted = true;
+ if(!m_ppTread[0] && tsk_thread_create(m_ppTread, AudioRender::AsyncThread, this) != 0)
+ {
+ m_bStarted = false;
+ goto bail;
+ }
+
+ HRESULT hr = m_pDevice->Start();
+ if(!SUCCEEDED(hr))
+ {
+ PLUGIN_WASAPI_ERROR(hr);
+ Stop();
+ }
+ m_bPaused = false;
+
+bail:
+ tsk_mutex_unlock(m_hMutex);
+
+ return (m_bStarted ? 0 : -2);
+}
+
+int AudioRender::Stop()
+{
+ m_bStarted = false;
+
+ tsk_mutex_lock(m_hMutex);
+
+ if (m_hCondWait)
+ {
+ tsk_condwait_broadcast(m_hCondWait);
+ }
+
+ if (m_ppTread[0])
+ {
+ tsk_thread_join(m_ppTread);
+ }
+
+ if(m_pDevice)
+ {
+ m_pDevice->Stop();
+ }
+
+ // will be prepared again before next start()
+ UnPrepare();
+
+ tsk_mutex_unlock(m_hMutex);
+
+ return 0;
+}
+
+int AudioRender::Pause()
+{
+ m_bPaused = true;
+
+ return 0;
+}
+
+int AudioRender::Consume(const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer), buffer, size, proto_hdr);
+}
+
+tsk_size_t AudioRender::Read(void* data, tsk_size_t size)
+{
+ tsk_ssize_t retSize = 0;
+
+ m_ring.leftBytes += size;
+ while (m_ring.leftBytes >= (tsk_ssize_t)m_ring.chunck.size)
+ {
+ m_ring.leftBytes -= m_ring.chunck.size;
+ retSize = (tsk_ssize_t)tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer), m_ring.chunck.buffer, m_ring.chunck.size);
+ tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer));
+ speex_buffer_write(m_ring.buffer, m_ring.chunck.buffer, retSize);
+ }
+ // IMPORTANT: looks like there is a bug in speex: continously trying to read more than avail
+ // many times can corrupt the buffer. At least on OS X 1.5
+ int avail = speex_buffer_get_available(m_ring.buffer);
+ //if(speex_buffer_get_available(m_ring.buffer) >= (tsk_ssize_t)size)
+ //{
+ retSize = speex_buffer_read(m_ring.buffer, data, TSK_MIN((int)size,avail));
+ //}
+ //else
+ //{
+ //memset(data, 0, size);
+ //}
+
+ return retSize;
+}
+
+void* TSK_STDCALL AudioRender::AsyncThread(void *pArg)
+{
+ HRESULT hr = S_OK;
+ INT32 nFramesToWrite;
+ UINT32 nPadding, nRead;
+ int waitResult = 0;
+ AudioRender* This = (AudioRender*)pArg;
+
+ TSK_DEBUG_INFO("#WASAPI: __playback_thread -- START");
+
+#define BREAK_WHILE tsk_mutex_unlock(This->m_hMutex); break;
+
+ while(This->m_bStarted && SUCCEEDED(hr))
+ {
+ waitResult = tsk_condwait_timedwait(This->m_hCondWait, This->m_nPtime);
+
+ tsk_mutex_lock(This->m_hMutex);
+
+ if(!This->m_bStarted)
+ {
+ BREAK_WHILE;
+ }
+
+ if(waitResult == 0)
+ {
+ hr = This->m_pDevice->GetCurrentPadding(&nPadding);
+ if (SUCCEEDED(hr))
+ {
+ BYTE* pRenderBuffer = NULL;
+ nFramesToWrite = This->m_nMaxFrameCount - nPadding;
+
+ if (nFramesToWrite > 0)
+ {
+ hr = This->m_pClient->GetBuffer(nFramesToWrite, &pRenderBuffer);
+ if (SUCCEEDED(hr))
+ {
+ nRead = This->Read(pRenderBuffer, (nFramesToWrite * This->m_nSourceFrameSizeInBytes));
+
+ // Release the buffer
+ hr = This->m_pClient->ReleaseBuffer((nRead / This->m_nSourceFrameSizeInBytes), (nRead == 0) ? AUDCLNT_BUFFERFLAGS_SILENT: 0);
+ }
+ }
+ }
+ }
+ else
+ {
+ BREAK_WHILE;
+ }
+
+ tsk_mutex_lock(This->m_hMutex);
+ }// end-of-while
+
+ if (!SUCCEEDED(hr))
+ {
+ PLUGIN_WASAPI_ERROR(hr);
+ }
+
+ TSK_DEBUG_INFO("WASAPI: __playback_thread(%s) -- STOP", (SUCCEEDED(hr) && waitResult == 0) ? "OK": "NOK");
+
+ return NULL;
+}
+
+
+
+
+
+
+
+//
+// WaveAPI consumer object definition
+//
+/* constructor */
+static tsk_object_t* plugin_wasapi_consumer_audio_ctor(tsk_object_t * self, va_list * app)
+{
+ plugin_wasapi_consumer_audio_t *wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ if(wasapi)
+ {
+ WASAPIUtils::Startup();
+
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(wasapi));
+ /* init self */
+
+ wasapi->pAudioRender = new AudioRender();
+ if(!wasapi->pAudioRender)
+ {
+ TSK_DEBUG_ERROR("Failed to create renderer");
+ return tsk_null;
+ }
+ }
+ return self;
+}
+/* destructor */
+static tsk_object_t* plugin_wasapi_consumer_audio_dtor(tsk_object_t * self)
+{
+ plugin_wasapi_consumer_audio_t *wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ if(wasapi)
+ {
+ /* stop */
+ plugin_wasapi_consumer_audio_stop((tmedia_consumer_t*)self);
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(wasapi));
+ /* deinit self */
+ if(wasapi->pAudioRender)
+ {
+ delete wasapi->pAudioRender;
+ wasapi->pAudioRender = NULL;
+ }
+ }
+
+ return self;
+}
+/* object definition */
+static const tsk_object_def_t plugin_wasapi_consumer_audio_def_s =
+{
+ sizeof(plugin_wasapi_consumer_audio_t),
+ plugin_wasapi_consumer_audio_ctor,
+ plugin_wasapi_consumer_audio_dtor,
+ tdav_consumer_audio_cmp,
+};
+/* plugin definition*/
+static const tmedia_consumer_plugin_def_t plugin_wasapi_consumer_audio_plugin_def_s =
+{
+ &plugin_wasapi_consumer_audio_def_s,
+
+ tmedia_audio,
+ "Microsoft Windows Audio Session API (WASAPI) consumer",
+
+ plugin_wasapi_consumer_audio_set,
+ plugin_wasapi_consumer_audio_prepare,
+ plugin_wasapi_consumer_audio_start,
+ plugin_wasapi_consumer_audio_consume,
+ plugin_wasapi_consumer_audio_pause,
+ plugin_wasapi_consumer_audio_stop
+};
+const tmedia_consumer_plugin_def_t *plugin_wasapi_consumer_audio_plugin_def_t = &plugin_wasapi_consumer_audio_plugin_def_s;
+
diff --git a/plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx b/plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx
new file mode 100644
index 0000000..6f44ab0
--- /dev/null
+++ b/plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx
@@ -0,0 +1,712 @@
+/*Copyright (C) 2013 Mamadou Diop
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+/**@file plugin_wasapi_producer_audio.cxx
+* @brief Microsoft Windows Audio Session API (WASAPI) producer.
+* http://msdn.microsoft.com/en-us/library/windows/desktop/dd316551(v=vs.85).aspx
+*/
+#include "plugin_wasapi_utils.h"
+
+#include "tinydav/audio/tdav_producer_audio.h"
+
+#include "tsk_thread.h"
+#include "tsk_memory.h"
+#include "tsk_string.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+#include <audioclient.h>
+#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
+# include <phoneaudioclient.h>
+#else
+# include <Mmdeviceapi.h>
+#endif
+#include <initguid.h>
+
+#include <speex/speex_buffer.h>
+
+static const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
+static const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
+static const IID IID_IAudioClient = __uuidof(IAudioClient);
+static const IID IID_IAudioCaptureClient = __uuidof(IAudioCaptureClient);
+
+#if !defined(PLUGIN_WASAPI_PRODUCER_NOTIF_POS_COUNT)
+# define PLUGIN_WASAPI_PRODUCER_NOTIF_POS_COUNT 10
+#endif
+#define WASAPI_MILLIS_TO_100NS(MILLIS) (((LONGLONG)(MILLIS)) * 10000ui64)
+#define WASAPI_100NS_TO_MILLIS(NANOS) (((LONGLONG)(NANOS)) / 10000ui64)
+
+struct plugin_wasapi_producer_audio_s;
+
+class AudioCapture
+{
+public:
+ AudioCapture();
+ virtual ~AudioCapture();
+
+ int Prepare(struct plugin_wasapi_producer_audio_s* wasapi, const tmedia_codec_t* codec);
+ int UnPrepare();
+ int Start();
+ int Stop();
+ int Pause();
+
+private:
+ static void* TSK_STDCALL AsyncThread(void *pArg);
+
+private:
+ tsk_mutex_handle_t* m_hMutex;
+#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
+ IAudioClient2* m_pDevice;
+#else
+ IAudioClient* m_pDevice;
+#endif
+ IAudioCaptureClient* m_pClient;
+ HANDLE m_hCaptureEvent;
+ HANDLE m_hShutdownEvent;
+ tsk_thread_handle_t* m_ppTread[1];
+ INT32 m_nBytesPerNotif;
+ INT32 m_nSourceFrameSizeInBytes;
+
+ struct
+ {
+ tmedia_producer_enc_cb_f fn;
+ const void* pcData;
+ } m_callback;
+
+ struct
+ {
+ struct
+ {
+ void* buffer;
+ tsk_size_t size;
+ } chunck;
+ SpeexBuffer* buffer;
+ tsk_size_t size;
+ } m_ring;
+ bool m_bStarted;
+ bool m_bPrepared;
+ bool m_bPaused;
+};
+
+typedef struct plugin_wasapi_producer_audio_s
+{
+ TDAV_DECLARE_PRODUCER_AUDIO;
+ AudioCapture* pAudioCapture;
+}
+plugin_wasapi_producer_audio_t;
+
+
+/* ============ Media Producer Interface ================= */
+static int plugin_wasapi_producer_audio_set(tmedia_producer_t* self, const tmedia_param_t* param)
+{
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+ if(param->plugin_type == tmedia_ppt_producer)
+ {
+ if(param->value_type == tmedia_pvt_int32)
+ {
+ if(tsk_striequals(param->key, "volume"))
+ {
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "mute"))
+ {
+ //wasapi->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+#if !FIXME_SEND_SILENCE_ON_MUTE
+ //if(wasapi->started){
+ // if(wasapi->mute){
+ //IDirectSoundCaptureBuffer_Stop(wasapi->captureBuffer);
+ // }
+ // else{
+ //IDirectSoundCaptureBuffer_Start(wasapi->captureBuffer, DSBPLAY_LOOPING);
+ // }
+ //}
+#endif
+ return 0;
+ }
+ }
+ }
+ return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
+}
+
+
+
+static int plugin_wasapi_producer_audio_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
+{
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+
+ if(!wasapi || !codec || !wasapi->pAudioCapture)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ /* codec should have ptime */
+ TMEDIA_PRODUCER(wasapi)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
+ TMEDIA_PRODUCER(wasapi)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
+ TMEDIA_PRODUCER(wasapi)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
+
+ TSK_DEBUG_INFO("WASAPI producer: channels=%d, rate=%d, ptime=%d",
+ TMEDIA_PRODUCER(wasapi)->audio.channels,
+ TMEDIA_PRODUCER(wasapi)->audio.rate,
+ TMEDIA_PRODUCER(wasapi)->audio.ptime);
+
+ return wasapi->pAudioCapture->Prepare(wasapi, codec);
+}
+
+static int plugin_wasapi_producer_audio_start(tmedia_producer_t* self)
+{
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+
+ TSK_DEBUG_INFO("plugin_wasapi_producer_audio_start()");
+
+ if(!wasapi || !wasapi->pAudioCapture){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return wasapi->pAudioCapture->Start();
+}
+
+static int plugin_wasapi_producer_audio_pause(tmedia_producer_t* self)
+{
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+
+ if(!wasapi || !wasapi->pAudioCapture){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return wasapi->pAudioCapture->Pause();
+}
+
+static int plugin_wasapi_producer_audio_stop(tmedia_producer_t* self)
+{
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+
+ TSK_DEBUG_INFO("plugin_wasapi_producer_audio_stop()");
+
+ if(!wasapi || !wasapi->pAudioCapture){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return wasapi->pAudioCapture->Stop();
+}
+
+
+
+
+
+
+
+AudioCapture::AudioCapture()
+: m_pDevice(NULL)
+, m_hMutex(NULL)
+, m_pClient(NULL)
+, m_hCaptureEvent(NULL)
+, m_hShutdownEvent(NULL)
+, m_nBytesPerNotif(0)
+, m_nSourceFrameSizeInBytes(0)
+, m_bStarted(false)
+, m_bPrepared(false)
+, m_bPaused(false)
+{
+ m_ppTread[0] = NULL;
+ memset(&m_ring, 0, sizeof(m_ring));
+
+ m_callback.fn = NULL, m_callback.pcData = NULL;
+
+ if(!(m_hMutex = tsk_mutex_create()))
+ {
+ TSK_DEBUG_ERROR("Failed to create mutex");
+ }
+}
+
+AudioCapture::~AudioCapture()
+{
+ Stop();
+ UnPrepare();
+
+ tsk_mutex_destroy(&m_hMutex);
+}
+
+int AudioCapture::Prepare(plugin_wasapi_producer_audio_t* wasapi, const tmedia_codec_t* codec)
+{
+ HRESULT hr = S_OK;
+ int ret = 0;
+ WAVEFORMATEX wfx = {0};
+#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
+ AudioClientProperties properties = {0};
+#endif
+ IMMDeviceEnumerator *pEnumerator = NULL;
+ LPCWSTR pwstrCaptureId = NULL;
+ IMMDevice *pDevice = NULL;
+
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bPrepared)
+ {
+ TSK_DEBUG_INFO("#WASAPI: Audio producer already prepared");
+ goto bail;
+ }
+
+ if(!wasapi || !codec)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(m_pDevice || m_pClient)
+ {
+ TSK_DEBUG_ERROR("Producer already prepared");
+ CHECK_HR(hr = E_FAIL);
+ }
+#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
+ pwstrCaptureId = GetDefaultAudioCaptureId(AudioDeviceRole::Communications);
+ if (NULL == pwstrCaptureId)
+ {
+ PLUGIN_WASAPI_ERROR("GetDefaultAudioCaptureId", HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+ CHECK_HR(hr = ActivateAudioInterface(pwstrCaptureId, __uuidof(IAudioClient2), (void**)&m_pDevice));
+
+ // Win8 or WP8 only
+ properties.cbSize = sizeof AudioClientProperties;
+ properties.eCategory = AudioCategory_Communications;
+ CHECK_HR(hr = m_pDevice->SetClientProperties(&properties));
+#else
+ CHECK_HR(hr = CoCreateInstance(
+ CLSID_MMDeviceEnumerator, NULL,
+ CLSCTX_ALL, IID_IMMDeviceEnumerator,
+ (void**)&pEnumerator));
+
+ CHECK_HR(hr = pEnumerator->GetDefaultAudioEndpoint(
+ eCapture, eCommunications, &pDevice));
+
+ CHECK_HR(hr = pDevice->Activate(
+ IID_IAudioClient, CLSCTX_ALL,
+ NULL, (void**)&m_pDevice));
+#endif
+
+
+ /* Set best format */
+ {
+ wfx.wFormatTag = WAVE_FORMAT_PCM;
+ wfx.nChannels = TMEDIA_PRODUCER(wasapi)->audio.channels;
+ wfx.nSamplesPerSec = TMEDIA_PRODUCER(wasapi)->audio.rate;
+ wfx.wBitsPerSample = TMEDIA_PRODUCER(wasapi)->audio.bits_per_sample;
+ wfx.nBlockAlign = (wfx.nChannels * wfx.wBitsPerSample/8);
+ wfx.nAvgBytesPerSec = (wfx.nSamplesPerSec * wfx.nBlockAlign);
+
+ PWAVEFORMATEX pwfxClosestMatch = NULL;
+ hr = m_pDevice->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &wfx, &pwfxClosestMatch);
+ if(hr != S_OK && hr != S_FALSE)
+ {
+ CHECK_HR(hr);
+ }
+
+ if(hr == S_FALSE)
+ {
+ if(!pwfxClosestMatch)
+ {
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ wfx.nChannels = pwfxClosestMatch->nChannels;
+ wfx.nSamplesPerSec = pwfxClosestMatch->nSamplesPerSec;
+#if 0
+ wfx.wBitsPerSample = pwfxClosestMatch->wBitsPerSample;
+#endif
+ wfx.nBlockAlign = wfx.nChannels * (wfx.wBitsPerSample / 8);
+ wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
+ // Request resampler
+ TMEDIA_PRODUCER(wasapi)->audio.rate = (uint32_t)wfx.nSamplesPerSec;
+ TMEDIA_PRODUCER(wasapi)->audio.bits_per_sample = (uint8_t)wfx.wBitsPerSample;
+ TMEDIA_PRODUCER(wasapi)->audio.channels = (uint8_t)wfx.nChannels;
+
+ TSK_DEBUG_INFO("Audio device format fallback: rate=%d, bps=%d, channels=%d", wfx.nSamplesPerSec, wfx.wBitsPerSample, wfx.nChannels);
+ }
+ if(pwfxClosestMatch)
+ {
+ CoTaskMemFree(pwfxClosestMatch);
+ }
+ }
+
+ m_nSourceFrameSizeInBytes = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
+ m_nBytesPerNotif = ((wfx.nAvgBytesPerSec * TMEDIA_PRODUCER(wasapi)->audio.ptime)/1000) * wfx.nChannels;
+
+ // Initialize
+ CHECK_HR(hr = m_pDevice->Initialize(
+ AUDCLNT_SHAREMODE_SHARED,
+ AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
+ (PLUGIN_WASAPI_PRODUCER_NOTIF_POS_COUNT * WASAPI_MILLIS_TO_100NS(TMEDIA_PRODUCER(wasapi)->audio.ptime)),
+ 0,
+ &wfx,
+ NULL));
+
+ REFERENCE_TIME DefaultDevicePeriod, MinimumDevicePeriod;
+ CHECK_HR(hr = m_pDevice->GetDevicePeriod(&DefaultDevicePeriod, &MinimumDevicePeriod));
+ TSK_DEBUG_INFO("#WASAPI(Capture): DefaultDevicePeriod=%lld ms, MinimumDevicePeriod=%lldms", WASAPI_100NS_TO_MILLIS(DefaultDevicePeriod), WASAPI_100NS_TO_MILLIS(MinimumDevicePeriod));
+
+ if(!m_hCaptureEvent)
+ {
+ if(!(m_hCaptureEvent = CreateEventEx(NULL, NULL, 0, EVENT_ALL_ACCESS)))
+ {
+ PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+ }
+ if(!m_hShutdownEvent)
+ {
+ if(!(m_hShutdownEvent = CreateEventEx(NULL, NULL, CREATE_EVENT_MANUAL_RESET, EVENT_ALL_ACCESS)))
+ {
+ PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+ }
+
+ CHECK_HR(hr = m_pDevice->SetEventHandle(m_hCaptureEvent));
+
+ CHECK_HR(hr = m_pDevice->GetService(__uuidof(IAudioCaptureClient), (void**)&m_pClient));
+
+ int packetperbuffer = (1000 / TMEDIA_PRODUCER(wasapi)->audio.ptime);
+ m_ring.chunck.size = (wfx.nSamplesPerSec * (wfx.wBitsPerSample >> 3) / packetperbuffer) * wfx.nChannels;
+ TSK_DEBUG_INFO("#WASAPI: Audio producer ring chunk size = %u", m_ring.chunck.size);
+ // allocate our chunck buffer
+ if(!(m_ring.chunck.buffer = tsk_realloc(m_ring.chunck.buffer, m_ring.chunck.size)))
+ {
+ TSK_DEBUG_ERROR("Failed to allocate new buffer");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ // create ringbuffer
+ m_ring.size = PLUGIN_WASAPI_PRODUCER_NOTIF_POS_COUNT * m_ring.chunck.size;
+ TSK_DEBUG_INFO("#WASAPI: Audio producer ring size = %u", m_ring.size);
+ if(!m_ring.buffer)
+ {
+ m_ring.buffer = speex_buffer_init(m_ring.size);
+ }
+ else
+ {
+ int sret;
+ if((sret = speex_buffer_resize(m_ring.buffer, m_ring.size)) < 0)
+ {
+ TSK_DEBUG_ERROR("speex_buffer_resize(%d) failed with error code=%d", m_ring.size, sret);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ }
+ if(!m_ring.buffer)
+ {
+ TSK_DEBUG_ERROR("Failed to create a new ring buffer with size = %d", m_ring.size);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ m_callback.fn = TMEDIA_PRODUCER(wasapi)->enc_cb.callback;
+ m_callback.pcData = TMEDIA_PRODUCER(wasapi)->enc_cb.callback_data;
+
+bail:
+ ret = SUCCEEDED(hr) ? 0 : -1;
+ if (pwstrCaptureId)
+ {
+ CoTaskMemFree((LPVOID)pwstrCaptureId);
+ }
+ if(ret != 0)
+ {
+ UnPrepare();
+ }
+ m_bPrepared = (ret == 0);
+
+ tsk_mutex_unlock(m_hMutex);
+
+ SafeRelease(&pEnumerator);
+ SafeRelease(&pDevice);
+
+ return ret;
+}
+
+int AudioCapture::UnPrepare()
+{
+	tsk_mutex_lock(m_hMutex);
+
+	// Dispose of the Win32 event handles.
+	if (m_hCaptureEvent)
+	{
+		CloseHandle(m_hCaptureEvent);
+		m_hCaptureEvent = NULL;
+	}
+	if (m_hShutdownEvent)
+	{
+		CloseHandle(m_hShutdownEvent);
+		m_hShutdownEvent = NULL;
+	}
+
+	// Release the COM objects acquired in Prepare().
+	if (m_pDevice)
+	{
+		m_pDevice->Release();
+		m_pDevice = NULL;
+	}
+	if (m_pClient)
+	{
+		m_pClient->Release();
+		m_pClient = NULL;
+	}
+
+	// Free the chunk buffer and destroy the ring buffer.
+	TSK_FREE(m_ring.chunck.buffer);
+	if (m_ring.buffer)
+	{
+		speex_buffer_destroy(m_ring.buffer);
+		m_ring.buffer = NULL;
+	}
+
+	// Forget the producer callback.
+	m_callback.fn = NULL;
+	m_callback.pcData = NULL;
+
+	m_bPrepared = false;
+
+	tsk_mutex_unlock(m_hMutex);
+
+	return 0;
+}
+
+// Starts the WASAPI capture device and (once) spawns the worker thread.
+// Returns 0 on success, -2 when not prepared or when starting fails.
+int AudioCapture::Start()
+{
+	// Declared before any "goto bail" so no jump skips an initialization
+	// (jumping over an initialized variable is ill-formed C++ / MSVC C2362).
+	HRESULT hr = S_OK;
+
+	tsk_mutex_lock(m_hMutex);
+
+	if (m_bStarted)
+	{
+		TSK_DEBUG_INFO("#WASAPI: Audio producer already started");
+		goto bail;
+	}
+	if (!m_bPrepared)
+	{
+		TSK_DEBUG_ERROR("Audio producer not prepared");
+		goto bail;
+	}
+
+	// The worker thread tests m_bStarted, so set it before spawning.
+	m_bStarted = true;
+	if (!m_ppTread[0] && tsk_thread_create(m_ppTread, AudioCapture::AsyncThread, this) != 0)
+	{
+		m_bStarted = false;
+		goto bail;
+	}
+
+	hr = m_pDevice->Start();
+	if (FAILED(hr))
+	{
+		PLUGIN_WASAPI_ERROR(hr);
+	}
+	else
+	{
+		m_bPaused = false;
+	}
+
+bail:
+	tsk_mutex_unlock(m_hMutex);
+	if (FAILED(hr))
+	{
+		// Stop() joins the worker thread, which locks m_hMutex after waking:
+		// call it only after releasing our lock so the join is not blocked
+		// by a lock we hold. Stop() also resets m_bStarted.
+		Stop();
+	}
+
+	return (m_bStarted ? 0 : -2);
+}
+
+// Stops the worker thread and the WASAPI device, then releases all resources.
+int AudioCapture::Stop()
+{
+	// Ask the worker thread to exit.
+	m_bStarted = false;
+
+	if (m_hShutdownEvent)
+	{
+		SetEvent(m_hShutdownEvent);
+	}
+
+	// Join BEFORE taking m_hMutex: AsyncThread locks this mutex right after
+	// waking up, so joining while holding it could deadlock.
+	if (m_ppTread[0])
+	{
+		tsk_thread_join(m_ppTread);
+	}
+
+	tsk_mutex_lock(m_hMutex);
+
+	if (m_pDevice)
+	{
+		m_pDevice->Stop();
+	}
+
+	// will be prepared again before next start()
+	// NOTE(review): UnPrepare() re-locks m_hMutex; this assumes tsk_mutex is
+	// recursive, as the original call order already did -- confirm.
+	UnPrepare();
+
+	tsk_mutex_unlock(m_hMutex);
+
+	return 0;
+}
+
+int AudioCapture::Pause()
+{
+	// Only flips the flag under the lock; m_bPaused is consumed elsewhere.
+	tsk_mutex_lock(m_hMutex);
+	m_bPaused = true;
+	tsk_mutex_unlock(m_hMutex);
+	return 0;
+}
+
+// Capture worker thread.
+// Waits on the capture-ready and shutdown events; on capture-ready it drains
+// all pending WASAPI packets into the ring buffer, then delivers fixed-size
+// chunks to the registered encoder callback.
+void* TSK_STDCALL AudioCapture::AsyncThread(void *pArg)
+{
+	HRESULT hr = S_OK;
+	BYTE* pbData = NULL;
+	UINT32 nFrames = 0;
+	DWORD dwFlags = 0;
+	UINT32 incomingBufferSize;
+	INT32 avail;
+	UINT32 nNextPacketSize;
+	AudioCapture* This = (AudioCapture*)pArg;
+
+	HANDLE eventHandles[] = {
+		This->m_hCaptureEvent, // WAIT_OBJECT0
+		This->m_hShutdownEvent // WAIT_OBJECT1
+	};
+
+	TSK_DEBUG_INFO("#WASAPI: __record_thread -- START");
+
+// Leave the while-loop with the mutex released.
+#define BREAK_WHILE tsk_mutex_unlock(This->m_hMutex); break;
+
+	while(This->m_bStarted && SUCCEEDED(hr))
+	{
+		DWORD waitResult = WaitForMultipleObjectsEx(SIZEOF_ARRAY(eventHandles), eventHandles, FALSE, INFINITE, FALSE);
+
+		tsk_mutex_lock(This->m_hMutex);
+
+		// Re-check under the lock: Stop() may have cleared the flag while we waited.
+		if(!This->m_bStarted)
+		{
+			BREAK_WHILE;
+		}
+
+		if(waitResult == WAIT_OBJECT_0 && This->m_callback.fn)
+		{
+			// Capture event fired: drain every packet currently queued.
+			hr = This->m_pClient->GetNextPacketSize(&nNextPacketSize);
+			while(SUCCEEDED(hr) && nNextPacketSize >0)
+			{
+				hr = This->m_pClient->GetBuffer(&pbData, &nFrames, &dwFlags, NULL, NULL);
+				if(SUCCEEDED(hr) && pbData && nFrames)
+				{
+					// Silent packets are dropped (not written to the ring).
+					if((dwFlags & AUDCLNT_BUFFERFLAGS_SILENT) != AUDCLNT_BUFFERFLAGS_SILENT)
+					{
+						incomingBufferSize = nFrames * This->m_nSourceFrameSizeInBytes;
+						speex_buffer_write(This->m_ring.buffer, pbData, incomingBufferSize);
+						avail = speex_buffer_get_available(This->m_ring.buffer);
+						// Deliver as many full ptime-sized chunks as the ring now holds.
+						while (This->m_bStarted && avail >= (INT32)This->m_ring.chunck.size)
+						{
+							avail -= speex_buffer_read(This->m_ring.buffer, This->m_ring.chunck.buffer, This->m_ring.chunck.size);
+#if 0
+							{
+								static FILE* f = fopen("./wasapi_producer.raw", "w+");
+								fwrite(This->m_ring.chunck.buffer, 1, This->m_ring.chunck.size, f);
+							}
+#endif
+							This->m_callback.fn(This->m_callback.pcData, This->m_ring.chunck.buffer, This->m_ring.chunck.size);
+						}
+					}
+
+					if (SUCCEEDED(hr))
+					{
+						hr = This->m_pClient->ReleaseBuffer(nFrames);
+					}
+					if (SUCCEEDED(hr))
+					{
+						hr = This->m_pClient->GetNextPacketSize(&nNextPacketSize);
+					}
+				}
+			}
+		}
+		else if(waitResult != WAIT_OBJECT_0)
+		{
+			// Shutdown event (WAIT_OBJECT_0 + 1) or wait failure: exit the loop.
+			BREAK_WHILE;
+		}
+
+		tsk_mutex_unlock(This->m_hMutex);
+	}// end-of-while
+
+	if (!SUCCEEDED(hr))
+	{
+		PLUGIN_WASAPI_ERROR(hr);
+	}
+
+	TSK_DEBUG_INFO("WASAPI: __record_thread(%s) -- STOP", SUCCEEDED(hr) ? "OK": "NOK");
+
+	return NULL;
+}
+
+
+
+
+
+
+
+//
+// WaveAPI producer object definition
+//
+/* constructor: called by the tsk object system when the producer is created */
+static tsk_object_t* plugin_wasapi_producer_audio_ctor(tsk_object_t * self, va_list * app)
+{
+	plugin_wasapi_producer_audio_t *wasapi = (plugin_wasapi_producer_audio_t*)self;
+	if(wasapi)
+	{
+		// Make sure COM is initialized before any WASAPI call.
+		WASAPIUtils::Startup();
+
+		/* init base */
+		tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(wasapi));
+		/* init self */
+
+		// NOTE(review): plain "new" throws std::bad_alloc instead of returning
+		// NULL, so this check is likely dead; also, returning tsk_null here
+		// would leak the base init performed above -- confirm intended.
+		wasapi->pAudioCapture = new AudioCapture();
+		if(!wasapi->pAudioCapture)
+		{
+			TSK_DEBUG_ERROR("Failed to create Audio capture device");
+			return tsk_null;
+		}
+	}
+	return self;
+}
+/* destructor: stops the producer, then tears down base and self state */
+static tsk_object_t* plugin_wasapi_producer_audio_dtor(tsk_object_t * self)
+{
+	plugin_wasapi_producer_audio_t *pSelf = (plugin_wasapi_producer_audio_t*)self;
+	if (pSelf)
+	{
+		/* stop before releasing anything the capture thread might touch */
+		plugin_wasapi_producer_audio_stop((tmedia_producer_t*)self);
+		/* deinit base */
+		tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(pSelf));
+		/* deinit self ("delete" on NULL is a no-op) */
+		delete pSelf->pAudioCapture;
+		pSelf->pAudioCapture = NULL;
+	}
+
+	return self;
+}
+/* object definition (tsk object system vtable) */
+static const tsk_object_def_t plugin_wasapi_producer_audio_def_s =
+{
+	sizeof(plugin_wasapi_producer_audio_t),	/* object size */
+	plugin_wasapi_producer_audio_ctor,		/* constructor */
+	plugin_wasapi_producer_audio_dtor,		/* destructor */
+	tdav_producer_audio_cmp,				/* comparator shared with the base audio producer */
+};
+/* plugin definition: registers this producer with the tinyMEDIA framework */
+static const tmedia_producer_plugin_def_t plugin_wasapi_producer_audio_plugin_def_s =
+{
+	&plugin_wasapi_producer_audio_def_s,
+
+	tmedia_audio,
+	"Microsoft Windows Audio Session API (WASAPI) producer",
+
+	/* producer interface, implemented above */
+	plugin_wasapi_producer_audio_set,
+	plugin_wasapi_producer_audio_prepare,
+	plugin_wasapi_producer_audio_start,
+	plugin_wasapi_producer_audio_pause,
+	plugin_wasapi_producer_audio_stop
+};
+/* exported handle consumed by the plugin loader */
+const tmedia_producer_plugin_def_t *plugin_wasapi_producer_audio_plugin_def_t = &plugin_wasapi_producer_audio_plugin_def_s;
diff --git a/plugins/pluginWASAPI/plugin_wasapi_tdav.cxx b/plugins/pluginWASAPI/plugin_wasapi_tdav.cxx
new file mode 100644
index 0000000..e7a15e9
--- /dev/null
+++ b/plugins/pluginWASAPI/plugin_wasapi_tdav.cxx
@@ -0,0 +1,21 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+// This file is used to avoid duplication for the .obj files
+#include "../../tinyDAV/src/audio/tdav_consumer_audio.c"
+#include "../../tinyDAV/src/audio/tdav_producer_audio.c" \ No newline at end of file
diff --git a/plugins/pluginWASAPI/plugin_wasapi_utils.cxx b/plugins/pluginWASAPI/plugin_wasapi_utils.cxx
new file mode 100644
index 0000000..bc2d45e
--- /dev/null
+++ b/plugins/pluginWASAPI/plugin_wasapi_utils.cxx
@@ -0,0 +1,81 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_wasapi_utils.h"
+
+#include "tsk_debug.h"
+
+bool WASAPIUtils::g_bStarted = false;
+
+// One-time COM initialization for this module (multithreaded apartment).
+// Subsequent calls are no-ops once initialization has succeeded.
+HRESULT WASAPIUtils::Startup()
+{
+	if (g_bStarted)
+	{
+		return S_OK;
+	}
+	HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+	// 0x80010106 (RPC_E_CHANGED_MODE) happens when called from managed code
+	// (e.g. Boghe) that already initialized COM with another threading model.
+	// More info: http://support.microsoft.com/kb/824480
+	if (SUCCEEDED(hr) || hr == 0x80010106)
+	{
+		hr = S_OK;
+	}
+	g_bStarted = SUCCEEDED(hr);
+	return hr;
+}
+
+// Intentionally a no-op: CoUninitialize() is never called and g_bStarted is
+// left set. NOTE(review): this is asymmetric with Startup() -- confirm the
+// process-lifetime COM initialization is deliberate.
+HRESULT WASAPIUtils::Shutdown()
+{
+	return S_OK;
+}
+
+// Logs an HRESULT as a human-readable system message, tagged with the
+// originating file/function/line (see the PLUGIN_WASAPI_ERROR macro).
+void WASAPIUtils::PrintError(const char* pcFileName, const char* pcFuncName, unsigned nLineNumber, HRESULT hr)
+{
+	CHAR message[1024] = {0};
+
+#if PLUGIN_WASAPI_UNDER_WINDOWS_RT
+	// FormatMessageA not allowed on the Store
+	static WCHAR wBuff[1024] = {0}; // NOTE(review): static buffer -> not thread-safe
+	FormatMessageW(
+		FORMAT_MESSAGE_FROM_SYSTEM,
+		tsk_null,
+		hr,
+		0,
+		wBuff,
+		(sizeof(wBuff) / sizeof(wBuff[0])) - 1, // nSize is in WCHARs, not bytes
+		tsk_null);
+	WideCharToMultiByte(CP_UTF8, WC_ERR_INVALID_CHARS, wBuff, (int)wcslen(wBuff), message, sizeof(message) - 1, NULL, NULL);
+#else
+	// Do NOT pass FORMAT_MESSAGE_ALLOCATE_BUFFER here: with that flag,
+	// FormatMessage treats lpBuffer as an LPSTR* that receives a
+	// LocalAlloc'ed pointer, so handing it a stack array would corrupt the
+	// array contents and leak the allocated buffer.
+#ifdef _WIN32_WCE
+	FormatMessage
+#else
+	FormatMessageA
+#endif
+	(
+		FORMAT_MESSAGE_FROM_SYSTEM,
+		tsk_null,
+		hr,
+		0,
+		message,
+		sizeof(message) - 1,
+		tsk_null);
+#endif
+
+	TSK_DEBUG_ERROR("[WASAPI] File:%s\n Function=%s\n Line:%u\n Message:%s", pcFileName, pcFuncName, nLineNumber, message);
+}
diff --git a/plugins/pluginWASAPI/plugin_wasapi_utils.h b/plugins/pluginWASAPI/plugin_wasapi_utils.h
new file mode 100644
index 0000000..218a7f8
--- /dev/null
+++ b/plugins/pluginWASAPI/plugin_wasapi_utils.h
@@ -0,0 +1,53 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WASAPI_UTILS_H
+#define PLUGIN_WASAPI_UTILS_H
+
+#include "plugin_wasapi_config.h"
+
+#include <Windows.h>
+
+// Release a COM interface through a pointer-to-pointer and NULL it out.
+// NOTE(review): *ppT is checked for NULL but ppT itself must be valid.
+#undef SafeRelease
+#define SafeRelease(ppT) \
+{ \
+    if (*ppT) \
+    { \
+        (*ppT)->Release(); \
+        *ppT = NULL; \
+    } \
+}
+
+#undef CHECK_HR
+// CHECK_HR(x) evaluates (x) exactly once (into __hr__); a naive
+// "if (FAILED(x)) TSK_DEBUG_ERROR(x)" pattern would evaluate a function
+// argument twice. On failure it logs and jumps to the caller's "bail" label.
+#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
+
+// Logs an HRESULT with its origin (file/function/line).
+#define PLUGIN_WASAPI_ERROR(hr) WASAPIUtils::PrintError(__FILE__, __FUNCTION__, __LINE__, (hr))
+
+// Static helpers shared by the WASAPI producer/consumer (COM lifetime, error logging).
+class WASAPIUtils
+{
+public:
+	static HRESULT Startup();
+	static HRESULT Shutdown();
+	static void PrintError(const char* pcFileName, const char* pcFuncName, unsigned nLineNumber, HRESULT hr);
+
+private:
+	static bool g_bStarted;	// true once COM has been successfully initialized
+};
+
+#endif /* PLUGIN_WASAPI_UTILS_H */
diff --git a/plugins/pluginWASAPI/version.rc b/plugins/pluginWASAPI/version.rc
new file mode 100644
index 0000000..6683a78
--- /dev/null
+++ b/plugins/pluginWASAPI/version.rc
@@ -0,0 +1,102 @@
+// Microsoft Visual C++ generated resource script.
+//
+// #include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""afxres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 2.0.0.1156
+ PRODUCTVERSION 2.0.0.1156
+ FILEFLAGSMASK 0x17L
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904b0"
+ BEGIN
+ VALUE "CompanyName", "Doubango Telecom"
+ VALUE "FileDescription", "Doubango IMS Framework WASAPI Plugin"
+ VALUE "FileVersion", "2.0.0.1156"
+ VALUE "InternalName", "pluginWASAPI.dll"
+ VALUE "LegalCopyright", "(c) 2010-2013 Doubango Telecom. All rights reserved."
+ VALUE "OriginalFilename", "pluginWASAPI.dll"
+ VALUE "ProductName", "Doubango IMS Framework WASAPI Plugin"
+ VALUE "ProductVersion", "2.0.0.1156"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
diff --git a/plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx b/plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx
new file mode 100644
index 0000000..dcb9005
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx
@@ -0,0 +1,157 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_audio_dsp_utils.h"
+
+#include "tinymedia/tmedia_resampler.h"
+#include "tinymedia/tmedia_denoise.h"
+
+#include "tsk_plugin.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+
+#if defined(_MSC_VER)
+# pragma comment(lib, "wmcodecdspuuid")
+# pragma comment(lib, "Msdmo")
+# pragma comment(lib, "Dmoguids")
+# pragma comment(lib, "amstrmid")
+# pragma comment(lib, "Mfplat")
+# pragma comment(lib, "mfuuid")
+#endif
+
+#if !defined(PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE)
+# define PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE 1
+#endif
+#if !defined(PLUGIN_AUDIO_DSP_DENOISER_ENABLE)
+# define PLUGIN_AUDIO_DSP_DENOISER_ENABLE 0 /* Filter mode doesn't support AEC */
+#endif
+
+extern const tmedia_resampler_plugin_def_t *plugin_audio_dsp_resampler_plugin_def_t;
+extern const tmedia_denoise_plugin_def_t *plugin_audio_dsp_denoise_plugin_def_t;
+
+PLUGIN_AUDIO_DSP_BEGIN_DECLS /* BEGIN */
+PLUGIN_AUDIO_DSP_API int __plugin_get_def_count();
+PLUGIN_AUDIO_DSP_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+PLUGIN_AUDIO_DSP_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+PLUGIN_AUDIO_DSP_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+PLUGIN_AUDIO_DSP_END_DECLS /* END */
+
+// Standard DLL entry point: this plugin needs no per-process or per-thread
+// attach/detach work, so every notification is simply acknowledged.
+BOOL APIENTRY DllMain(HMODULE hModule, DWORD ul_reason_for_call, LPVOID lpReserved)
+{
+	(void)hModule;
+	(void)ul_reason_for_call;
+	(void)lpReserved;
+	return TRUE;
+}
+
+
+// Contiguous zero-based indexes for the plugin definitions compiled into this
+// module; PLUGIN_INDEX_COUNT therefore equals the number of enabled features.
+typedef enum PLUGIN_INDEX_E
+{
+#if PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE
+	PLUGIN_INDEX_RESAMPLER,
+#endif
+#if PLUGIN_AUDIO_DSP_DENOISER_ENABLE
+	PLUGIN_INDEX_DENOISER,
+#endif
+
+	PLUGIN_INDEX_COUNT
+}
+PLUGIN_INDEX_T;
+
+
+// Number of plugin definitions exported by this module (tracks the
+// PLUGIN_AUDIO_DSP_*_ENABLE switches via the PLUGIN_INDEX_E enum).
+int __plugin_get_def_count()
+{
+	return PLUGIN_INDEX_COUNT;
+}
+
+// Maps a plugin index to its definition type (resampler or denoiser);
+// returns tsk_plugin_def_type_none for out-of-range or disabled indexes.
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+	switch(index)
+	{
+#if PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE
+		case PLUGIN_INDEX_RESAMPLER:
+			{
+				return tsk_plugin_def_type_resampler;
+			}
+#endif
+#if PLUGIN_AUDIO_DSP_DENOISER_ENABLE
+		case PLUGIN_INDEX_DENOISER:
+			{
+				return tsk_plugin_def_type_denoiser;
+			}
+#endif
+	}
+	TSK_DEBUG_ERROR("No plugin at index %d", index);
+	return tsk_plugin_def_type_none;
+}
+
+// Every plugin exposed by this module operates on audio, so all valid
+// indexes map to the same media type; invalid indexes yield "none".
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+	switch (index)
+	{
+#if PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE
+	case PLUGIN_INDEX_RESAMPLER:
+#endif
+#if PLUGIN_AUDIO_DSP_DENOISER_ENABLE
+	case PLUGIN_INDEX_DENOISER:
+#endif
+		return tsk_plugin_def_media_type_audio;
+	default:
+		break;
+	}
+
+	TSK_DEBUG_ERROR("No plugin at index %d", index);
+	return tsk_plugin_def_media_type_none;
+}
+
+// Returns the plugin definition registered at the given index, or tsk_null
+// for out-of-range or disabled indexes.
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+	switch(index)
+	{
+#if PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE
+		case PLUGIN_INDEX_RESAMPLER:
+			{
+				return plugin_audio_dsp_resampler_plugin_def_t;
+			}
+#endif
+#if PLUGIN_AUDIO_DSP_DENOISER_ENABLE
+		case PLUGIN_INDEX_DENOISER:
+			{
+				return plugin_audio_dsp_denoise_plugin_def_t;
+			}
+#endif
+	}
+
+	TSK_DEBUG_ERROR("No plugin at index %d", index);
+	return tsk_null;
+}
diff --git a/plugins/pluginWinAudioDSP/pluginWinAudioDSP.vcproj b/plugins/pluginWinAudioDSP/pluginWinAudioDSP.vcproj
new file mode 100644
index 0000000..0df717f
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/pluginWinAudioDSP.vcproj
@@ -0,0 +1,231 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9.00"
+ Name="pluginWinAudioDSP"
+ ProjectGUID="{3C720D1E-E3DC-408E-99EF-A5AA2B81C520}"
+ RootNamespace="pluginWinAudioDSP"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="196613"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories="..\..\thirdparties\common\include;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;PLUGIN_AUDIO_DSP_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="2"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories="..\..\thirdparties\common\include;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;PLUGIN_AUDIO_DSP_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="0"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="1"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\dllmain_audio_dsp.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_audio_dsp_denoiser.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_audio_dsp_mediabuffer.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_audio_dsp_resampler.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_audio_dsp_utils.cxx"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\plugin_audio_dsp_config.h"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_audio_dsp_mediabuffer.h"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_audio_dsp_utils.h"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ <File
+ RelativePath=".\version.rc"
+ >
+ </File>
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h b/plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h
new file mode 100644
index 0000000..7730bc8
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h
@@ -0,0 +1,75 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_AUDIO_DSP_CONFIG_H
+#define PLUGIN_AUDIO_DSP_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+#	define PLUGIN_AUDIO_DSP_UNDER_WINDOWS	1
+#	if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP || WINAPI_FAMILY == WINAPI_FAMILY_APP)
+#		define PLUGIN_AUDIO_DSP_UNDER_WINDOWS_RT		1
+#	endif
+#endif
+
+// DLL decoration: dllexport while building the plugin (PLUGIN_AUDIO_DSP_EXPORTS
+// defined), dllimport for consumers, empty on non-Windows platforms.
+#if (PLUGIN_AUDIO_DSP_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(PLUGIN_AUDIO_DSP_EXPORTS)
+# 	define PLUGIN_AUDIO_DSP_API		__declspec(dllexport)
+# 	define PLUGIN_AUDIO_DSP_GEXTERN extern __declspec(dllexport)
+#elif (PLUGIN_AUDIO_DSP_UNDER_WINDOWS || defined(__SYMBIAN32__)) && !defined(PLUGIN_AUDIO_DSP_IMPORTS_IGNORE)
+# 	define PLUGIN_AUDIO_DSP_API __declspec(dllimport)
+# 	define PLUGIN_AUDIO_DSP_GEXTERN __declspec(dllimport)
+#else
+#	define PLUGIN_AUDIO_DSP_API
+#	define PLUGIN_AUDIO_DSP_GEXTERN	extern
+#endif
+
+// x86
+#if defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+#	define PLUGIN_AUDIO_DSP_UNDER_X86	1
+#endif
+
+// Guards against C++ name mangling
+#ifdef __cplusplus
+#	define PLUGIN_AUDIO_DSP_BEGIN_DECLS extern "C" {
+#	define PLUGIN_AUDIO_DSP_END_DECLS }
+#else
+#	define PLUGIN_AUDIO_DSP_BEGIN_DECLS
+#	define PLUGIN_AUDIO_DSP_END_DECLS
+#endif
+
+// MSVC (C mode) lacks the C99 "inline" keyword; map it to __inline and relax
+// the CRT "secure" warnings for the legacy APIs used across the project.
+#ifdef _MSC_VER
+#	define inline __inline
+#	define _CRT_SECURE_NO_WARNINGS
+#	define _ALLOW_KEYWORD_MACROS
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+	#include <config.h>
+#endif
+
+#endif // PLUGIN_AUDIO_DSP_CONFIG_H
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx b/plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx
new file mode 100644
index 0000000..574c5ac
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx
@@ -0,0 +1,402 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+// MS Voice Capture DSP: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819492(v=vs.85).aspx
+// Features:
+// - Acoustic echo cancellation (AEC)
+// - Microphone array processing
+// - Noise suppression
+// - Automatic gain control
+// - Voice activity detection
+#include "plugin_audio_dsp_utils.h"
+#include "plugin_audio_dsp_mediabuffer.h"
+
+#include "tinymedia/tmedia_denoise.h"
+#include "tinymedia/tmedia_defaults.h"
+
+#include "tsk_string.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+#include <Wmcodecdsp.h>
+#include <Dmo.h>
+
+// DMO stream indices used when feeding the Voice Capture DSP in filter mode.
+static const UINT32 g_nMicrophoneStreamIndex = 0;
+// NOTE(review): identical to the microphone stream index; the far-end (speaker)
+// reference is normally a distinct stream — confirm before wiring it to ProcessInput().
+static const UINT32 g_nSpeakerStreamIndex = 0;
+
+// Fixed PCM format assumed for both capture and playback sides.
+static const UINT32 g_nBitsPerSample = 16;
+static const UINT32 g_nChannles = 1; // FIXME
+static const UINT32 g_nFrameDuration = 20; // FIXME
+
+/** MS Voice Capture DSP denoiser context (AEC/AGC/NS/VAD). */
+typedef struct plugin_audio_dsp_denoise_s
+{
+	TMEDIA_DECLARE_DENOISE;
+
+	bool bOpened;					// set by _open(), cleared by _close()
+
+	LONGLONG rtStart;				// running timestamp of the next input sample (100-ns units)
+	UINT64 rtDuration;				// duration of one frame (100-ns units)
+
+	uint32_t echo_tail;				// AEC tail length in milliseconds
+	tsk_size_t playback_size_samples;	// samples per playback frame
+	tsk_size_t playback_size_bytes;		// bytes per playback frame
+	tsk_size_t playback_channels;
+	tsk_size_t record_size_samples;		// samples per capture frame
+	tsk_size_t record_size_bytes;		// bytes per capture frame
+	tsk_size_t record_channels;
+
+	IMediaObject* pInst;			// the Voice Capture DSP (CLSID_CWMAudioAEC)
+	IMediaBuffer *pBufferIn;		// reused input buffer for ProcessInput()
+	IMediaBuffer *pBufferOut;		// reused output buffer for ProcessOutput()
+}
+plugin_audio_dsp_denoise_t;
+
+/* Runtime parameter setter. Only "echo-tail" (int32, milliseconds) is supported;
+* when the DMO already exists the new value is applied immediately.
+* @return 0 on success, -1 on invalid/unknown parameter. */
+static int plugin_audio_dsp_denoise_set(tmedia_denoise_t* _self, const tmedia_param_t* param)
+{
+	plugin_audio_dsp_denoise_t *self = (plugin_audio_dsp_denoise_t *)_self;
+	if(!self || !param)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if(param->value_type == tmedia_pvt_int32)
+	{
+		if(tsk_striequals(param->key, "echo-tail"))
+		{
+			_self->echo_tail = *((int32_t*)param->value);
+			TSK_DEBUG_INFO("ms_voice_dsp_set_echo_tail(%d)", _self->echo_tail);
+			if(self->pInst)
+			{
+				IPropertyStore* pPropStore = NULL;
+				HRESULT hr = self->pInst->QueryInterface(IID_PPV_ARGS(&pPropStore));
+				if(SUCCEEDED(hr))
+				{
+					// BUGFIX: MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH expects VT_I4 (as used by
+					// _open()); it was set with VT_UI4. Also removed an unused DMO_MEDIA_TYPE local.
+					PROPVARIANT var = {0};
+					var.vt = VT_I4;
+					var.lVal = _self->echo_tail;
+					hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH, var);
+					if(FAILED(hr))
+					{
+						// Best-effort: log instead of failing the whole set() call.
+						TSK_DEBUG_ERROR("SetValue(MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH) failed: 0x%08x", hr);
+					}
+				}
+				SafeRelease(&pPropStore);
+			}
+			return 0;
+		}
+	}
+	return -1;
+}
+
+/* Creates and configures the MS Voice Capture DSP (CLSID_CWMAudioAEC) in filter mode.
+* Frame sizes are recomputed from g_nFrameDuration/g_nChannles, so the *_frame_size_samples
+* arguments are currently unused.
+* @param record_sampling_rate capture rate in Hz.
+* @param playback_sampling_rate playback rate in Hz.
+* @return 0 on success, -1 otherwise. */
+static int plugin_audio_dsp_denoise_open(tmedia_denoise_t* self, uint32_t record_frame_size_samples, uint32_t record_sampling_rate, uint32_t playback_frame_size_samples, uint32_t playback_sampling_rate)
+{
+	plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+
+	HRESULT hr = S_OK;
+	DMO_MEDIA_TYPE mt = {0};
+	PROPVARIANT var = {0};
+	IPropertyStore* pPropStore = NULL;
+
+	TSK_DEBUG_INFO("[MS Voice Capture DSP] AEC_ENABLED=%d ECHO_TAIL=%d,\nAGC_ENABLED=%d,\nNOISE_SUPP_ENABLED=%d,\nVAD_ENABLED=%d",
+		self->echo_supp_enabled, self->echo_tail,
+		self->agc_enabled,
+		self->noise_supp_enabled,
+		self->vad_enabled
+		);
+
+	if(denoiser->bOpened)
+	{
+		TSK_DEBUG_ERROR("Denoiser already opened");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	CHECK_HR(hr = AudioDSPUtils::MoInitMediaType(
+		record_sampling_rate,
+		g_nBitsPerSample,
+		g_nChannles,
+		&mt));
+
+	CHECK_HR(hr = CoCreateInstance(CLSID_CWMAudioAEC, NULL,
+		CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&denoiser->pInst)));
+	CHECK_HR(hr = denoiser->pInst->QueryInterface(IID_PPV_ARGS(&pPropStore)));
+
+	// If the input format does not match the output format, the DMO automatically performs sample-rate conversion.
+	CHECK_HR(hr = denoiser->pInst->SetInputType(0, &mt, 0));
+	CHECK_HR(hr = denoiser->pInst->SetOutputType(0, &mt, 0));
+
+	// Enables the application to override the default settings on various properties of the Voice Capture DSP
+	// http://msdn.microsoft.com/en-us/library/windows/desktop/ff819422(v=vs.85).aspx
+	var.vt = VT_BOOL;
+	var.boolVal = VARIANT_TRUE;
+	CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATURE_MODE, var));
+
+	// Switch to filter mode: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819410(v=vs.85).aspx
+	var.vt = VT_BOOL;
+	var.boolVal = VARIANT_FALSE; /* VARIANT_FALSE: Filter, VARIANT_TRUE: Source */
+	CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_DMO_SOURCE_MODE, var));
+
+	// Enable AEC
+	if(self->echo_supp_enabled)
+	{
+		// Enable AEC: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819427(v=vs.85).aspx
+		var.vt = VT_I4;
+		var.lVal = SINGLE_CHANNEL_AEC;
+		CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_SYSTEM_MODE, var));
+
+		// Echo Tail (milliseconds): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819414(v=vs.85).aspx
+		if(!self->echo_tail)
+		{
+			self->echo_tail = tmedia_defaults_get_echo_tail();
+		}
+		var.vt = VT_I4;
+		var.lVal = self->echo_tail ? self->echo_tail : 256; // 256ms default when no tail is configured
+		CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH, var));
+	}
+
+	// Automatic Gain Control (AGC): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819412(v=vs.85).aspx
+	// BUGFIX: this property was set twice with identical values; the duplicate block was removed.
+	var.vt = VT_BOOL;
+	var.boolVal = self->agc_enabled ? VARIANT_TRUE : VARIANT_FALSE;
+	CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_AGC, var));
+
+	// Noise suppression (NS): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819420(v=vs.85).aspx
+	var.vt = VT_I4;
+	var.lVal = self->noise_supp_enabled ? 1 : 0;
+	CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_NS, var));
+
+	// Voice Activity Detection (VAD): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819421(v=vs.85).aspx
+	var.vt = VT_I4;
+	var.lVal = self->vad_enabled ? AEC_VAD_FOR_SILENCE_SUPPRESSION : AEC_VAD_DISABLED;
+	CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_VAD, var));
+
+	// Recommended to allocate resources
+	CHECK_HR(hr = denoiser->pInst->AllocateStreamingResources()); // FIXME: returns E_FAIL
+
+	// Derive per-frame sizes (samples then bytes) for both directions.
+	denoiser->record_channels = g_nChannles;
+	denoiser->record_size_samples = ((record_sampling_rate * g_nFrameDuration) / 1000) << (denoiser->record_channels == 2 ? 1 : 0);
+	denoiser->record_size_bytes = (denoiser->record_size_samples * (g_nBitsPerSample >> 3));
+
+	denoiser->playback_channels = g_nChannles;
+	denoiser->playback_size_samples = ((playback_sampling_rate * g_nFrameDuration) / 1000) << (denoiser->playback_channels == 2 ? 1 : 0);
+	denoiser->playback_size_bytes = (denoiser->playback_size_samples * (g_nBitsPerSample >> 3));
+
+	denoiser->rtStart = 0;
+	denoiser->rtDuration = PLUGIN_AUDIO_DSP_MILLIS_TO_100NS(g_nFrameDuration); // milliseconds -> 100ns
+
+bail:
+	denoiser->bOpened = SUCCEEDED(hr);
+
+	MoFreeMediaType(&mt);
+	SafeRelease(&pPropStore);
+
+	return denoiser->bOpened ? 0 : -1;
+}
+
+// playback = "stream 1"
+// /!\Thread safety: could be called at the same time as plugin_audio_dsp_denoise_process_record()
+/* Receives a far-end (about-to-be-played) frame used as the AEC reference.
+* @return 0 on success, -1 otherwise. */
+static int plugin_audio_dsp_denoise_echo_playback(tmedia_denoise_t* self, const void* echo_frame, uint32_t echo_frame_size_bytes)
+{
+	plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+
+	HRESULT hr = S_OK;
+
+	if(!self || !echo_frame || !echo_frame_size_bytes)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(!denoiser->bOpened)
+	{
+		TSK_DEBUG_ERROR("Denoiser not opened");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	// BUGFIX: this is the *playback* stream, so validate against the playback frame
+	// size (was record_size_bytes, which differs when record/playback rates differ).
+	if(denoiser->playback_size_bytes != echo_frame_size_bytes)
+	{
+		TSK_DEBUG_ERROR("Size mismatch: %u<>%u", denoiser->playback_size_bytes, echo_frame_size_bytes);
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+
+	// NOTE(review): the frame is validated but never submitted to the DMO (no
+	// ProcessInput() on the speaker stream); AEC cannot converge in filter mode
+	// without the reference signal — confirm whether this is intentional.
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+
+// record = "stream 0"
+// /!\Thread safety: could be called at the same time as plugin_audio_dsp_denoise_echo_playback()
+// Feeds one captured (near-end) frame into the DMO. The denoised output is not
+// fetched here (no ProcessOutput()); the frame is processed in place by the caller's
+// pipeline. Returns 0 on success, -1 otherwise.
+static int plugin_audio_dsp_denoise_process_record(tmedia_denoise_t* self, void* audio_frame, uint32_t audio_frame_size_bytes, tsk_bool_t* silence_or_noise)
+{
+	plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+
+	HRESULT hr = S_OK;
+	BYTE* pBufferInPtr = NULL;
+	DWORD dwBufferInSize = 0;
+
+	if(!self || !audio_frame || !audio_frame_size_bytes || !silence_or_noise)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(!denoiser->bOpened)
+	{
+		TSK_DEBUG_ERROR("Denoiser not opened");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	if(denoiser->record_size_bytes != audio_frame_size_bytes)
+	{
+		TSK_DEBUG_ERROR("Size mismatch: %u<>%u", denoiser->record_size_bytes, audio_frame_size_bytes);
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+
+	// Lazily create the input IMediaBuffer, growing it if a previous one is too small.
+	if(!denoiser->pBufferIn)
+	{
+		CHECK_HR(hr = AudioDSPMediaBuffer::Create(denoiser->record_size_bytes, &denoiser->pBufferIn));
+	}
+	else
+	{
+		DWORD dwMaxLength = 0;
+		CHECK_HR(hr = denoiser->pBufferIn->GetMaxLength(&dwMaxLength));
+		if(dwMaxLength < denoiser->record_size_bytes)
+		{
+			SafeRelease(&denoiser->pBufferIn);
+			CHECK_HR(hr = AudioDSPMediaBuffer::Create(denoiser->record_size_bytes, &denoiser->pBufferIn));
+		}
+	}
+
+	// Get memory pointer to the input buffer
+	CHECK_HR(hr = denoiser->pBufferIn->GetBufferAndLength(&pBufferInPtr, NULL));
+	// Copy data
+	dwBufferInSize = TSK_MIN(audio_frame_size_bytes, denoiser->record_size_bytes);
+	memcpy(pBufferInPtr, audio_frame, dwBufferInSize);
+	CHECK_HR(hr = denoiser->pBufferIn->SetLength(dwBufferInSize));
+
+	// Process input
+	// Timestamp flags are disabled (see commented flags below); rtStart/rtDuration are still passed.
+	hr = denoiser->pInst->ProcessInput(
+		g_nMicrophoneStreamIndex,
+		denoiser->pBufferIn,
+		(/*DMO_INPUT_DATA_BUFFERF_TIME | DMO_INPUT_DATA_BUFFERF_TIMELENGTH*/0),
+		denoiser->rtStart,
+		denoiser->rtDuration);
+	// DMO_E_NOTACCEPTING means the DMO wants its output drained first; treated as non-fatal.
+	if(hr == DMO_E_NOTACCEPTING)
+	{
+		hr = S_OK;
+	}
+	CHECK_HR(hr);
+
+	// Advance the running timestamp by one frame.
+	denoiser->rtStart += denoiser->rtDuration;
+
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+/* Playback-side denoising: intentionally a no-op.
+* Denoising before playback is not required because all Doubango clients
+* already apply noise suppression on the sending side. Always returns 0. */
+static int plugin_audio_dsp_denoise_process_playback(tmedia_denoise_t* self, void* audio_frame, uint32_t audio_frame_size_bytes)
+{
+	(void)self;
+	(void)audio_frame;
+	(void)audio_frame_size_bytes;
+	return 0;
+}
+
+/* Marks the denoiser as closed and releases the DMO instance and both media buffers.
+* @return 0 on success, -1 when self is NULL. */
+static int plugin_audio_dsp_denoise_close(tmedia_denoise_t* self)
+{
+	plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+	if(!denoiser)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	denoiser->bOpened = false;
+	// Drop all COM references; SafeRelease() also NULLs the pointers.
+	SafeRelease(&denoiser->pBufferIn);
+	SafeRelease(&denoiser->pBufferOut);
+	SafeRelease(&denoiser->pInst);
+	return 0;
+}
+
+
+
+//
+// MS Voice Capture DSP Plugin definition
+//
+
+/* constructor: ensures COM/MF are started, then initializes the base denoiser. */
+static tsk_object_t* plugin_audio_dsp_denoise_ctor(tsk_object_t * self, va_list * app)
+{
+	plugin_audio_dsp_denoise_t *denoise = (plugin_audio_dsp_denoise_t*)self;
+	if(denoise)
+	{
+		AudioDSPUtils::Startup();
+
+		/* init base */
+		tmedia_denoise_init(TMEDIA_DENOISE(denoise));
+		/* init self */
+	}
+	return self;
+}
+/* destructor: deinitializes the base and releases COM objects (idempotent: _close()
+* may already have released them; SafeRelease() handles NULL pointers). */
+static tsk_object_t* plugin_audio_dsp_denoise_dtor(tsk_object_t * self)
+{
+	plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t*)self;
+	if(denoiser)
+	{
+		/* deinit base */
+		tmedia_denoise_deinit(TMEDIA_DENOISE(denoiser));
+		/* deinit self */
+		SafeRelease(&denoiser->pBufferIn);
+		SafeRelease(&denoiser->pBufferOut);
+		SafeRelease(&denoiser->pInst);
+
+		TSK_DEBUG_INFO("*** MS Voice Capture DSP destroyed ***");
+	}
+
+	return self;
+}
+/* object definition (size + ctor/dtor; no comparator) */
+static const tsk_object_def_t plugin_audio_dsp_denoise_def_s =
+{
+	sizeof(plugin_audio_dsp_denoise_t),
+	plugin_audio_dsp_denoise_ctor,
+	plugin_audio_dsp_denoise_dtor,
+	tsk_null,
+};
+/* plugin definition: vtable exported to the tinyMEDIA denoiser registry */
+static const tmedia_denoise_plugin_def_t plugin_audio_dsp_denoise_plugin_def_s =
+{
+	&plugin_audio_dsp_denoise_def_s,
+
+	"MS Voice Capture DSP",	/* http://msdn.microsoft.com/en-us/library/windows/desktop/ff819492(v=vs.85).aspx */
+
+	plugin_audio_dsp_denoise_set,
+	plugin_audio_dsp_denoise_open,
+	plugin_audio_dsp_denoise_echo_playback,
+	plugin_audio_dsp_denoise_process_record,
+	plugin_audio_dsp_denoise_process_playback,
+	plugin_audio_dsp_denoise_close,
+};
+const tmedia_denoise_plugin_def_t *plugin_audio_dsp_denoise_plugin_def_t = &plugin_audio_dsp_denoise_plugin_def_s;
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx b/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx
new file mode 100644
index 0000000..34e5b4d
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx
@@ -0,0 +1,148 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_audio_dsp_mediabuffer.h"
+
+// Private constructor: allocates the fixed-capacity byte store; on allocation
+// failure 'hr' is set to E_OUTOFMEMORY (the object is still constructed).
+// NOTE(review): standard 'new[]' throws std::bad_alloc rather than returning NULL,
+// so the NULL check only fires with a non-throwing allocator — confirm build settings.
+AudioDSPMediaBuffer::AudioDSPMediaBuffer(DWORD cbMaxLength, HRESULT& hr) :
+    m_nRefCount(1),
+    m_cbMaxLength(cbMaxLength),
+    m_cbLength(0),
+    m_pbData(NULL)
+{
+    m_pbData = new BYTE[cbMaxLength];
+    if (!m_pbData)
+    {
+        hr = E_OUTOFMEMORY;
+    }
+}
+
+// Destructor: frees the byte store. delete[] on NULL is a defined no-op,
+// so no guard is needed.
+AudioDSPMediaBuffer::~AudioDSPMediaBuffer()
+{
+    delete [] m_pbData;
+}
+
+
+// Function to create a new IMediaBuffer object and return
+// an AddRef'd interface pointer.
+// On success *ppBuffer holds one reference owned by the caller; the local
+// reference from the constructor is released below, leaving refcount == 1.
+HRESULT AudioDSPMediaBuffer::Create(long cbMaxLen, IMediaBuffer **ppBuffer)
+{
+    HRESULT hr = S_OK;
+    AudioDSPMediaBuffer *pBuffer = NULL;
+
+    if (ppBuffer == NULL)
+    {
+        return E_POINTER;
+    }
+
+    // The constructor reports allocation failure of the internal store through 'hr'.
+    pBuffer = new AudioDSPMediaBuffer(cbMaxLen, hr);
+
+    // NOTE(review): standard 'new' throws on OOM, making this NULL check dead code
+    // unless a non-throwing allocator is in effect — confirm build settings.
+    if (pBuffer == NULL)
+    {
+        hr = E_OUTOFMEMORY;
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        *ppBuffer = pBuffer;
+        (*ppBuffer)->AddRef();
+    }
+
+    // Drop the construction-time reference; destroys the object on failure paths.
+    if (pBuffer)
+    {
+        pBuffer->Release();
+    }
+    return hr;
+}
+
+// IUnknown methods.
+// Supports IID_IMediaBuffer and IID_IUnknown only.
+STDMETHODIMP AudioDSPMediaBuffer::QueryInterface(REFIID riid, void **ppv)
+{
+    if (ppv == NULL)
+    {
+        return E_POINTER;
+    }
+    if (riid != IID_IMediaBuffer && riid != IID_IUnknown)
+    {
+        *ppv = NULL;
+        return E_NOINTERFACE;
+    }
+    *ppv = static_cast<IMediaBuffer *>(this);
+    AddRef();
+    return S_OK;
+}
+
+// IUnknown: atomic reference-count increment; returns the new count.
+STDMETHODIMP_(ULONG) AudioDSPMediaBuffer::AddRef()
+{
+    return InterlockedIncrement(&m_nRefCount);
+}
+
+// IUnknown: atomic reference-count decrement; destroys the object when it reaches zero.
+STDMETHODIMP_(ULONG) AudioDSPMediaBuffer::Release()
+{
+    const LONG nRef = InterlockedDecrement(&m_nRefCount);
+    if (nRef == 0)
+    {
+        // Members are invalid past this point; return the cached count.
+        delete this;
+    }
+    return nRef;
+}
+
+// IMediaBuffer methods.
+// Records the number of valid bytes; rejects lengths above the fixed capacity.
+STDMETHODIMP AudioDSPMediaBuffer::SetLength(DWORD cbLength)
+{
+    if (cbLength <= m_cbMaxLength)
+    {
+        m_cbLength = cbLength;
+        return S_OK;
+    }
+    return E_INVALIDARG;
+}
+
+// Reports the fixed capacity chosen at construction time.
+STDMETHODIMP AudioDSPMediaBuffer::GetMaxLength(DWORD *pcbMaxLength)
+{
+    if (!pcbMaxLength)
+    {
+        return E_POINTER;
+    }
+    *pcbMaxLength = m_cbMaxLength;
+    return S_OK;
+}
+
+// Returns the data pointer and/or the valid-byte count.
+// Per the IMediaBuffer contract either out-parameter may be NULL, but not both.
+STDMETHODIMP AudioDSPMediaBuffer::GetBufferAndLength(BYTE **ppbBuffer, DWORD *pcbLength)
+{
+    if (!ppbBuffer && !pcbLength)
+    {
+        return E_POINTER;
+    }
+    if (ppbBuffer != NULL)
+    {
+        *ppbBuffer = m_pbData;
+    }
+    if (pcbLength != NULL)
+    {
+        *pcbLength = m_cbLength;
+    }
+    return S_OK;
+}
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h b/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h
new file mode 100644
index 0000000..a8d603b
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h
@@ -0,0 +1,51 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_AUDIO_DSP_MEDIABUFFER_H
+#define PLUGIN_AUDIO_DSP_MEDIABUFFER_H
+
+#include "plugin_audio_dsp_config.h"
+
+#include <dmo.h>
+
+// Minimal fixed-capacity IMediaBuffer implementation used to feed the DMOs.
+// Construction is private: instances are created via Create() and destroyed
+// through COM reference counting (Release()).
+class AudioDSPMediaBuffer : public IMediaBuffer
+{
+private:
+    DWORD        m_cbLength;		// number of valid bytes currently stored
+    const DWORD  m_cbMaxLength;		// fixed capacity set at construction
+    LONG         m_nRefCount;		// Reference count
+    BYTE         *m_pbData;		// heap-allocated byte store of m_cbMaxLength bytes
+
+    AudioDSPMediaBuffer(DWORD cbMaxLength, HRESULT& hr);
+    ~AudioDSPMediaBuffer();
+
+public:
+    // Factory: returns an AddRef'd buffer in *ppBuffer (caller owns the reference).
+    static HRESULT Create(long cbMaxLen, IMediaBuffer **ppBuffer);
+
+    // IUnknown methods.
+    STDMETHODIMP QueryInterface(REFIID riid, void **ppv);
+    STDMETHODIMP_(ULONG) AddRef();
+    STDMETHODIMP_(ULONG) Release();
+
+    // IMediaBuffer methods.
+    STDMETHODIMP SetLength(DWORD cbLength);
+    STDMETHODIMP GetMaxLength(DWORD *pcbMaxLength);
+    STDMETHODIMP GetBufferAndLength(BYTE **ppbBuffer, DWORD *pcbLength);
+};
+
+#endif /* PLUGIN_AUDIO_DSP_MEDIABUFFER_H */
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx b/plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx
new file mode 100644
index 0000000..3e5a291
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx
@@ -0,0 +1,388 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+// MS Audio Resampler DSP: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819070(v=vs.85).aspx
+#include "plugin_audio_dsp_utils.h"
+
+#include "tinymedia/tmedia_resampler.h"
+
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+#include <initguid.h>
+#include <Wmcodecdsp.h>
+#include <Mftransform.h>
+#include <Mfapi.h>
+#include <Mferror.h>
+
+#if !defined(PLUGIN_AUDIO_DSP_RESAMPLER_MAX_QUALITY)
+# define PLUGIN_AUDIO_DSP_RESAMPLER_MAX_QUALITY 60 /* [1 - 60]: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819449(v=vs.85).aspx */
+#endif
+
+// PCM sample width used for both the input and output media types.
+static const UINT32 g_nBitsPerSample = 16;
+// Drains one resampled sample from the MFT (defined below).
+static HRESULT ProcessOutput(struct plugin_audio_dsp_resampler_s *resampler, IMFSample **ppSample);
+
+/** MS Audio Resampler DSP context. */
+typedef struct plugin_audio_dsp_resampler_s
+{
+	TMEDIA_DECLARE_RESAMPLER;
+
+	bool bOpened;					// set by _open(), cleared by _close()
+
+	tsk_size_t in_size_samples;		// samples per input frame (doubled for stereo)
+	tsk_size_t in_size_bytes;		// bytes per input frame
+	tsk_size_t out_size_samples;	// samples per output frame (doubled for stereo)
+	tsk_size_t out_size_bytes;		// bytes per output frame
+	uint32_t in_channels;
+	uint32_t out_channels;
+
+	LONGLONG rtStart;				// running timestamp of the next input sample (100-ns units)
+	UINT64 rtDuration;				// duration of one frame (100-ns units)
+
+	IMFTransform* pMFT;				// the resampler MFT (CLSID_CResamplerMediaObject)
+	IMFSample *pSampleIn;			// reused input sample
+	IMFSample *pSampleOut;			// reused output sample
+}
+plugin_audio_dsp_resampler_t;
+
+// Doubango engine uses quality from [1 - 10].
+/* Creates and configures the MS Audio Resampler DSP (CLSID_CResamplerMediaObject).
+* @param in_freq / out_freq sampling rates in Hz.
+* @param frame_duration frame duration in milliseconds.
+* @param in_channels / out_channels 1 or 2 channels only.
+* @param quality Doubango quality [1 - 10], mapped to the DSP half filter length [1 - 60].
+* @return 0 on success, -1 otherwise. */
+static int plugin_audio_dsp_resampler_open(tmedia_resampler_t* self, uint32_t in_freq, uint32_t out_freq, uint32_t frame_duration, uint32_t in_channels, uint32_t out_channels, uint32_t quality)
+{
+	plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+
+	IMFMediaType* pTypeIn = NULL;
+	IMFMediaType* pTypeOut = NULL;
+	IWMResamplerProps* pProps = NULL;
+	HRESULT hr = S_OK;
+
+	if(in_channels != 1 && in_channels != 2)
+	{
+		TSK_DEBUG_ERROR("%d not valid as input channel", in_channels);
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+	if(out_channels != 1 && out_channels != 2)
+	{
+		TSK_DEBUG_ERROR("%d not valid as output channel", out_channels);
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+
+	if(resampler->bOpened)
+	{
+		TSK_DEBUG_ERROR("Resampler already opened");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	// Per-frame sizes: samples (doubled for stereo) then bytes.
+	resampler->in_size_samples = ((in_freq * frame_duration) / 1000) << (in_channels == 2 ? 1 : 0);
+	resampler->out_size_samples = ((out_freq * frame_duration) / 1000) << (out_channels == 2 ? 1 : 0);
+	resampler->in_channels = in_channels;
+	resampler->out_channels = out_channels;
+
+	resampler->in_size_bytes = (resampler->in_size_samples * (g_nBitsPerSample >> 3));
+	resampler->out_size_bytes = (resampler->out_size_samples * (g_nBitsPerSample >> 3));
+
+	resampler->rtStart = 0;
+	resampler->rtDuration = PLUGIN_AUDIO_DSP_MILLIS_TO_100NS(frame_duration); // milliseconds -> 100ns
+
+	CHECK_HR(hr = CoCreateInstance(CLSID_CResamplerMediaObject, NULL,
+		CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&resampler->pMFT)));
+
+	CHECK_HR(hr = AudioDSPUtils::CreatePCMAudioType(in_freq, g_nBitsPerSample, in_channels, &pTypeIn));
+	CHECK_HR(hr = AudioDSPUtils::CreatePCMAudioType(out_freq, g_nBitsPerSample, out_channels, &pTypeOut));
+
+	CHECK_HR(hr = resampler->pMFT->SetInputType(0, pTypeIn, 0));
+	CHECK_HR(hr = resampler->pMFT->SetOutputType(0, pTypeOut, 0));
+
+	CHECK_HR(hr = resampler->pMFT->QueryInterface(IID_PPV_ARGS(&pProps)));
+	{
+		// Map [1 - 10] -> [1 - 60] and clamp: the DSP's valid range is [1, 60],
+		// so quality = 0 must not produce an invalid half filter length of 0.
+		LONG nHalfFilterLength = (LONG)((quality * PLUGIN_AUDIO_DSP_RESAMPLER_MAX_QUALITY) / 10);
+		if(nHalfFilterLength < 1)
+		{
+			nHalfFilterLength = 1;
+		}
+		else if(nHalfFilterLength > PLUGIN_AUDIO_DSP_RESAMPLER_MAX_QUALITY)
+		{
+			nHalfFilterLength = PLUGIN_AUDIO_DSP_RESAMPLER_MAX_QUALITY;
+		}
+		CHECK_HR(hr = pProps->SetHalfFilterLength(nHalfFilterLength));
+	}
+
+	CHECK_HR(hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL));
+	CHECK_HR(hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL));
+	CHECK_HR(hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL));
+
+bail:
+	resampler->bOpened = SUCCEEDED(hr);
+	if(!resampler->bOpened)
+	{
+		SafeRelease(&resampler->pMFT);
+	}
+	SafeRelease(&pTypeIn);
+	SafeRelease(&pTypeOut);
+	SafeRelease(&pProps);
+	return resampler->bOpened ? 0 : -1;
+}
+
+/* Resamples exactly one frame: in_data (in_size samples) -> out_data.
+* If the MFT produces fewer bytes than one output frame the tail is zero-filled.
+* @return number of bytes written to out_data, 0 on failure. */
+static tsk_size_t plugin_audio_dsp_resampler_process(tmedia_resampler_t* self, const uint16_t* in_data, tsk_size_t in_size, uint16_t* out_data, tsk_size_t out_size)
+{
+	plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+
+	HRESULT hr = S_OK;
+	tsk_size_t retSize = 0;
+	// BUGFIX: declared before the first CHECK_HR() — a "goto bail" must not jump
+	// over an initialization (ill-formed C++), and "bail" releases these pointers.
+	IMFMediaBuffer* pBufferIn = NULL;
+	IMFMediaBuffer* pBufferOut = NULL;
+	IMFSample *pSampleOut = NULL;
+	BYTE* pBufferPtr = NULL;
+
+	if(!resampler || !out_data)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(!resampler->bOpened)
+	{
+		TSK_DEBUG_ERROR("Resampler not opened");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	if(in_size != resampler->in_size_samples)
+	{
+		TSK_DEBUG_ERROR("Input data has wrong size");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	if(out_size < resampler->out_size_samples)
+	{
+		TSK_DEBUG_ERROR("Output data is too short");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	// Lazily create the input sample/buffer, growing the buffer if a previous one is too small.
+	if(!resampler->pSampleIn)
+	{
+		CHECK_HR(hr = AudioDSPUtils::CreateMediaSample(resampler->in_size_bytes, &resampler->pSampleIn));
+		hr = resampler->pSampleIn->GetBufferByIndex(0, &pBufferIn);
+		if(FAILED(hr))
+		{
+			SafeRelease(&resampler->pSampleIn);
+			CHECK_HR(hr);
+		}
+	}
+	else
+	{
+		DWORD dwMaxLength = 0;
+		CHECK_HR(hr = resampler->pSampleIn->GetBufferByIndex(0, &pBufferIn));
+		CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
+		if(dwMaxLength < resampler->in_size_bytes)
+		{
+			CHECK_HR(hr = resampler->pSampleIn->RemoveAllBuffers());
+			SafeRelease(&pBufferIn);
+			CHECK_HR(hr = MFCreateMemoryBuffer(resampler->in_size_bytes, &pBufferIn));
+			CHECK_HR(hr = resampler->pSampleIn->AddBuffer(pBufferIn));
+		}
+	}
+
+	// Copy the caller's PCM into the locked MF buffer and stamp the sample.
+	CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
+	memcpy(pBufferPtr, in_data, resampler->in_size_bytes);
+	CHECK_HR(hr = pBufferIn->Unlock());
+	CHECK_HR(hr = pBufferIn->SetCurrentLength(resampler->in_size_bytes));
+
+	CHECK_HR(hr = resampler->pSampleIn->SetSampleDuration(resampler->rtDuration));
+	CHECK_HR(hr = resampler->pSampleIn->SetSampleTime(resampler->rtStart));
+
+	// Process input
+	// MF_E_NOTACCEPTING means the MFT wants its output drained first; non-fatal.
+	hr = resampler->pMFT->ProcessInput(0, resampler->pSampleIn, 0);
+	if(hr == MF_E_NOTACCEPTING)
+	{
+		hr = S_OK;
+	}
+	CHECK_HR(hr);
+
+	resampler->rtStart += resampler->rtDuration;
+
+	// Process output
+	CHECK_HR(hr = ProcessOutput(resampler, &pSampleOut));
+	if(pSampleOut)
+	{
+		CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+		BYTE* pBufferOutPtr = NULL;
+		DWORD dwDataLength = 0;
+		CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+		//if(dwDataLength == resampler->out_size_bytes)
+		{
+			CHECK_HR(hr = pBufferOut->Lock(&pBufferOutPtr, NULL, NULL));
+			{
+				memcpy(out_data, pBufferOutPtr, TSK_MIN(dwDataLength, resampler->out_size_bytes));
+				if(dwDataLength < resampler->out_size_bytes)
+				{
+					// Zero-fill the tail so the caller always gets a full frame.
+					TSK_DEBUG_INFO("[MS Resampler DSP] Output too short filling with silence");
+					memset(&((uint8_t*)out_data)[dwDataLength], 0, (resampler->out_size_bytes - dwDataLength));
+				}
+				retSize = (tsk_size_t)resampler->out_size_bytes;
+			}
+			CHECK_HR(hr = pBufferOut->Unlock());
+		}
+	}
+
+
+bail:
+	SafeRelease(&pBufferIn);
+	SafeRelease(&pBufferOut);
+	SafeRelease(&pSampleOut);
+
+	return retSize;
+}
+
+/* Notifies the MFT of end-of-stream, releases it with the cached samples and
+* marks the resampler as closed. Always returns 0. */
+static int plugin_audio_dsp_resampler_close(tmedia_resampler_t* self)
+{
+	plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+
+	if(resampler->pMFT)
+	{
+		// Drain notification; a failure here is not actionable at shutdown.
+		HRESULT hrEnd = resampler->pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
+		(void)hrEnd;
+	}
+
+	SafeRelease(&resampler->pMFT);
+	SafeRelease(&resampler->pSampleIn);
+	SafeRelease(&resampler->pSampleOut);
+
+	resampler->bOpened = false;
+
+	return 0;
+}
+
+// Drains one output sample from the resampler MFT into the cached pSampleOut.
+// On success *ppSample holds an AddRef'd pointer to that sample; when the MFT
+// needs more input, S_OK is returned with *ppSample left NULL.
+static HRESULT ProcessOutput(plugin_audio_dsp_resampler_t *resampler, IMFSample **ppSample)
+{
+	*ppSample = NULL;
+
+	IMFMediaBuffer* pBufferOut = NULL;
+
+	DWORD dwStatus;
+
+	HRESULT hr = S_OK;
+
+	MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
+
+	if(!resampler || !ppSample)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	// Lazily create the output sample/buffer, growing the buffer if a previous one is too small.
+	if(!resampler->pSampleOut)
+	{
+		CHECK_HR(hr = AudioDSPUtils::CreateMediaSample(resampler->out_size_bytes, &resampler->pSampleOut));
+		hr = resampler->pSampleOut->GetBufferByIndex(0, &pBufferOut);
+		if(FAILED(hr))
+		{
+			SafeRelease(&resampler->pSampleOut);
+			CHECK_HR(hr);
+		}
+	}
+	else
+	{
+		DWORD dwMaxLength = 0;
+		CHECK_HR(hr = resampler->pSampleOut->GetBufferByIndex(0, &pBufferOut));
+		CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
+		if(dwMaxLength < resampler->out_size_bytes)
+		{
+			CHECK_HR(hr = resampler->pSampleOut->RemoveAllBuffers());
+			SafeRelease(&pBufferOut);
+			CHECK_HR(hr = MFCreateMemoryBuffer(resampler->out_size_bytes, &pBufferOut));
+			CHECK_HR(hr = resampler->pSampleOut->AddBuffer(pBufferOut));
+		}
+	}
+
+	// Reset the valid-data length before asking the MFT to fill the buffer.
+	CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
+
+	//Set the output sample
+	mftOutputData.pSample = resampler->pSampleOut;
+	//Set the output id
+	mftOutputData.dwStreamID = 0;
+
+	//Generate the output sample
+	hr = resampler->pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus);
+	// "Need more input" is a normal condition, not an error: report success with no sample.
+	if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
+	{
+		hr = S_OK;
+		goto bail;
+	}
+
+	// TODO: Handle MF_E_TRANSFORM_STREAM_CHANGE
+
+	if (FAILED(hr))
+	{
+		goto bail;
+	}
+
+	// Hand out an extra reference to the cached sample; caller must Release() it.
+	*ppSample = resampler->pSampleOut;
+	(*ppSample)->AddRef();
+
+bail:
+	SafeRelease(&pBufferOut);
+	return hr;
+}
+
+
+
+//
+// MS Audio Resampler DSP plugin definition
+//
+
+/* constructor: ensures COM/MF are started, then initializes the base resampler. */
+static tsk_object_t* plugin_audio_dsp_resampler_ctor(tsk_object_t * self, va_list * app)
+{
+	plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+	if(resampler)
+	{
+		AudioDSPUtils::Startup();
+
+		/* init base */
+		tmedia_resampler_init(TMEDIA_RESAMPLER(resampler));
+		/* init self */
+	}
+	return self;
+}
+/* destructor: deinitializes the base and releases COM objects (safe to repeat
+* after _close(); SafeRelease() handles NULL pointers). */
+static tsk_object_t* plugin_audio_dsp_resampler_dtor(tsk_object_t * self)
+{
+	plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+	if(resampler)
+	{
+		/* deinit base */
+		tmedia_resampler_deinit(TMEDIA_RESAMPLER(resampler));
+		/* deinit self */
+		// tmedia_resampler_deinit() already closed the resampler and freed the resources...but do it again
+		SafeRelease(&resampler->pMFT);
+		SafeRelease(&resampler->pSampleIn);
+		SafeRelease(&resampler->pSampleOut);
+
+		TSK_DEBUG_INFO("*** MS Audio Resampler DSP (plugin) destroyed ***");
+	}
+
+	return self;
+}
+/* object definition (size + ctor/dtor; no comparator) */
+static const tsk_object_def_t plugin_audio_dsp_resampler_def_s =
+{
+	sizeof(plugin_audio_dsp_resampler_t),
+	plugin_audio_dsp_resampler_ctor,
+	plugin_audio_dsp_resampler_dtor,
+	tsk_null,
+};
+/* plugin definition: vtable exported to the tinyMEDIA resampler registry */
+static const tmedia_resampler_plugin_def_t plugin_audio_dsp_resampler_plugin_def_s =
+{
+	&plugin_audio_dsp_resampler_def_s,
+
+	"MS Audio Resampler DSP",	/* http://msdn.microsoft.com/en-us/library/windows/desktop/ff819070(v=vs.85).aspx */
+
+	plugin_audio_dsp_resampler_open,
+	plugin_audio_dsp_resampler_process,
+	plugin_audio_dsp_resampler_close,
+};
+const tmedia_resampler_plugin_def_t *plugin_audio_dsp_resampler_plugin_def_t = &plugin_audio_dsp_resampler_plugin_def_s;
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx b/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx
new file mode 100644
index 0000000..67cf3cf
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx
@@ -0,0 +1,157 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_audio_dsp_utils.h"
+#include "tsk_debug.h"
+
+#include <uuids.h>
+#include <Dmo.h>
+#include <Mfapi.h>
+#include <assert.h>
+
+bool AudioDSPUtils::g_bStarted = false;
+
+HRESULT AudioDSPUtils::Startup()
+{
+ if(!g_bStarted)
+ {
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+ if(SUCCEEDED(hr) || hr == 0x80010106) // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
+ {
+ hr = MFStartup(MF_VERSION);
+ }
+ g_bStarted = SUCCEEDED(hr);
+ return hr;
+ }
+ return S_OK;
+}
+
+HRESULT AudioDSPUtils::Shutdown()
+{
+ return S_OK;
+}
+
+HRESULT AudioDSPUtils::CreatePCMAudioType(
+ UINT32 sampleRate, // Samples per second
+ UINT32 bitsPerSample, // Bits per sample
+ UINT32 cChannels, // Number of channels
+ IMFMediaType **ppType // Receives a pointer to the media type.
+ )
+{
+ HRESULT hr = S_OK;
+
+ IMFMediaType *pType = NULL;
+
+ // Calculate derived values.
+ UINT32 blockAlign = cChannels * (bitsPerSample >> 3);
+ UINT32 bytesPerSecond = blockAlign * sampleRate;
+
+ // Create the empty media type.
+ CHECK_HR(hr = MFCreateMediaType(&pType));
+
+ // Set attributes on the type.
+ CHECK_HR(hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
+
+ CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, cChannels));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sampleRate));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, blockAlign));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerSecond));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
+
+ *ppType = pType;
+ (*ppType)->AddRef();
+
+bail:
+ SafeRelease(&pType);
+ return hr;
+}
+
+HRESULT AudioDSPUtils::CreateMediaSample(
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
+)
+{
+ HRESULT hr = S_OK;
+
+ if(!ppSample)
+ {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ IMFSample *pSample = NULL;
+ IMFMediaBuffer *pBuffer = NULL;
+
+ CHECK_HR(hr = MFCreateSample(&pSample));
+ CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
+ CHECK_HR(hr = pSample->AddBuffer(pBuffer));
+
+ *ppSample = pSample;
+ (*ppSample)->AddRef();
+
+bail:
+ SafeRelease(&pSample);
+ SafeRelease(&pBuffer);
+ return hr;
+}
+
+HRESULT AudioDSPUtils::MoInitMediaType(
+	UINT32 sampleRate, // Samples per second
+	UINT32 bitsPerSample, // Bits per sample
+	UINT32 cChannels, // Number of channels
+	DMO_MEDIA_TYPE *pType // The media type to initialize. Must be freed using MoFreeMediaType.
+	)
+{
+	HRESULT hr = S_OK;
+	WAVEFORMATEX *pWAV = NULL;
+
+	if(!pType)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	pType->majortype = MEDIATYPE_Audio;
+	pType->subtype = MEDIASUBTYPE_PCM;
+	pType->lSampleSize = 0;
+	pType->bFixedSizeSamples = TRUE;
+	pType->bTemporalCompression = FALSE;
+	pType->formattype = FORMAT_WaveFormatEx;
+
+	CHECK_HR(hr = ::MoInitMediaType(pType, sizeof(WAVEFORMATEX)));
+
+	pWAV = (WAVEFORMATEX*)pType->pbFormat;
+	pWAV->wFormatTag = WAVE_FORMAT_PCM;
+	pWAV->nChannels = (WORD)cChannels; // FIX: was hard-coded to 1, contradicting nBlockAlign below which uses cChannels
+	pWAV->nSamplesPerSec = sampleRate;
+	pWAV->nBlockAlign = cChannels * (bitsPerSample >> 3); // bytes per frame across all channels
+	pWAV->nAvgBytesPerSec = pWAV->nBlockAlign * pWAV->nSamplesPerSec;
+	pWAV->wBitsPerSample = bitsPerSample;
+	pWAV->cbSize = 0; // no extra format bytes for plain PCM
+
+bail:
+	return hr;
+}
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h b/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h
new file mode 100644
index 0000000..7daff31
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h
@@ -0,0 +1,72 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_AUDIO_DSP_UTILS_H
+#define PLUGIN_AUDIO_DSP_UTILS_H
+
+#include "plugin_audio_dsp_config.h"
+
+#include <Windows.h>
+#include <mfidl.h>
+
+#undef CHECK_HR
+// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
+#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
+
+#if !defined(PLUGIN_AUDIO_DSP_MILLIS_TO_100NS)
+# define PLUGIN_AUDIO_DSP_MILLIS_TO_100NS(MILLIS) (((LONGLONG)(MILLIS)) * 10000ui64)
+#endif
+
+#undef SafeRelease
+#define SafeRelease(ppT) \
+{ \
+ if (*ppT) \
+ { \
+ (*ppT)->Release(); \
+ *ppT = NULL; \
+ } \
+}
+
+class AudioDSPUtils
+{
+public:
+ static HRESULT Startup();
+ static HRESULT Shutdown();
+ static HRESULT CreatePCMAudioType(
+ UINT32 sampleRate, // Samples per second
+ UINT32 bitsPerSample, // Bits per sample
+ UINT32 cChannels, // Number of channels
+ IMFMediaType **ppType // Receives a pointer to the media type.
+ );
+ static HRESULT CreateMediaSample(
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
+ );
+ static HRESULT MoInitMediaType(
+ UINT32 sampleRate, // Samples per second
+ UINT32 bitsPerSample, // Bits per sample
+ UINT32 cChannels, // Number of channels
+ DMO_MEDIA_TYPE *pType // The media type to initialize. Must be freed using MoFreeMediaType.
+ );
+
+private:
+ static bool g_bStarted;
+};
+
+#endif /* PLUGIN_AUDIO_DSP_UTILS_H */
+
diff --git a/plugins/pluginWinAudioDSP/version.rc b/plugins/pluginWinAudioDSP/version.rc
new file mode 100644
index 0000000..d2b8170
--- /dev/null
+++ b/plugins/pluginWinAudioDSP/version.rc
@@ -0,0 +1,102 @@
+// Microsoft Visual C++ generated resource script.
+//
+// #include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""afxres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 2.0.0.1156
+ PRODUCTVERSION 2.0.0.1156
+ FILEFLAGSMASK 0x17L
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904b0"
+ BEGIN
+ VALUE "CompanyName", "Doubango Telecom"
+ VALUE "FileDescription", "Doubango IMS Framework Windows Audio DSP"
+ VALUE "FileVersion", "2.0.0.1156"
+ VALUE "InternalName", "pluginWinAudioDSP.dll"
+ VALUE "LegalCopyright", "(c) 2010-2013 Doubango Telecom. All rights reserved."
+ VALUE "OriginalFilename", "pluginWinAudioDSP.dll"
+ VALUE "ProductName", "Doubango IMS Framework Windows Audio DSP"
+ VALUE "ProductVersion", "2.0.0.1156"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
diff --git a/plugins/pluginWinDD/dllmain_dd.cxx b/plugins/pluginWinDD/dllmain_dd.cxx
new file mode 100644
index 0000000..b390c90
--- /dev/null
+++ b/plugins/pluginWinDD/dllmain_dd.cxx
@@ -0,0 +1,114 @@
+/* Copyright (C) 2015 Mamadou DIOP
+* Copyright (C) 2015 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_dd_config.h"
+
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_plugin.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+
+extern const tmedia_producer_plugin_def_t *plugin_win_dd_producer_plugin_def_t;
+
+PLUGIN_WIN_DD_BEGIN_DECLS /* BEGIN */
+PLUGIN_WIN_DDP_API int __plugin_get_def_count();
+PLUGIN_WIN_DDP_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+PLUGIN_WIN_DDP_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+PLUGIN_WIN_DDP_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+PLUGIN_WIN_DD_END_DECLS /* END */
+
+BOOL APIENTRY DllMain(HMODULE hModule,
+DWORD ul_reason_for_call,
+LPVOID lpReserved
+)
+{
+ switch (ul_reason_for_call)
+ {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
+}
+
+
+typedef enum PLUGIN_INDEX_E
+{
+ PLUGIN_INDEX_PRODUCER,
+
+ PLUGIN_INDEX_COUNT
+}
+PLUGIN_INDEX_T;
+
+
+int __plugin_get_def_count()
+{
+ return PLUGIN_INDEX_COUNT;
+}
+
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+ switch (index){
+ case PLUGIN_INDEX_PRODUCER:
+ {
+ return tsk_plugin_def_type_producer;
+ }
+ default:
+ {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+ }
+ }
+}
+
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+ switch (index){
+ case PLUGIN_INDEX_PRODUCER:
+ {
+ return tsk_plugin_def_media_type_screencast;
+ }
+ default:
+ {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+ }
+ }
+}
+
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+ switch (index){
+ case PLUGIN_INDEX_PRODUCER:
+ {
+ return plugin_win_dd_producer_plugin_def_t;
+ }
+ default:
+ {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+ }
+ }
+}
diff --git a/plugins/pluginWinDD/internals/CommonTypes.h b/plugins/pluginWinDD/internals/CommonTypes.h
new file mode 100644
index 0000000..7b9c2d4
--- /dev/null
+++ b/plugins/pluginWinDD/internals/CommonTypes.h
@@ -0,0 +1,119 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#ifndef _COMMONTYPES_H_
+#define _COMMONTYPES_H_
+
+#include <windows.h>
+#include <d3d11.h>
+#include <dxgi1_2.h>
+#include <sal.h>
+#include <new>
+#include <warning.h>
+#include <DirectXMath.h>
+#include <initguid.h>
+
+#include "PixelShader.h"
+#include "VertexShader.h"
+
+#define NUMVERTICES 6
+#define BPP 4
+
+#define OCCLUSION_STATUS_MSG WM_USER
+
+extern HRESULT SystemTransitionsExpectedErrors[];
+extern HRESULT CreateDuplicationExpectedErrors[];
+extern HRESULT FrameInfoExpectedErrors[];
+extern HRESULT AcquireFrameExpectedError[];
+extern HRESULT EnumOutputsExpectedErrors[];
+
+
+typedef _Return_type_success_(return == DUPL_RETURN_SUCCESS) enum
+{
+ DUPL_RETURN_SUCCESS = 0,
+ DUPL_RETURN_ERROR_EXPECTED = 1,
+ DUPL_RETURN_ERROR_UNEXPECTED = 2
+}DUPL_RETURN;
+
+_Post_satisfies_(return != DUPL_RETURN_SUCCESS)
+DUPL_RETURN ProcessFailure(_In_opt_ ID3D11Device* Device, _In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr, _In_opt_z_ HRESULT* ExpectedErrors = nullptr);
+
+void DisplayMsg(_In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr);
+
+//
+// Holds info about the pointer/cursor
+//
+typedef struct _PTR_INFO
+{
+ _Field_size_bytes_(BufferSize) BYTE* PtrShapeBuffer;
+ DXGI_OUTDUPL_POINTER_SHAPE_INFO ShapeInfo;
+ POINT Position;
+ bool Visible;
+ UINT BufferSize;
+ UINT WhoUpdatedPositionLast;
+ LARGE_INTEGER LastTimeStamp;
+} PTR_INFO;
+
+//
+// Structure that holds D3D resources not directly tied to any one thread
+//
+typedef struct _DX_RESOURCES
+{
+ ID3D11Device* Device;
+ ID3D11DeviceContext* Context;
+ ID3D11VertexShader* VertexShader;
+ ID3D11PixelShader* PixelShader;
+ ID3D11InputLayout* InputLayout;
+ ID3D11SamplerState* SamplerLinear;
+} DX_RESOURCES;
+
+//
+// Structure to pass to a new thread
+//
+typedef struct _THREAD_DATA
+{
+ // Used to indicate abnormal error condition
+ HANDLE UnexpectedErrorEvent;
+
+ // Used to indicate a transition event occurred e.g. PnpStop, PnpStart, mode change, TDR, desktop switch and the application needs to recreate the duplication interface
+ HANDLE ExpectedErrorEvent;
+
+ // Used by WinProc to signal to threads to exit
+ HANDLE TerminateThreadsEvent;
+
+ HANDLE TexSharedHandle;
+ UINT Output;
+ INT OffsetX;
+ INT OffsetY;
+ PTR_INFO* PtrInfo;
+ DX_RESOURCES DxRes;
+
+ const struct tmedia_producer_s* Producer;
+} THREAD_DATA;
+
+//
+// FRAME_DATA holds information about an acquired frame
+//
+typedef struct _FRAME_DATA
+{
+ ID3D11Texture2D* Frame;
+ DXGI_OUTDUPL_FRAME_INFO FrameInfo;
+ _Field_size_bytes_((MoveCount * sizeof(DXGI_OUTDUPL_MOVE_RECT)) + (DirtyCount * sizeof(RECT))) BYTE* MetaData;
+ UINT DirtyCount;
+ UINT MoveCount;
+} FRAME_DATA;
+
+//
+// A vertex with a position and texture coordinate
+//
+typedef struct _VERTEX
+{
+ DirectX::XMFLOAT3 Pos;
+ DirectX::XMFLOAT2 TexCoord;
+} VERTEX;
+
+#endif
diff --git a/plugins/pluginWinDD/internals/DisplayManager.cxx b/plugins/pluginWinDD/internals/DisplayManager.cxx
new file mode 100644
index 0000000..98209a0
--- /dev/null
+++ b/plugins/pluginWinDD/internals/DisplayManager.cxx
@@ -0,0 +1,478 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#include "DisplayManager.h"
+using namespace DirectX;
+
+//
+// Constructor NULLs out vars
+//
+DISPLAYMANAGER::DISPLAYMANAGER() : m_Device(nullptr),
+ m_DeviceContext(nullptr),
+ m_MoveSurf(nullptr),
+ m_VertexShader(nullptr),
+ m_PixelShader(nullptr),
+ m_InputLayout(nullptr),
+ m_RTV(nullptr),
+ m_SamplerLinear(nullptr),
+ m_DirtyVertexBufferAlloc(nullptr),
+ m_DirtyVertexBufferAllocSize(0)
+{
+}
+
+//
+// Destructor calls CleanRefs to destroy everything
+//
+DISPLAYMANAGER::~DISPLAYMANAGER()
+{
+ CleanRefs();
+
+ if (m_DirtyVertexBufferAlloc)
+ {
+ delete [] m_DirtyVertexBufferAlloc;
+ m_DirtyVertexBufferAlloc = nullptr;
+ }
+}
+
+//
+// Initialize D3D variables
+//
+void DISPLAYMANAGER::InitD3D(DX_RESOURCES* Data)
+{
+ m_Device = Data->Device;
+ m_DeviceContext = Data->Context;
+ m_VertexShader = Data->VertexShader;
+ m_PixelShader = Data->PixelShader;
+ m_InputLayout = Data->InputLayout;
+ m_SamplerLinear = Data->SamplerLinear;
+
+ m_Device->AddRef();
+ m_DeviceContext->AddRef();
+ m_VertexShader->AddRef();
+ m_PixelShader->AddRef();
+ m_InputLayout->AddRef();
+ m_SamplerLinear->AddRef();
+}
+
+//
+// Process a given frame and its metadata
+//
+DUPL_RETURN DISPLAYMANAGER::ProcessFrame(_In_ FRAME_DATA* Data, _Inout_ ID3D11Texture2D* SharedSurf, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc)
+{
+ DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
+
+ // Process dirties and moves
+ if (Data->FrameInfo.TotalMetadataBufferSize)
+ {
+ D3D11_TEXTURE2D_DESC Desc;
+ Data->Frame->GetDesc(&Desc);
+
+ if (Data->MoveCount)
+ {
+ Ret = CopyMove(SharedSurf, reinterpret_cast<DXGI_OUTDUPL_MOVE_RECT*>(Data->MetaData), Data->MoveCount, OffsetX, OffsetY, DeskDesc, Desc.Width, Desc.Height);
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ return Ret;
+ }
+ }
+
+ if (Data->DirtyCount)
+ {
+ Ret = CopyDirty(Data->Frame, SharedSurf, reinterpret_cast<RECT*>(Data->MetaData + (Data->MoveCount * sizeof(DXGI_OUTDUPL_MOVE_RECT))), Data->DirtyCount, OffsetX, OffsetY, DeskDesc);
+ }
+ }
+
+ return Ret;
+}
+
+//
+// Returns D3D device being used
+//
+ID3D11Device* DISPLAYMANAGER::GetDevice()
+{
+ return m_Device;
+}
+
+//
+// Set appropriate source and destination rects for move rects
+//
+void DISPLAYMANAGER::SetMoveRect(_Out_ RECT* SrcRect, _Out_ RECT* DestRect, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ DXGI_OUTDUPL_MOVE_RECT* MoveRect, INT TexWidth, INT TexHeight)
+{
+ switch (DeskDesc->Rotation)
+ {
+ case DXGI_MODE_ROTATION_UNSPECIFIED:
+ case DXGI_MODE_ROTATION_IDENTITY:
+ {
+ SrcRect->left = MoveRect->SourcePoint.x;
+ SrcRect->top = MoveRect->SourcePoint.y;
+ SrcRect->right = MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left;
+ SrcRect->bottom = MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top;
+
+ *DestRect = MoveRect->DestinationRect;
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE90:
+ {
+ SrcRect->left = TexHeight - (MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top);
+ SrcRect->top = MoveRect->SourcePoint.x;
+ SrcRect->right = TexHeight - MoveRect->SourcePoint.y;
+ SrcRect->bottom = MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left;
+
+ DestRect->left = TexHeight - MoveRect->DestinationRect.bottom;
+ DestRect->top = MoveRect->DestinationRect.left;
+ DestRect->right = TexHeight - MoveRect->DestinationRect.top;
+ DestRect->bottom = MoveRect->DestinationRect.right;
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE180:
+ {
+ SrcRect->left = TexWidth - (MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left);
+ SrcRect->top = TexHeight - (MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top);
+ SrcRect->right = TexWidth - MoveRect->SourcePoint.x;
+ SrcRect->bottom = TexHeight - MoveRect->SourcePoint.y;
+
+ DestRect->left = TexWidth - MoveRect->DestinationRect.right;
+ DestRect->top = TexHeight - MoveRect->DestinationRect.bottom;
+ DestRect->right = TexWidth - MoveRect->DestinationRect.left;
+ DestRect->bottom = TexHeight - MoveRect->DestinationRect.top;
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE270:
+ {
+ SrcRect->left = MoveRect->SourcePoint.x;
+ SrcRect->top = TexWidth - (MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left);
+ SrcRect->right = MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top;
+ SrcRect->bottom = TexWidth - MoveRect->SourcePoint.x;
+
+ DestRect->left = MoveRect->DestinationRect.top;
+ DestRect->top = TexWidth - MoveRect->DestinationRect.right;
+ DestRect->right = MoveRect->DestinationRect.bottom;
+ DestRect->bottom = TexWidth - MoveRect->DestinationRect.left;
+ break;
+ }
+ default:
+ {
+ RtlZeroMemory(DestRect, sizeof(RECT));
+ RtlZeroMemory(SrcRect, sizeof(RECT));
+ break;
+ }
+ }
+}
+
+//
+// Copy move rectangles
+//
+DUPL_RETURN DISPLAYMANAGER::CopyMove(_Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(MoveCount) DXGI_OUTDUPL_MOVE_RECT* MoveBuffer, UINT MoveCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, INT TexWidth, INT TexHeight)
+{
+ D3D11_TEXTURE2D_DESC FullDesc;
+ SharedSurf->GetDesc(&FullDesc);
+
+ // Make new intermediate surface to copy into for moving
+ if (!m_MoveSurf)
+ {
+ D3D11_TEXTURE2D_DESC MoveDesc;
+ MoveDesc = FullDesc;
+ MoveDesc.Width = DeskDesc->DesktopCoordinates.right - DeskDesc->DesktopCoordinates.left;
+ MoveDesc.Height = DeskDesc->DesktopCoordinates.bottom - DeskDesc->DesktopCoordinates.top;
+ MoveDesc.BindFlags = D3D11_BIND_RENDER_TARGET;
+ MoveDesc.MiscFlags = 0;
+ HRESULT hr = m_Device->CreateTexture2D(&MoveDesc, nullptr, &m_MoveSurf);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create staging texture for move rects", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ }
+
+ for (UINT i = 0; i < MoveCount; ++i)
+ {
+ RECT SrcRect;
+ RECT DestRect;
+
+ SetMoveRect(&SrcRect, &DestRect, DeskDesc, &(MoveBuffer[i]), TexWidth, TexHeight);
+
+ // Copy rect out of shared surface
+ D3D11_BOX Box;
+ Box.left = SrcRect.left + DeskDesc->DesktopCoordinates.left - OffsetX;
+ Box.top = SrcRect.top + DeskDesc->DesktopCoordinates.top - OffsetY;
+ Box.front = 0;
+ Box.right = SrcRect.right + DeskDesc->DesktopCoordinates.left - OffsetX;
+ Box.bottom = SrcRect.bottom + DeskDesc->DesktopCoordinates.top - OffsetY;
+ Box.back = 1;
+ m_DeviceContext->CopySubresourceRegion(m_MoveSurf, 0, SrcRect.left, SrcRect.top, 0, SharedSurf, 0, &Box);
+
+ // Copy back to shared surface
+ Box.left = SrcRect.left;
+ Box.top = SrcRect.top;
+ Box.front = 0;
+ Box.right = SrcRect.right;
+ Box.bottom = SrcRect.bottom;
+ Box.back = 1;
+ m_DeviceContext->CopySubresourceRegion(SharedSurf, 0, DestRect.left + DeskDesc->DesktopCoordinates.left - OffsetX, DestRect.top + DeskDesc->DesktopCoordinates.top - OffsetY, 0, m_MoveSurf, 0, &Box);
+ }
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Sets up vertices for dirty rects for rotated desktops
+//
+#pragma warning(push)
+#pragma warning(disable:__WARNING_USING_UNINIT_VAR) // false positives in SetDirtyVert due to tool bug
+
+void DISPLAYMANAGER::SetDirtyVert(_Out_writes_(NUMVERTICES) VERTEX* Vertices, _In_ RECT* Dirty, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ D3D11_TEXTURE2D_DESC* FullDesc, _In_ D3D11_TEXTURE2D_DESC* ThisDesc)
+{
+ INT CenterX = FullDesc->Width / 2;
+ INT CenterY = FullDesc->Height / 2;
+
+ INT Width = DeskDesc->DesktopCoordinates.right - DeskDesc->DesktopCoordinates.left;
+ INT Height = DeskDesc->DesktopCoordinates.bottom - DeskDesc->DesktopCoordinates.top;
+
+ // Rotation compensated destination rect
+ RECT DestDirty = *Dirty;
+
+ // Set appropriate coordinates compensated for rotation
+ switch (DeskDesc->Rotation)
+ {
+ case DXGI_MODE_ROTATION_ROTATE90:
+ {
+ DestDirty.left = Width - Dirty->bottom;
+ DestDirty.top = Dirty->left;
+ DestDirty.right = Width - Dirty->top;
+ DestDirty.bottom = Dirty->right;
+
+ Vertices[0].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[1].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[2].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[5].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE180:
+ {
+ DestDirty.left = Width - Dirty->right;
+ DestDirty.top = Height - Dirty->bottom;
+ DestDirty.right = Width - Dirty->left;
+ DestDirty.bottom = Height - Dirty->top;
+
+ Vertices[0].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[1].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[2].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[5].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE270:
+ {
+ DestDirty.left = Dirty->top;
+ DestDirty.top = Height - Dirty->right;
+ DestDirty.right = Dirty->bottom;
+ DestDirty.bottom = Height - Dirty->left;
+
+ Vertices[0].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[1].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[2].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[5].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ break;
+ }
+ default:
+ assert(false); // drop through
+ case DXGI_MODE_ROTATION_UNSPECIFIED:
+ case DXGI_MODE_ROTATION_IDENTITY:
+ {
+ Vertices[0].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[1].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[2].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[5].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ break;
+ }
+ }
+
+ // Set positions
+ Vertices[0].Pos = XMFLOAT3((DestDirty.left + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
+ -1 * (DestDirty.bottom + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
+ 0.0f);
+ Vertices[1].Pos = XMFLOAT3((DestDirty.left + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
+ -1 * (DestDirty.top + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
+ 0.0f);
+ Vertices[2].Pos = XMFLOAT3((DestDirty.right + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
+ -1 * (DestDirty.bottom + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
+ 0.0f);
+ Vertices[3].Pos = Vertices[2].Pos;
+ Vertices[4].Pos = Vertices[1].Pos;
+ Vertices[5].Pos = XMFLOAT3((DestDirty.right + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
+ -1 * (DestDirty.top + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
+ 0.0f);
+
+ Vertices[3].TexCoord = Vertices[2].TexCoord;
+ Vertices[4].TexCoord = Vertices[1].TexCoord;
+}
+
+#pragma warning(pop) // re-enable __WARNING_USING_UNINIT_VAR
+
+//
+// Copies dirty rectangles
+//
+DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(DirtyCount) RECT* DirtyBuffer, UINT DirtyCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc)
+{
+ HRESULT hr;
+
+ D3D11_TEXTURE2D_DESC FullDesc;
+ SharedSurf->GetDesc(&FullDesc);
+
+ D3D11_TEXTURE2D_DESC ThisDesc;
+ SrcSurface->GetDesc(&ThisDesc);
+
+ if (!m_RTV)
+ {
+ hr = m_Device->CreateRenderTargetView(SharedSurf, nullptr, &m_RTV);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create render target view for dirty rects", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ }
+
+ D3D11_SHADER_RESOURCE_VIEW_DESC ShaderDesc;
+ ShaderDesc.Format = ThisDesc.Format;
+ ShaderDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
+ ShaderDesc.Texture2D.MostDetailedMip = ThisDesc.MipLevels - 1;
+ ShaderDesc.Texture2D.MipLevels = ThisDesc.MipLevels;
+
+ // Create new shader resource view
+ ID3D11ShaderResourceView* ShaderResource = nullptr;
+ hr = m_Device->CreateShaderResourceView(SrcSurface, &ShaderDesc, &ShaderResource);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create shader resource view for dirty rects", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ FLOAT BlendFactor[4] = {0.f, 0.f, 0.f, 0.f};
+ m_DeviceContext->OMSetBlendState(nullptr, BlendFactor, 0xFFFFFFFF);
+ m_DeviceContext->OMSetRenderTargets(1, &m_RTV, nullptr);
+ m_DeviceContext->VSSetShader(m_VertexShader, nullptr, 0);
+ m_DeviceContext->PSSetShader(m_PixelShader, nullptr, 0);
+ m_DeviceContext->PSSetShaderResources(0, 1, &ShaderResource);
+ m_DeviceContext->PSSetSamplers(0, 1, &m_SamplerLinear);
+ m_DeviceContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
+
+ // Create space for vertices for the dirty rects if the current space isn't large enough
+ UINT BytesNeeded = sizeof(VERTEX) * NUMVERTICES * DirtyCount;
+ if (BytesNeeded > m_DirtyVertexBufferAllocSize)
+ {
+ if (m_DirtyVertexBufferAlloc)
+ {
+ delete [] m_DirtyVertexBufferAlloc;
+ }
+
+ m_DirtyVertexBufferAlloc = new (std::nothrow) BYTE[BytesNeeded];
+ if (!m_DirtyVertexBufferAlloc)
+ {
+ m_DirtyVertexBufferAllocSize = 0;
+ return ProcessFailure(nullptr, L"Failed to allocate memory for dirty vertex buffer.", L"Error", E_OUTOFMEMORY);
+ }
+
+ m_DirtyVertexBufferAllocSize = BytesNeeded;
+ }
+
+ // Fill them in
+ VERTEX* DirtyVertex = reinterpret_cast<VERTEX*>(m_DirtyVertexBufferAlloc);
+ for (UINT i = 0; i < DirtyCount; ++i, DirtyVertex += NUMVERTICES)
+ {
+ SetDirtyVert(DirtyVertex, &(DirtyBuffer[i]), OffsetX, OffsetY, DeskDesc, &FullDesc, &ThisDesc);
+ }
+
+ // Create vertex buffer
+ D3D11_BUFFER_DESC BufferDesc;
+ RtlZeroMemory(&BufferDesc, sizeof(BufferDesc));
+ BufferDesc.Usage = D3D11_USAGE_DEFAULT;
+ BufferDesc.ByteWidth = BytesNeeded;
+ BufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
+ BufferDesc.CPUAccessFlags = 0;
+ D3D11_SUBRESOURCE_DATA InitData;
+ RtlZeroMemory(&InitData, sizeof(InitData));
+ InitData.pSysMem = m_DirtyVertexBufferAlloc;
+
+ ID3D11Buffer* VertBuf = nullptr;
+ hr = m_Device->CreateBuffer(&BufferDesc, &InitData, &VertBuf);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create vertex buffer in dirty rect processing", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ UINT Stride = sizeof(VERTEX);
+ UINT Offset = 0;
+ m_DeviceContext->IASetVertexBuffers(0, 1, &VertBuf, &Stride, &Offset);
+
+ D3D11_VIEWPORT VP;
+ VP.Width = static_cast<FLOAT>(FullDesc.Width);
+ VP.Height = static_cast<FLOAT>(FullDesc.Height);
+ VP.MinDepth = 0.0f;
+ VP.MaxDepth = 1.0f;
+ VP.TopLeftX = 0.0f;
+ VP.TopLeftY = 0.0f;
+ m_DeviceContext->RSSetViewports(1, &VP);
+
+ m_DeviceContext->Draw(NUMVERTICES * DirtyCount, 0);
+
+ VertBuf->Release();
+ VertBuf = nullptr;
+
+ ShaderResource->Release();
+ ShaderResource = nullptr;
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Clean all references
+//
+void DISPLAYMANAGER::CleanRefs()
+{
+ if (m_DeviceContext)
+ {
+ m_DeviceContext->Release();
+ m_DeviceContext = nullptr;
+ }
+
+ if (m_Device)
+ {
+ m_Device->Release();
+ m_Device = nullptr;
+ }
+
+ if (m_MoveSurf)
+ {
+ m_MoveSurf->Release();
+ m_MoveSurf = nullptr;
+ }
+
+ if (m_VertexShader)
+ {
+ m_VertexShader->Release();
+ m_VertexShader = nullptr;
+ }
+
+ if (m_PixelShader)
+ {
+ m_PixelShader->Release();
+ m_PixelShader = nullptr;
+ }
+
+ if (m_InputLayout)
+ {
+ m_InputLayout->Release();
+ m_InputLayout = nullptr;
+ }
+
+ if (m_SamplerLinear)
+ {
+ m_SamplerLinear->Release();
+ m_SamplerLinear = nullptr;
+ }
+
+ if (m_RTV)
+ {
+ m_RTV->Release();
+ m_RTV = nullptr;
+ }
+}
diff --git a/plugins/pluginWinDD/internals/DisplayManager.h b/plugins/pluginWinDD/internals/DisplayManager.h
new file mode 100644
index 0000000..f9bf69c
--- /dev/null
+++ b/plugins/pluginWinDD/internals/DisplayManager.h
@@ -0,0 +1,46 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#ifndef _DISPLAYMANAGER_H_
+#define _DISPLAYMANAGER_H_
+
+#include "CommonTypes.h"
+
+//
+// Handles the task of processing frames
+//
+class DISPLAYMANAGER
+{
+ public:
+ DISPLAYMANAGER();
+ ~DISPLAYMANAGER();
+ void InitD3D(DX_RESOURCES* Data);
+ ID3D11Device* GetDevice();
+ DUPL_RETURN ProcessFrame(_In_ FRAME_DATA* Data, _Inout_ ID3D11Texture2D* SharedSurf, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc);
+ void CleanRefs();
+
+ private:
+ // methods
+ DUPL_RETURN CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(DirtyCount) RECT* DirtyBuffer, UINT DirtyCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc);
+ DUPL_RETURN CopyMove(_Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(MoveCount) DXGI_OUTDUPL_MOVE_RECT* MoveBuffer, UINT MoveCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, INT TexWidth, INT TexHeight);
+ void SetDirtyVert(_Out_writes_(NUMVERTICES) VERTEX* Vertices, _In_ RECT* Dirty, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ D3D11_TEXTURE2D_DESC* FullDesc, _In_ D3D11_TEXTURE2D_DESC* ThisDesc);
+ void SetMoveRect(_Out_ RECT* SrcRect, _Out_ RECT* DestRect, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ DXGI_OUTDUPL_MOVE_RECT* MoveRect, INT TexWidth, INT TexHeight);
+
+ // variables
+ ID3D11Device* m_Device;
+ ID3D11DeviceContext* m_DeviceContext;
+ ID3D11Texture2D* m_MoveSurf;
+ ID3D11VertexShader* m_VertexShader;
+ ID3D11PixelShader* m_PixelShader;
+ ID3D11InputLayout* m_InputLayout;
+ ID3D11RenderTargetView* m_RTV;
+ ID3D11SamplerState* m_SamplerLinear;
+ BYTE* m_DirtyVertexBufferAlloc;
+ UINT m_DirtyVertexBufferAllocSize;
+};
+
+#endif
diff --git a/plugins/pluginWinDD/internals/DuplicationManager.cxx b/plugins/pluginWinDD/internals/DuplicationManager.cxx
new file mode 100644
index 0000000..995c8ec
--- /dev/null
+++ b/plugins/pluginWinDD/internals/DuplicationManager.cxx
@@ -0,0 +1,499 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#include "DuplicationManager.h"
+
+#include "tinymedia/tmedia_producer.h"
+
+#include <Mfapi.h>
+
+static inline HRESULT CopyRGBb32DownTop(
+ BYTE* pDst,
+ const BYTE* pSrc,
+ INT dwWidthDstPixels,
+ INT dwWidthSrcPixels,
+ INT dwHeightPixels
+ );
+//
+// Constructor sets up references / variables
+//
+DUPLICATIONMANAGER::DUPLICATIONMANAGER() : m_DeskDupl(nullptr),
+ m_AcquiredDesktopImage(nullptr),
+ m_MetaDataBuffer(nullptr),
+ m_MetaDataSize(0),
+ m_OutputNumber(0),
+ m_Device(nullptr),
+ m_DeviceContext(nullptr),
+ m_BufferPtr(nullptr),
+ m_BufferSize(0)
+{
+ RtlZeroMemory(&m_OutputDesc, sizeof(m_OutputDesc));
+}
+
+//
+// Destructor simply calls CleanRefs to destroy everything
+//
+DUPLICATIONMANAGER::~DUPLICATIONMANAGER()
+{
+ if (m_DeskDupl)
+ {
+ m_DeskDupl->Release();
+ m_DeskDupl = nullptr;
+ }
+
+ if (m_AcquiredDesktopImage)
+ {
+ m_AcquiredDesktopImage->Release();
+ m_AcquiredDesktopImage = nullptr;
+ }
+
+ if (m_MetaDataBuffer)
+ {
+ delete [] m_MetaDataBuffer;
+ m_MetaDataBuffer = nullptr;
+ }
+
+ if (m_DeviceContext)
+ {
+ m_DeviceContext->Release();
+ m_DeviceContext = nullptr;
+ }
+
+ if (m_Device)
+ {
+ m_Device->Release();
+ m_Device = nullptr;
+ }
+
+ if (m_BufferPtr)
+ {
+ VirtualFree(m_BufferPtr, 0, MEM_RELEASE);
+ m_BufferPtr = nullptr;
+ }
+}
+
+//
+// Initialize duplication interfaces
+//
+DUPL_RETURN DUPLICATIONMANAGER::InitDupl(_In_ ID3D11Device* Device, ID3D11DeviceContext* DeviceContext, UINT Output)
+{
+ m_OutputNumber = Output;
+
+ // Take a reference on the device
+ m_Device = Device;
+ m_Device->AddRef();
+
+ m_DeviceContext = DeviceContext;
+ m_DeviceContext->AddRef();
+
+ // Get DXGI device
+ IDXGIDevice* DxgiDevice = nullptr;
+ HRESULT hr = m_Device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&DxgiDevice));
+ if (FAILED(hr))
+ {
+ return ProcessFailure(nullptr, L"Failed to QI for DXGI Device", L"Error", hr);
+ }
+
+ // Get DXGI adapter
+ IDXGIAdapter* DxgiAdapter = nullptr;
+ hr = DxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&DxgiAdapter));
+ DxgiDevice->Release();
+ DxgiDevice = nullptr;
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to get parent DXGI Adapter", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Get output
+ IDXGIOutput* DxgiOutput = nullptr;
+ hr = DxgiAdapter->EnumOutputs(Output, &DxgiOutput);
+ DxgiAdapter->Release();
+ DxgiAdapter = nullptr;
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to get specified output in DUPLICATIONMANAGER", L"Error", hr, EnumOutputsExpectedErrors);
+ }
+
+ DxgiOutput->GetDesc(&m_OutputDesc);
+
+ // QI for Output 1
+ IDXGIOutput1* DxgiOutput1 = nullptr;
+ hr = DxgiOutput->QueryInterface(__uuidof(DxgiOutput1), reinterpret_cast<void**>(&DxgiOutput1));
+ DxgiOutput->Release();
+ DxgiOutput = nullptr;
+ if (FAILED(hr))
+ {
+ return ProcessFailure(nullptr, L"Failed to QI for DxgiOutput1 in DUPLICATIONMANAGER", L"Error", hr);
+ }
+
+ // Create desktop duplication
+ hr = DxgiOutput1->DuplicateOutput(m_Device, &m_DeskDupl);
+ DxgiOutput1->Release();
+ DxgiOutput1 = nullptr;
+ if (FAILED(hr))
+ {
+ if (hr == DXGI_ERROR_NOT_CURRENTLY_AVAILABLE)
+ {
+ MessageBoxW(nullptr, L"There is already the maximum number of applications using the Desktop Duplication API running, please close one of those applications and then try again.", L"Error", MB_OK);
+ return DUPL_RETURN_ERROR_UNEXPECTED;
+ }
+ return ProcessFailure(m_Device, L"Failed to get duplicate output in DUPLICATIONMANAGER", L"Error", hr, CreateDuplicationExpectedErrors);
+ }
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Retrieves mouse info and write it into PtrInfo
+//
+DUPL_RETURN DUPLICATIONMANAGER::GetMouse(_Inout_ PTR_INFO* PtrInfo, _In_ DXGI_OUTDUPL_FRAME_INFO* FrameInfo, INT OffsetX, INT OffsetY)
+{
+ // A non-zero mouse update timestamp indicates that there is a mouse position update and optionally a shape change
+ if (FrameInfo->LastMouseUpdateTime.QuadPart == 0)
+ {
+ return DUPL_RETURN_SUCCESS;
+ }
+
+ bool UpdatePosition = true;
+
+ // Make sure we don't update pointer position wrongly
+ // If pointer is invisible, make sure we did not get an update from another output that the last time that said pointer
+ // was visible, if so, don't set it to invisible or update.
+ if (!FrameInfo->PointerPosition.Visible && (PtrInfo->WhoUpdatedPositionLast != m_OutputNumber))
+ {
+ UpdatePosition = false;
+ }
+
+ // If two outputs both say they have a visible, only update if new update has newer timestamp
+ if (FrameInfo->PointerPosition.Visible && PtrInfo->Visible && (PtrInfo->WhoUpdatedPositionLast != m_OutputNumber) && (PtrInfo->LastTimeStamp.QuadPart > FrameInfo->LastMouseUpdateTime.QuadPart))
+ {
+ UpdatePosition = false;
+ }
+
+ // Update position
+ if (UpdatePosition)
+ {
+ PtrInfo->Position.x = FrameInfo->PointerPosition.Position.x + m_OutputDesc.DesktopCoordinates.left - OffsetX;
+ PtrInfo->Position.y = FrameInfo->PointerPosition.Position.y + m_OutputDesc.DesktopCoordinates.top - OffsetY;
+ PtrInfo->WhoUpdatedPositionLast = m_OutputNumber;
+ PtrInfo->LastTimeStamp = FrameInfo->LastMouseUpdateTime;
+ PtrInfo->Visible = FrameInfo->PointerPosition.Visible != 0;
+ }
+
+ // No new shape
+ if (FrameInfo->PointerShapeBufferSize == 0)
+ {
+ return DUPL_RETURN_SUCCESS;
+ }
+
+ // Old buffer too small
+ if (FrameInfo->PointerShapeBufferSize > PtrInfo->BufferSize)
+ {
+ if (PtrInfo->PtrShapeBuffer)
+ {
+ delete [] PtrInfo->PtrShapeBuffer;
+ PtrInfo->PtrShapeBuffer = nullptr;
+ }
+ PtrInfo->PtrShapeBuffer = new (std::nothrow) BYTE[FrameInfo->PointerShapeBufferSize];
+ if (!PtrInfo->PtrShapeBuffer)
+ {
+ PtrInfo->BufferSize = 0;
+ return ProcessFailure(nullptr, L"Failed to allocate memory for pointer shape in DUPLICATIONMANAGER", L"Error", E_OUTOFMEMORY);
+ }
+
+ // Update buffer size
+ PtrInfo->BufferSize = FrameInfo->PointerShapeBufferSize;
+ }
+
+ // Get shape
+ UINT BufferSizeRequired;
+ HRESULT hr = m_DeskDupl->GetFramePointerShape(FrameInfo->PointerShapeBufferSize, reinterpret_cast<VOID*>(PtrInfo->PtrShapeBuffer), &BufferSizeRequired, &(PtrInfo->ShapeInfo));
+ if (FAILED(hr))
+ {
+ delete [] PtrInfo->PtrShapeBuffer;
+ PtrInfo->PtrShapeBuffer = nullptr;
+ PtrInfo->BufferSize = 0;
+ return ProcessFailure(m_Device, L"Failed to get frame pointer shape in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
+ }
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+
+//
+// Get next frame and write it into Data
+//
+_Success_(*Timeout == false && return == DUPL_RETURN_SUCCESS)
+DUPL_RETURN DUPLICATIONMANAGER::GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Timeout)
+{
+ IDXGIResource* DesktopResource = nullptr;
+ DXGI_OUTDUPL_FRAME_INFO FrameInfo;
+
+ // Get new frame
+ HRESULT hr = m_DeskDupl->AcquireNextFrame(500, &FrameInfo, &DesktopResource);
+ if (hr == DXGI_ERROR_WAIT_TIMEOUT)
+ {
+ *Timeout = true;
+ return DUPL_RETURN_SUCCESS;
+ }
+ *Timeout = false;
+
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to acquire next frame in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
+ }
+
+ // If still holding old frame, destroy it
+ if (m_AcquiredDesktopImage)
+ {
+ m_AcquiredDesktopImage->Release();
+ m_AcquiredDesktopImage = nullptr;
+ }
+
+    // QI the acquired IDXGIResource for its ID3D11Texture2D interface
+ hr = DesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void **>(&m_AcquiredDesktopImage));
+ DesktopResource->Release();
+ DesktopResource = nullptr;
+ if (FAILED(hr))
+ {
+ return ProcessFailure(nullptr, L"Failed to QI for ID3D11Texture2D from acquired IDXGIResource in DUPLICATIONMANAGER", L"Error", hr);
+ }
+
+ // Get metadata
+ if (FrameInfo.TotalMetadataBufferSize)
+ {
+ // Old buffer too small
+ if (FrameInfo.TotalMetadataBufferSize > m_MetaDataSize)
+ {
+ if (m_MetaDataBuffer)
+ {
+ delete [] m_MetaDataBuffer;
+ m_MetaDataBuffer = nullptr;
+ }
+ m_MetaDataBuffer = new (std::nothrow) BYTE[FrameInfo.TotalMetadataBufferSize];
+ if (!m_MetaDataBuffer)
+ {
+ m_MetaDataSize = 0;
+ Data->MoveCount = 0;
+ Data->DirtyCount = 0;
+ return ProcessFailure(nullptr, L"Failed to allocate memory for metadata in DUPLICATIONMANAGER", L"Error", E_OUTOFMEMORY);
+ }
+ m_MetaDataSize = FrameInfo.TotalMetadataBufferSize;
+ }
+
+ UINT BufSize = FrameInfo.TotalMetadataBufferSize;
+
+ // Get move rectangles
+ hr = m_DeskDupl->GetFrameMoveRects(BufSize, reinterpret_cast<DXGI_OUTDUPL_MOVE_RECT*>(m_MetaDataBuffer), &BufSize);
+ if (FAILED(hr))
+ {
+ Data->MoveCount = 0;
+ Data->DirtyCount = 0;
+ return ProcessFailure(nullptr, L"Failed to get frame move rects in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
+ }
+ Data->MoveCount = BufSize / sizeof(DXGI_OUTDUPL_MOVE_RECT);
+
+ BYTE* DirtyRects = m_MetaDataBuffer + BufSize;
+ BufSize = FrameInfo.TotalMetadataBufferSize - BufSize;
+
+ // Get dirty rectangles
+ hr = m_DeskDupl->GetFrameDirtyRects(BufSize, reinterpret_cast<RECT*>(DirtyRects), &BufSize);
+ if (FAILED(hr))
+ {
+ Data->MoveCount = 0;
+ Data->DirtyCount = 0;
+ return ProcessFailure(nullptr, L"Failed to get frame dirty rects in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
+ }
+ Data->DirtyCount = BufSize / sizeof(RECT);
+
+ Data->MetaData = m_MetaDataBuffer;
+ }
+
+ Data->Frame = m_AcquiredDesktopImage;
+ Data->FrameInfo = FrameInfo;
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Release frame
+//
+DUPL_RETURN DUPLICATIONMANAGER::DoneWithFrame()
+{
+ HRESULT hr = m_DeskDupl->ReleaseFrame();
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to release frame in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
+ }
+
+ if (m_AcquiredDesktopImage)
+ {
+ m_AcquiredDesktopImage->Release();
+ m_AcquiredDesktopImage = nullptr;
+ }
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Gets output desc into DescPtr
+//
+void DUPLICATIONMANAGER::GetOutputDesc(_Out_ DXGI_OUTPUT_DESC* DescPtr)
+{
+ *DescPtr = m_OutputDesc;
+}
+
+
+HRESULT DUPLICATIONMANAGER::SendData(struct tmedia_producer_s* pProducer, FRAME_DATA* FrameData)
+{
+ HRESULT hr = E_FAIL;
+ D3D11_TEXTURE2D_DESC CopyBufferDesc = {0};
+ D3D11_TEXTURE2D_DESC FullDesc;
+ DXGI_MAPPED_RECT MappedSurface;
+ D3D11_BOX Box;
+ UINT BuffSize;
+
+ ID3D11Texture2D* CopyBuffer = nullptr;
+ IDXGISurface* CopySurface = nullptr;
+ ID3D11Device* Device = nullptr;
+
+ FrameData->Frame->GetDesc(&FullDesc);
+
+ CopyBufferDesc.Width = FullDesc.Width;
+ CopyBufferDesc.Height = FullDesc.Height;
+ CopyBufferDesc.MipLevels = 1;
+ CopyBufferDesc.ArraySize = 1;
+ CopyBufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+ CopyBufferDesc.SampleDesc.Count = 1;
+ CopyBufferDesc.SampleDesc.Quality = 0;
+ CopyBufferDesc.Usage = D3D11_USAGE_STAGING;
+ CopyBufferDesc.BindFlags = 0;
+ CopyBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+ CopyBufferDesc.MiscFlags = 0;
+
+ FrameData->Frame->GetDevice(&Device);
+ if (!Device)
+ {
+ hr = E_POINTER;
+ ProcessFailure(m_Device, L"Failed to get device", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+
+ hr = Device->CreateTexture2D(&CopyBufferDesc, nullptr, &CopyBuffer);
+ if (FAILED(hr))
+ {
+ ProcessFailure(m_Device, L"Failed creating staging texture for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+
+ Box.left = 0;
+ Box.top = 0;
+ Box.right = CopyBufferDesc.Width;
+ Box.bottom = CopyBufferDesc.Height;
+ Box.front = 0;
+ Box.back = 1;
+ m_DeviceContext->CopySubresourceRegion(CopyBuffer, 0, 0, 0, 0, FrameData->Frame, 0, &Box);
+
+ hr = CopyBuffer->QueryInterface(__uuidof(IDXGISurface), (void **)&CopySurface);
+ if (FAILED(hr))
+ {
+ ProcessFailure(nullptr, L"Failed to QI staging texture into IDXGISurface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+
+ BuffSize = CopyBufferDesc.Width * CopyBufferDesc.Height * 4;
+ if (m_BufferSize < BuffSize)
+ {
+ if (m_BufferPtr)
+ {
+ VirtualFree(m_BufferPtr, 0, MEM_RELEASE);
+ m_BufferSize = 0;
+ }
+ if (!(m_BufferPtr = (BYTE*)VirtualAlloc(NULL, BuffSize, MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE)))
+ {
+            ProcessFailure(Device, L"Failed to allocate memory", L"Error", (hr = E_OUTOFMEMORY), SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+ m_BufferSize = BuffSize;
+ }
+
+ hr = CopySurface->Map(&MappedSurface, DXGI_MAP_READ); // *** MAP *** //
+ if (FAILED(hr))
+ {
+ ProcessFailure(Device, L"Failed to map surface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+
+ pProducer->video.width = CopyBufferDesc.Width;
+ pProducer->video.height = CopyBufferDesc.Height;
+
+#if 0
+ hr = MFCopyImage(
+ m_BufferPtr,
+ (LONG)(CopyBufferDesc.Width << 2),
+ (BYTE*)MappedSurface.pBits,
+ (LONG)MappedSurface.Pitch,
+ (DWORD)(CopyBufferDesc.Width << 2),
+ (DWORD)CopyBufferDesc.Height
+ );
+#else
+ hr = CopyRGBb32DownTop(
+ m_BufferPtr,
+ MappedSurface.pBits,
+ CopyBufferDesc.Width,
+ (MappedSurface.Pitch >> 2), // Bytes -> Pixels
+ CopyBufferDesc.Height);
+#endif
+
+ pProducer->enc_cb.callback(pProducer->enc_cb.callback_data, m_BufferPtr, BuffSize);
+
+ CopySurface->Unmap(); // *** UNMAP *** //
+
+bail:
+ if (CopyBuffer)
+ {
+ CopyBuffer->Release();
+ }
+ if (CopySurface)
+ {
+ CopySurface->Release();
+ }
+ if (Device)
+ {
+ Device->Release();
+ }
+ return hr;
+}
+
+// For RGB32:
+// Direct3D -> Top-Down
+// Video Processor -> Down-Top
+static inline HRESULT CopyRGBb32DownTop(
+ BYTE* pDst,
+ const BYTE* pSrc,
+ INT dwWidthDstPixels,
+ INT dwWidthSrcPixels,
+ INT dwHeightPixels
+ )
+{
+ RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthSrcPixels * dwHeightPixels) - dwWidthSrcPixels];
+ RGBQUAD *pDestPixel = &((RGBQUAD*)pDst)[0];
+
+    INT x;
+    INT y;
+
+ for (y = dwHeightPixels; y > 0; --y)
+ {
+ for (x = 0; x < dwWidthDstPixels; ++x)
+ {
+ pDestPixel[x] = pSrcPixel[x];
+ }
+ pDestPixel += dwWidthDstPixels;
+ pSrcPixel -= dwWidthSrcPixels;
+ }
+ return S_OK;
+} \ No newline at end of file
diff --git a/plugins/pluginWinDD/internals/DuplicationManager.h b/plugins/pluginWinDD/internals/DuplicationManager.h
new file mode 100644
index 0000000..2c44b57
--- /dev/null
+++ b/plugins/pluginWinDD/internals/DuplicationManager.h
@@ -0,0 +1,43 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#ifndef _DUPLICATIONMANAGER_H_
+#define _DUPLICATIONMANAGER_H_
+
+#include "CommonTypes.h"
+
+//
+// Handles the task of duplicating an output.
+//
+class DUPLICATIONMANAGER
+{
+ public:
+ DUPLICATIONMANAGER();
+ ~DUPLICATIONMANAGER();
+ _Success_(*Timeout == false && return == DUPL_RETURN_SUCCESS) DUPL_RETURN GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Timeout);
+ DUPL_RETURN DoneWithFrame();
+ DUPL_RETURN InitDupl(_In_ ID3D11Device* Device, ID3D11DeviceContext* DeviceContext, UINT Output);
+ DUPL_RETURN GetMouse(_Inout_ PTR_INFO* PtrInfo, _In_ DXGI_OUTDUPL_FRAME_INFO* FrameInfo, INT OffsetX, INT OffsetY);
+ void GetOutputDesc(_Out_ DXGI_OUTPUT_DESC* DescPtr);
+ HRESULT SendData(struct tmedia_producer_s* pProducer, FRAME_DATA* FrameData);
+
+ private:
+
+ // vars
+ IDXGIOutputDuplication* m_DeskDupl;
+ ID3D11Texture2D* m_AcquiredDesktopImage;
+ _Field_size_bytes_(m_MetaDataSize) BYTE* m_MetaDataBuffer;
+ UINT m_MetaDataSize;
+ UINT m_OutputNumber;
+ DXGI_OUTPUT_DESC m_OutputDesc;
+ ID3D11Device* m_Device;
+ ID3D11DeviceContext* m_DeviceContext;
+ BYTE* m_BufferPtr;
+ UINT m_BufferSize;
+};
+
+#endif
diff --git a/plugins/pluginWinDD/internals/OutputManager.cxx b/plugins/pluginWinDD/internals/OutputManager.cxx
new file mode 100644
index 0000000..7468cf2
--- /dev/null
+++ b/plugins/pluginWinDD/internals/OutputManager.cxx
@@ -0,0 +1,1118 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#include "OutputManager.h"
+using namespace DirectX;
+
+//
+// Constructor NULLs out all pointers & sets appropriate var vals
+//
+OUTPUTMANAGER::OUTPUTMANAGER() : m_SwapChain(nullptr),
+ m_Device(nullptr),
+ m_Factory(nullptr),
+ m_DeviceContext(nullptr),
+ m_RTV(nullptr),
+ m_SamplerLinear(nullptr),
+ m_BlendState(nullptr),
+ m_VertexShader(nullptr),
+ m_PixelShader(nullptr),
+ m_InputLayout(nullptr),
+ m_SharedSurf(nullptr),
+ m_KeyMutex(nullptr),
+ m_WindowHandle(nullptr),
+ m_NeedsResize(false),
+ m_OcclusionCookie(0)
+{
+}
+
+//
+// Destructor which calls CleanRefs to release all references and memory.
+//
+OUTPUTMANAGER::~OUTPUTMANAGER()
+{
+ CleanRefs();
+}
+
+//
+// Indicates that window has been resized.
+//
+void OUTPUTMANAGER::WindowResize()
+{
+ m_NeedsResize = true;
+}
+
+//
+// Initialize all state
+//
+DUPL_RETURN OUTPUTMANAGER::InitOutput(HWND Window, INT SingleOutput, _Out_ UINT* OutCount, _Out_ RECT* DeskBounds)
+{
+ HRESULT hr;
+
+ // Store window handle
+ m_WindowHandle = Window;
+
+ // Driver types supported
+ D3D_DRIVER_TYPE DriverTypes[] =
+ {
+ D3D_DRIVER_TYPE_HARDWARE,
+ D3D_DRIVER_TYPE_WARP,
+ D3D_DRIVER_TYPE_REFERENCE,
+ };
+ UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
+
+ // Feature levels supported
+ D3D_FEATURE_LEVEL FeatureLevels[] =
+ {
+ D3D_FEATURE_LEVEL_11_0,
+ D3D_FEATURE_LEVEL_10_1,
+ D3D_FEATURE_LEVEL_10_0,
+ D3D_FEATURE_LEVEL_9_1
+ };
+ UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
+ D3D_FEATURE_LEVEL FeatureLevel;
+
+ // Create device
+ for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex)
+ {
+ hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr, 0, FeatureLevels, NumFeatureLevels,
+ D3D11_SDK_VERSION, &m_Device, &FeatureLevel, &m_DeviceContext);
+ if (SUCCEEDED(hr))
+ {
+ // Device creation succeeded, no need to loop anymore
+ break;
+ }
+ }
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Device creation in OUTPUTMANAGER failed", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Get DXGI factory
+ IDXGIDevice* DxgiDevice = nullptr;
+ hr = m_Device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&DxgiDevice));
+ if (FAILED(hr))
+ {
+ return ProcessFailure(nullptr, L"Failed to QI for DXGI Device", L"Error", hr, nullptr);
+ }
+
+ IDXGIAdapter* DxgiAdapter = nullptr;
+ hr = DxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&DxgiAdapter));
+ DxgiDevice->Release();
+ DxgiDevice = nullptr;
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to get parent DXGI Adapter", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ hr = DxgiAdapter->GetParent(__uuidof(IDXGIFactory2), reinterpret_cast<void**>(&m_Factory));
+ DxgiAdapter->Release();
+ DxgiAdapter = nullptr;
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to get parent DXGI Factory", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Register for occlusion status windows message
+ if (m_WindowHandle)
+ {
+ hr = m_Factory->RegisterOcclusionStatusWindow(Window, OCCLUSION_STATUS_MSG, &m_OcclusionCookie);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to register for occlusion message", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ }
+
+ // Get window size
+ RECT WindowRect;
+ GetClientRect(m_WindowHandle ? m_WindowHandle : GetDesktopWindow(), &WindowRect);
+ UINT Width = WindowRect.right - WindowRect.left;
+ UINT Height = WindowRect.bottom - WindowRect.top;
+
+ if (m_WindowHandle)
+ {
+ // Create swapchain for window
+ DXGI_SWAP_CHAIN_DESC1 SwapChainDesc;
+ RtlZeroMemory(&SwapChainDesc, sizeof(SwapChainDesc));
+
+ SwapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
+ SwapChainDesc.BufferCount = 2;
+ SwapChainDesc.Width = Width;
+ SwapChainDesc.Height = Height;
+ SwapChainDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+ SwapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
+ SwapChainDesc.SampleDesc.Count = 1;
+ SwapChainDesc.SampleDesc.Quality = 0;
+ hr = m_Factory->CreateSwapChainForHwnd(m_Device, Window, &SwapChainDesc, nullptr, nullptr, &m_SwapChain);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create window swapchain", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Disable the ALT-ENTER shortcut for entering full-screen mode
+ hr = m_Factory->MakeWindowAssociation(Window, DXGI_MWA_NO_ALT_ENTER);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to make window association", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ }
+
+ // Create shared texture
+ DUPL_RETURN Return = CreateSharedSurf(SingleOutput, OutCount, DeskBounds);
+ if (Return != DUPL_RETURN_SUCCESS)
+ {
+ return Return;
+ }
+
+ // Make new render target view
+ if (m_WindowHandle)
+ {
+ Return = MakeRTV();
+ if (Return != DUPL_RETURN_SUCCESS)
+ {
+ return Return;
+ }
+ }
+
+ // Set view port
+ SetViewPort(Width, Height);
+
+ // Create the sample state
+ D3D11_SAMPLER_DESC SampDesc;
+ RtlZeroMemory(&SampDesc, sizeof(SampDesc));
+ SampDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
+ SampDesc.AddressU = D3D11_TEXTURE_ADDRESS_CLAMP;
+ SampDesc.AddressV = D3D11_TEXTURE_ADDRESS_CLAMP;
+ SampDesc.AddressW = D3D11_TEXTURE_ADDRESS_CLAMP;
+ SampDesc.ComparisonFunc = D3D11_COMPARISON_NEVER;
+ SampDesc.MinLOD = 0;
+ SampDesc.MaxLOD = D3D11_FLOAT32_MAX;
+ hr = m_Device->CreateSamplerState(&SampDesc, &m_SamplerLinear);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create sampler state in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ if (m_WindowHandle)
+ {
+ // Create the blend state
+ D3D11_BLEND_DESC BlendStateDesc;
+ BlendStateDesc.AlphaToCoverageEnable = FALSE;
+ BlendStateDesc.IndependentBlendEnable = FALSE;
+ BlendStateDesc.RenderTarget[0].BlendEnable = TRUE;
+ BlendStateDesc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
+ BlendStateDesc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
+ BlendStateDesc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
+ BlendStateDesc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ONE;
+ BlendStateDesc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
+ BlendStateDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
+ BlendStateDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
+ hr = m_Device->CreateBlendState(&BlendStateDesc, &m_BlendState);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create blend state in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Initialize shaders
+ Return = InitShaders();
+ if (Return != DUPL_RETURN_SUCCESS)
+ {
+ return Return;
+ }
+
+ GetWindowRect(m_WindowHandle, &WindowRect);
+ MoveWindow(m_WindowHandle, WindowRect.left, WindowRect.top, (DeskBounds->right - DeskBounds->left) / 2, (DeskBounds->bottom - DeskBounds->top) / 2, TRUE);
+ }
+
+ return Return;
+}
+
+//
+// Recreate shared texture
+//
+DUPL_RETURN OUTPUTMANAGER::CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCount, _Out_ RECT* DeskBounds)
+{
+ HRESULT hr;
+
+ // Get DXGI resources
+ IDXGIDevice* DxgiDevice = nullptr;
+ hr = m_Device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&DxgiDevice));
+ if (FAILED(hr))
+ {
+ return ProcessFailure(nullptr, L"Failed to QI for DXGI Device", L"Error", hr);
+ }
+
+ IDXGIAdapter* DxgiAdapter = nullptr;
+ hr = DxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&DxgiAdapter));
+ DxgiDevice->Release();
+ DxgiDevice = nullptr;
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to get parent DXGI Adapter", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Set initial values so that we always catch the right coordinates
+ DeskBounds->left = INT_MAX;
+ DeskBounds->right = INT_MIN;
+ DeskBounds->top = INT_MAX;
+ DeskBounds->bottom = INT_MIN;
+
+ IDXGIOutput* DxgiOutput = nullptr;
+
+ // Figure out right dimensions for full size desktop texture and # of outputs to duplicate
+ UINT OutputCount;
+ if (SingleOutput < 0)
+ {
+ hr = S_OK;
+ for (OutputCount = 0; SUCCEEDED(hr); ++OutputCount)
+ {
+ if (DxgiOutput)
+ {
+ DxgiOutput->Release();
+ DxgiOutput = nullptr;
+ }
+ hr = DxgiAdapter->EnumOutputs(OutputCount, &DxgiOutput);
+ if (DxgiOutput && (hr != DXGI_ERROR_NOT_FOUND))
+ {
+ DXGI_OUTPUT_DESC DesktopDesc;
+ DxgiOutput->GetDesc(&DesktopDesc);
+
+ DeskBounds->left = min(DesktopDesc.DesktopCoordinates.left, DeskBounds->left);
+ DeskBounds->top = min(DesktopDesc.DesktopCoordinates.top, DeskBounds->top);
+ DeskBounds->right = max(DesktopDesc.DesktopCoordinates.right, DeskBounds->right);
+ DeskBounds->bottom = max(DesktopDesc.DesktopCoordinates.bottom, DeskBounds->bottom);
+ }
+ }
+
+ --OutputCount;
+ }
+ else
+ {
+ hr = DxgiAdapter->EnumOutputs(SingleOutput, &DxgiOutput);
+ if (FAILED(hr))
+ {
+ DxgiAdapter->Release();
+ DxgiAdapter = nullptr;
+ return ProcessFailure(m_Device, L"Output specified to be duplicated does not exist", L"Error", hr);
+ }
+ DXGI_OUTPUT_DESC DesktopDesc;
+ DxgiOutput->GetDesc(&DesktopDesc);
+ *DeskBounds = DesktopDesc.DesktopCoordinates;
+
+ DxgiOutput->Release();
+ DxgiOutput = nullptr;
+
+ OutputCount = 1;
+ }
+
+ DxgiAdapter->Release();
+ DxgiAdapter = nullptr;
+
+ // Set passed in output count variable
+ *OutCount = OutputCount;
+
+ if (OutputCount == 0)
+ {
+ // We could not find any outputs, the system must be in a transition so return expected error
+ // so we will attempt to recreate
+ return DUPL_RETURN_ERROR_EXPECTED;
+ }
+
+ // Create shared texture for all duplication threads to draw into
+ D3D11_TEXTURE2D_DESC DeskTexD;
+ RtlZeroMemory(&DeskTexD, sizeof(D3D11_TEXTURE2D_DESC));
+ DeskTexD.Width = DeskBounds->right - DeskBounds->left;
+ DeskTexD.Height = DeskBounds->bottom - DeskBounds->top;
+ DeskTexD.MipLevels = 1;
+ DeskTexD.ArraySize = 1;
+ DeskTexD.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+ DeskTexD.SampleDesc.Count = 1;
+ DeskTexD.Usage = D3D11_USAGE_DEFAULT;
+ DeskTexD.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
+ DeskTexD.CPUAccessFlags = 0;
+ DeskTexD.MiscFlags = D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
+
+ hr = m_Device->CreateTexture2D(&DeskTexD, nullptr, &m_SharedSurf);
+ if (FAILED(hr))
+ {
+ if (OutputCount != 1)
+ {
+ // If we are duplicating the complete desktop we try to create a single texture to hold the
+ // complete desktop image and blit updates from the per output DDA interface. The GPU can
+ // always support a texture size of the maximum resolution of any single output but there is no
+ // guarantee that it can support a texture size of the desktop.
+ // The sample only use this large texture to display the desktop image in a single window using DX
+ // we could revert back to using GDI to update the window in this failure case.
+ return ProcessFailure(m_Device, L"Failed to create DirectX shared texture - we are attempting to create a texture the size of the complete desktop and this may be larger than the maximum texture size of your GPU. Please try again using the -output command line parameter to duplicate only 1 monitor or configure your computer to a single monitor configuration", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ else
+ {
+ return ProcessFailure(m_Device, L"Failed to create shared texture", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ }
+
+ // Get keyed mutex
+ hr = m_SharedSurf->QueryInterface(__uuidof(IDXGIKeyedMutex), reinterpret_cast<void**>(&m_KeyMutex));
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to query for keyed mutex in OUTPUTMANAGER", L"Error", hr);
+ }
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Present to the application window
+//
+DUPL_RETURN OUTPUTMANAGER::UpdateApplicationWindow(_In_ PTR_INFO* PointerInfo, _Inout_ bool* Occluded)
+{
+ // In a typical desktop duplication application there would be an application running on one system collecting the desktop images
+ // and another application running on a different system that receives the desktop images via a network and display the image. This
+ // sample contains both these aspects into a single application.
+ // This routine is the part of the sample that displays the desktop image onto the display
+
+ // Try and acquire sync on common display buffer
+ HRESULT hr = m_KeyMutex->AcquireSync(1, 100);
+ if (hr == static_cast<HRESULT>(WAIT_TIMEOUT))
+ {
+ // Another thread has the keyed mutex so try again later
+ return DUPL_RETURN_SUCCESS;
+ }
+ else if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to acquire Keyed mutex in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Got mutex, so draw
+ DUPL_RETURN Ret = DrawFrame();
+ if (Ret == DUPL_RETURN_SUCCESS)
+ {
+ // We have keyed mutex so we can access the mouse info
+ if (PointerInfo->Visible)
+ {
+ // Draw mouse into texture
+ Ret = DrawMouse(PointerInfo);
+ }
+ }
+
+ // Release keyed mutex
+ hr = m_KeyMutex->ReleaseSync(0);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to Release Keyed mutex in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Present to window if all worked
+ if (Ret == DUPL_RETURN_SUCCESS)
+ {
+ // Present to window
+ if (m_SwapChain)
+ {
+ hr = m_SwapChain->Present(1, 0);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to present", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ else if (hr == DXGI_STATUS_OCCLUDED)
+ {
+ *Occluded = true;
+ }
+ }
+ }
+
+ return Ret;
+}
+
+//
+// Returns shared handle
+//
+HANDLE OUTPUTMANAGER::GetSharedHandle()
+{
+ // Returns the shared handle of m_SharedSurf, or nullptr on failure.
+ HANDLE Hnd = nullptr;
+
+ // QI IDXGIResource interface to synchronized shared surface.
+ IDXGIResource* DXGIResource = nullptr;
+ HRESULT hr = m_SharedSurf->QueryInterface(__uuidof(IDXGIResource), reinterpret_cast<void**>(&DXGIResource));
+ if (SUCCEEDED(hr))
+ {
+ // Obtain handle to IDXGIResource object.
+ // NOTE(review): the HRESULT of GetSharedHandle() is ignored here; on
+ // failure Hnd simply stays nullptr, which callers must check for.
+ DXGIResource->GetSharedHandle(&Hnd);
+ DXGIResource->Release();
+ DXGIResource = nullptr;
+ }
+
+ return Hnd;
+}
+
+//
+// Draw frame into backbuffer
+//
+DUPL_RETURN OUTPUTMANAGER::DrawFrame()
+{
+ // Copies the shared desktop surface into the swapchain backbuffer by
+ // drawing a full-screen textured quad. Creates the SRV and vertex buffer
+ // per call and releases them before returning.
+ HRESULT hr;
+
+ // If window was resized, resize swapchain
+ if (m_NeedsResize)
+ {
+ DUPL_RETURN Ret = ResizeSwapChain();
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ return Ret;
+ }
+ m_NeedsResize = false;
+ }
+
+ // Vertices for drawing whole texture: two triangles covering clip space,
+ // with texture coordinates mapping the full surface
+ VERTEX Vertices[NUMVERTICES] =
+ {
+ {XMFLOAT3(-1.0f, -1.0f, 0), XMFLOAT2(0.0f, 1.0f)},
+ {XMFLOAT3(-1.0f, 1.0f, 0), XMFLOAT2(0.0f, 0.0f)},
+ {XMFLOAT3(1.0f, -1.0f, 0), XMFLOAT2(1.0f, 1.0f)},
+ {XMFLOAT3(1.0f, -1.0f, 0), XMFLOAT2(1.0f, 1.0f)},
+ {XMFLOAT3(-1.0f, 1.0f, 0), XMFLOAT2(0.0f, 0.0f)},
+ {XMFLOAT3(1.0f, 1.0f, 0), XMFLOAT2(1.0f, 0.0f)},
+ };
+
+ D3D11_TEXTURE2D_DESC FrameDesc;
+ m_SharedSurf->GetDesc(&FrameDesc);
+
+ // SRV description mirrors the shared surface's format/mip layout
+ D3D11_SHADER_RESOURCE_VIEW_DESC ShaderDesc;
+ ShaderDesc.Format = FrameDesc.Format;
+ ShaderDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
+ ShaderDesc.Texture2D.MostDetailedMip = FrameDesc.MipLevels - 1;
+ ShaderDesc.Texture2D.MipLevels = FrameDesc.MipLevels;
+
+ // Create new shader resource view
+ ID3D11ShaderResourceView* ShaderResource = nullptr;
+ hr = m_Device->CreateShaderResourceView(m_SharedSurf, &ShaderDesc, &ShaderResource);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create shader resource when drawing a frame", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Set resources; blending is disabled (nullptr blend state) for the plain copy
+ UINT Stride = sizeof(VERTEX);
+ UINT Offset = 0;
+ FLOAT blendFactor[4] = {0.f, 0.f, 0.f, 0.f};
+ m_DeviceContext->OMSetBlendState(nullptr, blendFactor, 0xffffffff);
+ m_DeviceContext->OMSetRenderTargets(1, &m_RTV, nullptr);
+ m_DeviceContext->VSSetShader(m_VertexShader, nullptr, 0);
+ m_DeviceContext->PSSetShader(m_PixelShader, nullptr, 0);
+ m_DeviceContext->PSSetShaderResources(0, 1, &ShaderResource);
+ m_DeviceContext->PSSetSamplers(0, 1, &m_SamplerLinear);
+ m_DeviceContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
+
+ D3D11_BUFFER_DESC BufferDesc;
+ RtlZeroMemory(&BufferDesc, sizeof(BufferDesc));
+ BufferDesc.Usage = D3D11_USAGE_DEFAULT;
+ BufferDesc.ByteWidth = sizeof(VERTEX) * NUMVERTICES;
+ BufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
+ BufferDesc.CPUAccessFlags = 0;
+ D3D11_SUBRESOURCE_DATA InitData;
+ RtlZeroMemory(&InitData, sizeof(InitData));
+ InitData.pSysMem = Vertices;
+
+ ID3D11Buffer* VertexBuffer = nullptr;
+
+ // Create vertex buffer
+ hr = m_Device->CreateBuffer(&BufferDesc, &InitData, &VertexBuffer);
+ if (FAILED(hr))
+ {
+ // Release the SRV created above before bailing out
+ ShaderResource->Release();
+ ShaderResource = nullptr;
+ return ProcessFailure(m_Device, L"Failed to create vertex buffer when drawing a frame", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ m_DeviceContext->IASetVertexBuffers(0, 1, &VertexBuffer, &Stride, &Offset);
+
+ // Draw textured quad onto render target
+ m_DeviceContext->Draw(NUMVERTICES, 0);
+
+ VertexBuffer->Release();
+ VertexBuffer = nullptr;
+
+ // Release shader resource
+ ShaderResource->Release();
+ ShaderResource = nullptr;
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Process both masked and monochrome pointers
+//
+DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInfo, _Out_ INT* PtrWidth, _Out_ INT* PtrHeight, _Out_ INT* PtrLeft, _Out_ INT* PtrTop, _Outptr_result_bytebuffer_(*PtrHeight * *PtrWidth * BPP) BYTE** InitBuffer, _Out_ D3D11_BOX* Box)
+{
+    // Converts a monochrome (IsMono == true) or masked-color pointer shape
+    // into a 32-bit BGRA buffer blended against the desktop pixels under the
+    // cursor. *InitBuffer is allocated here and owned by the caller
+    // (DrawMouse frees it). Also returns the clipped pointer rectangle
+    // (*PtrLeft/*PtrTop/*PtrWidth/*PtrHeight) and the desktop copy box (*Box).
+
+    // Desktop dimensions
+    D3D11_TEXTURE2D_DESC FullDesc;
+    m_SharedSurf->GetDesc(&FullDesc);
+    INT DesktopWidth = FullDesc.Width;
+    INT DesktopHeight = FullDesc.Height;
+
+    // Pointer position
+    INT GivenLeft = PtrInfo->Position.x;
+    INT GivenTop = PtrInfo->Position.y;
+
+    // Clip the pointer width against the desktop bounds
+    if (GivenLeft < 0)
+    {
+        *PtrWidth = GivenLeft + static_cast<INT>(PtrInfo->ShapeInfo.Width);
+    }
+    else if ((GivenLeft + static_cast<INT>(PtrInfo->ShapeInfo.Width)) > DesktopWidth)
+    {
+        *PtrWidth = DesktopWidth - GivenLeft;
+    }
+    else
+    {
+        *PtrWidth = static_cast<INT>(PtrInfo->ShapeInfo.Width);
+    }
+
+    // A monochrome shape stacks the AND mask above the XOR mask, so the
+    // drawable height is half the reported shape height while clipping.
+    if (IsMono)
+    {
+        PtrInfo->ShapeInfo.Height = PtrInfo->ShapeInfo.Height / 2;
+    }
+
+    if (GivenTop < 0)
+    {
+        *PtrHeight = GivenTop + static_cast<INT>(PtrInfo->ShapeInfo.Height);
+    }
+    else if ((GivenTop + static_cast<INT>(PtrInfo->ShapeInfo.Height)) > DesktopHeight)
+    {
+        *PtrHeight = DesktopHeight - GivenTop;
+    }
+    else
+    {
+        *PtrHeight = static_cast<INT>(PtrInfo->ShapeInfo.Height);
+    }
+
+    // Restore the original (doubled) height for the mask lookups below
+    if (IsMono)
+    {
+        PtrInfo->ShapeInfo.Height = PtrInfo->ShapeInfo.Height * 2;
+    }
+
+    *PtrLeft = (GivenLeft < 0) ? 0 : GivenLeft;
+    *PtrTop = (GivenTop < 0) ? 0 : GivenTop;
+
+    // Staging texture so the CPU can read the desktop pixels under the cursor
+    D3D11_TEXTURE2D_DESC CopyBufferDesc;
+    CopyBufferDesc.Width = *PtrWidth;
+    CopyBufferDesc.Height = *PtrHeight;
+    CopyBufferDesc.MipLevels = 1;
+    CopyBufferDesc.ArraySize = 1;
+    CopyBufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+    CopyBufferDesc.SampleDesc.Count = 1;
+    CopyBufferDesc.SampleDesc.Quality = 0;
+    CopyBufferDesc.Usage = D3D11_USAGE_STAGING;
+    CopyBufferDesc.BindFlags = 0;
+    CopyBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+    CopyBufferDesc.MiscFlags = 0;
+
+    ID3D11Texture2D* CopyBuffer = nullptr;
+    HRESULT hr = m_Device->CreateTexture2D(&CopyBufferDesc, nullptr, &CopyBuffer);
+    if (FAILED(hr))
+    {
+        return ProcessFailure(m_Device, L"Failed creating staging texture for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+    }
+
+    // Copy needed part of desktop image
+    Box->left = *PtrLeft;
+    Box->top = *PtrTop;
+    Box->right = *PtrLeft + *PtrWidth;
+    Box->bottom = *PtrTop + *PtrHeight;
+    m_DeviceContext->CopySubresourceRegion(CopyBuffer, 0, 0, 0, 0, m_SharedSurf, 0, Box);
+
+    // QI for IDXGISurface (the texture itself is no longer needed after this)
+    IDXGISurface* CopySurface = nullptr;
+    hr = CopyBuffer->QueryInterface(__uuidof(IDXGISurface), (void **)&CopySurface);
+    CopyBuffer->Release();
+    CopyBuffer = nullptr;
+    if (FAILED(hr))
+    {
+        return ProcessFailure(nullptr, L"Failed to QI staging texture into IDXGISurface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+    }
+
+    // Map pixels
+    DXGI_MAPPED_RECT MappedSurface;
+    hr = CopySurface->Map(&MappedSurface, DXGI_MAP_READ);
+    if (FAILED(hr))
+    {
+        CopySurface->Release();
+        CopySurface = nullptr;
+        return ProcessFailure(m_Device, L"Failed to map surface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+    }
+
+    // New mouseshape buffer (owned by the caller on success)
+    *InitBuffer = new (std::nothrow) BYTE[*PtrWidth * *PtrHeight * BPP];
+    if (!(*InitBuffer))
+    {
+        // FIX: unmap and release the staging surface before bailing out;
+        // the original code leaked the mapped surface on this path.
+        CopySurface->Unmap();
+        CopySurface->Release();
+        CopySurface = nullptr;
+        return ProcessFailure(nullptr, L"Failed to allocate memory for new mouse shape buffer.", L"Error", E_OUTOFMEMORY);
+    }
+
+    UINT* InitBuffer32 = reinterpret_cast<UINT*>(*InitBuffer);
+    UINT* Desktop32 = reinterpret_cast<UINT*>(MappedSurface.pBits);
+    UINT DesktopPitchInPixels = MappedSurface.Pitch / sizeof(UINT);
+
+    // What to skip (pixel offset into the shape when clipped at left/top)
+    UINT SkipX = (GivenLeft < 0) ? (-1 * GivenLeft) : (0);
+    UINT SkipY = (GivenTop < 0) ? (-1 * GivenTop) : (0);
+
+    if (IsMono)
+    {
+        for (INT Row = 0; Row < *PtrHeight; ++Row)
+        {
+            // Set mask (one bit per pixel, MSB first within each byte)
+            BYTE Mask = 0x80;
+            Mask = Mask >> (SkipX % 8);
+            for (INT Col = 0; Col < *PtrWidth; ++Col)
+            {
+                // AND mask is in the top half of the shape, XOR mask in the bottom half
+                BYTE AndMask = PtrInfo->PtrShapeBuffer[((Col + SkipX) / 8) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch))] & Mask;
+                BYTE XorMask = PtrInfo->PtrShapeBuffer[((Col + SkipX) / 8) + ((Row + SkipY + (PtrInfo->ShapeInfo.Height / 2)) * (PtrInfo->ShapeInfo.Pitch))] & Mask;
+                UINT AndMask32 = (AndMask) ? 0xFFFFFFFF : 0xFF000000;
+                UINT XorMask32 = (XorMask) ? 0x00FFFFFF : 0x00000000;
+
+                // Set new pixel: classic (desktop AND andmask) XOR xormask cursor blend
+                InitBuffer32[(Row * *PtrWidth) + Col] = (Desktop32[(Row * DesktopPitchInPixels) + Col] & AndMask32) ^ XorMask32;
+
+                // Adjust mask
+                if (Mask == 0x01)
+                {
+                    Mask = 0x80;
+                }
+                else
+                {
+                    Mask = Mask >> 1;
+                }
+            }
+        }
+    }
+    else
+    {
+        UINT* Buffer32 = reinterpret_cast<UINT*>(PtrInfo->PtrShapeBuffer);
+
+        // Iterate through pixels of the masked-color shape
+        for (INT Row = 0; Row < *PtrHeight; ++Row)
+        {
+            for (INT Col = 0; Col < *PtrWidth; ++Col)
+            {
+                // Alpha byte of the shape pixel selects XOR-blend vs. opaque copy
+                UINT MaskVal = 0xFF000000 & Buffer32[(Col + SkipX) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))];
+                if (MaskVal)
+                {
+                    // Mask was 0xFF: XOR shape with the desktop pixel
+                    InitBuffer32[(Row * *PtrWidth) + Col] = (Desktop32[(Row * DesktopPitchInPixels) + Col] ^ Buffer32[(Col + SkipX) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))]) | 0xFF000000;
+                }
+                else
+                {
+                    // Mask was 0x00: copy the shape pixel, forced opaque
+                    InitBuffer32[(Row * *PtrWidth) + Col] = Buffer32[(Col + SkipX) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))] | 0xFF000000;
+                }
+            }
+        }
+    }
+
+    // Done with resource
+    hr = CopySurface->Unmap();
+    CopySurface->Release();
+    CopySurface = nullptr;
+    if (FAILED(hr))
+    {
+        return ProcessFailure(m_Device, L"Failed to unmap surface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+    }
+
+    return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Draw mouse provided in buffer to backbuffer
+//
+DUPL_RETURN OUTPUTMANAGER::DrawMouse(_In_ PTR_INFO* PtrInfo)
+{
+    // Draws the mouse pointer described by PtrInfo on top of the current
+    // backbuffer contents, using alpha blending (m_BlendState). Mono and
+    // masked-color shapes are first pre-blended by ProcessMonoMask().
+
+    // Vars to be used
+    ID3D11Texture2D* MouseTex = nullptr;
+    ID3D11ShaderResourceView* ShaderRes = nullptr;
+    ID3D11Buffer* VertexBufferMouse = nullptr;
+    D3D11_SUBRESOURCE_DATA InitData;
+    D3D11_TEXTURE2D_DESC Desc;
+    D3D11_SHADER_RESOURCE_VIEW_DESC SDesc;
+
+    // Position will be changed based on mouse position
+    VERTEX Vertices[NUMVERTICES] =
+    {
+        {XMFLOAT3(-1.0f, -1.0f, 0), XMFLOAT2(0.0f, 1.0f)},
+        {XMFLOAT3(-1.0f, 1.0f, 0), XMFLOAT2(0.0f, 0.0f)},
+        {XMFLOAT3(1.0f, -1.0f, 0), XMFLOAT2(1.0f, 1.0f)},
+        {XMFLOAT3(1.0f, -1.0f, 0), XMFLOAT2(1.0f, 1.0f)},
+        {XMFLOAT3(-1.0f, 1.0f, 0), XMFLOAT2(0.0f, 0.0f)},
+        {XMFLOAT3(1.0f, 1.0f, 0), XMFLOAT2(1.0f, 0.0f)},
+    };
+
+    D3D11_TEXTURE2D_DESC FullDesc;
+    m_SharedSurf->GetDesc(&FullDesc);
+    INT DesktopWidth = FullDesc.Width;
+    INT DesktopHeight = FullDesc.Height;
+
+    // Center of desktop dimensions (for pixel -> clip-space conversion)
+    INT CenterX = (DesktopWidth / 2);
+    INT CenterY = (DesktopHeight / 2);
+
+    // Clipping adjusted coordinates / dimensions
+    INT PtrWidth = 0;
+    INT PtrHeight = 0;
+    INT PtrLeft = 0;
+    INT PtrTop = 0;
+
+    // Buffer used if necessary (in case of monochrome or masked pointer)
+    BYTE* InitBuffer = nullptr;
+
+    // Used for copying pixels
+    D3D11_BOX Box;
+    Box.front = 0;
+    Box.back = 1;
+
+    Desc.MipLevels = 1;
+    Desc.ArraySize = 1;
+    Desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+    Desc.SampleDesc.Count = 1;
+    Desc.SampleDesc.Quality = 0;
+    Desc.Usage = D3D11_USAGE_DEFAULT;
+    Desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
+    Desc.CPUAccessFlags = 0;
+    Desc.MiscFlags = 0;
+
+    // Set shader resource properties
+    SDesc.Format = Desc.Format;
+    SDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
+    SDesc.Texture2D.MostDetailedMip = Desc.MipLevels - 1;
+    SDesc.Texture2D.MipLevels = Desc.MipLevels;
+
+    // FIX: capture and check the result of ProcessMonoMask(); the original
+    // code ignored it, which could feed a nullptr pSysMem to CreateTexture2D.
+    DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
+    switch (PtrInfo->ShapeInfo.Type)
+    {
+        case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR:
+        {
+            // Color shapes can be uploaded as-is
+            PtrLeft = PtrInfo->Position.x;
+            PtrTop = PtrInfo->Position.y;
+
+            PtrWidth = static_cast<INT>(PtrInfo->ShapeInfo.Width);
+            PtrHeight = static_cast<INT>(PtrInfo->ShapeInfo.Height);
+
+            break;
+        }
+
+        case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME:
+        {
+            Ret = ProcessMonoMask(true, PtrInfo, &PtrWidth, &PtrHeight, &PtrLeft, &PtrTop, &InitBuffer, &Box);
+            break;
+        }
+
+        case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR:
+        {
+            Ret = ProcessMonoMask(false, PtrInfo, &PtrWidth, &PtrHeight, &PtrLeft, &PtrTop, &InitBuffer, &Box);
+            break;
+        }
+
+        default:
+            break;
+    }
+    if (Ret != DUPL_RETURN_SUCCESS)
+    {
+        if (InitBuffer)
+        {
+            delete [] InitBuffer;
+            InitBuffer = nullptr;
+        }
+        return Ret;
+    }
+
+    // VERTEX creation: map the pointer rectangle (pixels) into clip space
+    Vertices[0].Pos.x = (PtrLeft - CenterX) / (FLOAT)CenterX;
+    Vertices[0].Pos.y = -1 * ((PtrTop + PtrHeight) - CenterY) / (FLOAT)CenterY;
+    Vertices[1].Pos.x = (PtrLeft - CenterX) / (FLOAT)CenterX;
+    Vertices[1].Pos.y = -1 * (PtrTop - CenterY) / (FLOAT)CenterY;
+    Vertices[2].Pos.x = ((PtrLeft + PtrWidth) - CenterX) / (FLOAT)CenterX;
+    Vertices[2].Pos.y = -1 * ((PtrTop + PtrHeight) - CenterY) / (FLOAT)CenterY;
+    Vertices[3].Pos.x = Vertices[2].Pos.x;
+    Vertices[3].Pos.y = Vertices[2].Pos.y;
+    Vertices[4].Pos.x = Vertices[1].Pos.x;
+    Vertices[4].Pos.y = Vertices[1].Pos.y;
+    Vertices[5].Pos.x = ((PtrLeft + PtrWidth) - CenterX) / (FLOAT)CenterX;
+    Vertices[5].Pos.y = -1 * (PtrTop - CenterY) / (FLOAT)CenterY;
+
+    // Set texture properties
+    Desc.Width = PtrWidth;
+    Desc.Height = PtrHeight;
+
+    // Set up init data: color shapes upload the raw shape buffer, others the
+    // pre-blended InitBuffer produced by ProcessMonoMask()
+    InitData.pSysMem = (PtrInfo->ShapeInfo.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR) ? PtrInfo->PtrShapeBuffer : InitBuffer;
+    InitData.SysMemPitch = (PtrInfo->ShapeInfo.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR) ? PtrInfo->ShapeInfo.Pitch : PtrWidth * BPP;
+    InitData.SysMemSlicePitch = 0;
+
+    // Create mouseshape as texture
+    HRESULT hr = m_Device->CreateTexture2D(&Desc, &InitData, &MouseTex);
+    if (FAILED(hr))
+    {
+        // FIX: free InitBuffer on every failure path (was leaked before)
+        if (InitBuffer)
+        {
+            delete [] InitBuffer;
+            InitBuffer = nullptr;
+        }
+        return ProcessFailure(m_Device, L"Failed to create mouse pointer texture", L"Error", hr, SystemTransitionsExpectedErrors);
+    }
+
+    // Create shader resource from texture
+    hr = m_Device->CreateShaderResourceView(MouseTex, &SDesc, &ShaderRes);
+    if (FAILED(hr))
+    {
+        MouseTex->Release();
+        MouseTex = nullptr;
+        if (InitBuffer)
+        {
+            delete [] InitBuffer;
+            InitBuffer = nullptr;
+        }
+        return ProcessFailure(m_Device, L"Failed to create shader resource from mouse pointer texture", L"Error", hr, SystemTransitionsExpectedErrors);
+    }
+
+    D3D11_BUFFER_DESC BDesc;
+    ZeroMemory(&BDesc, sizeof(D3D11_BUFFER_DESC));
+    BDesc.Usage = D3D11_USAGE_DEFAULT;
+    BDesc.ByteWidth = sizeof(VERTEX) * NUMVERTICES;
+    BDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
+    BDesc.CPUAccessFlags = 0;
+
+    ZeroMemory(&InitData, sizeof(D3D11_SUBRESOURCE_DATA));
+    InitData.pSysMem = Vertices;
+
+    // Create vertex buffer
+    hr = m_Device->CreateBuffer(&BDesc, &InitData, &VertexBufferMouse);
+    if (FAILED(hr))
+    {
+        ShaderRes->Release();
+        ShaderRes = nullptr;
+        MouseTex->Release();
+        MouseTex = nullptr;
+        if (InitBuffer)
+        {
+            delete [] InitBuffer;
+            InitBuffer = nullptr;
+        }
+        return ProcessFailure(m_Device, L"Failed to create mouse pointer vertex buffer in OutputManager", L"Error", hr, SystemTransitionsExpectedErrors);
+    }
+
+    // Set resources: alpha blending enabled so the cursor composites correctly
+    FLOAT BlendFactor[4] = {0.f, 0.f, 0.f, 0.f};
+    UINT Stride = sizeof(VERTEX);
+    UINT Offset = 0;
+    m_DeviceContext->IASetVertexBuffers(0, 1, &VertexBufferMouse, &Stride, &Offset);
+    m_DeviceContext->OMSetBlendState(m_BlendState, BlendFactor, 0xFFFFFFFF);
+    m_DeviceContext->OMSetRenderTargets(1, &m_RTV, nullptr);
+    m_DeviceContext->VSSetShader(m_VertexShader, nullptr, 0);
+    m_DeviceContext->PSSetShader(m_PixelShader, nullptr, 0);
+    m_DeviceContext->PSSetShaderResources(0, 1, &ShaderRes);
+    m_DeviceContext->PSSetSamplers(0, 1, &m_SamplerLinear);
+
+    // Draw
+    m_DeviceContext->Draw(NUMVERTICES, 0);
+
+    // Clean
+    if (VertexBufferMouse)
+    {
+        VertexBufferMouse->Release();
+        VertexBufferMouse = nullptr;
+    }
+    if (ShaderRes)
+    {
+        ShaderRes->Release();
+        ShaderRes = nullptr;
+    }
+    if (MouseTex)
+    {
+        MouseTex->Release();
+        MouseTex = nullptr;
+    }
+    if (InitBuffer)
+    {
+        delete [] InitBuffer;
+        InitBuffer = nullptr;
+    }
+
+    return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Initialize shaders for drawing to screen
+//
+DUPL_RETURN OUTPUTMANAGER::InitShaders()
+{
+ // Creates the vertex shader, input layout and pixel shader from the
+ // precompiled bytecode arrays g_VS / g_PS, and binds the input layout.
+ HRESULT hr;
+
+ UINT Size = ARRAYSIZE(g_VS);
+ hr = m_Device->CreateVertexShader(g_VS, Size, nullptr, &m_VertexShader);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create vertex shader in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Layout matches VERTEX: float3 position followed by float2 texcoord
+ D3D11_INPUT_ELEMENT_DESC Layout[] =
+ {
+ {"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
+ {"TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0}
+ };
+ UINT NumElements = ARRAYSIZE(Layout);
+ hr = m_Device->CreateInputLayout(Layout, NumElements, g_VS, Size, &m_InputLayout);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create input layout in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ m_DeviceContext->IASetInputLayout(m_InputLayout);
+
+ Size = ARRAYSIZE(g_PS);
+ hr = m_Device->CreatePixelShader(g_PS, Size, nullptr, &m_PixelShader);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create pixel shader in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Reset render target view
+//
+DUPL_RETURN OUTPUTMANAGER::MakeRTV()
+{
+ // (Re)creates m_RTV from the swapchain's backbuffer and binds it.
+ // Get backbuffer
+ ID3D11Texture2D* BackBuffer = nullptr;
+ HRESULT hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&BackBuffer));
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to get backbuffer for making render target view in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Create a render target view (the view keeps its own reference, so the
+ // backbuffer can be released immediately afterwards)
+ hr = m_Device->CreateRenderTargetView(BackBuffer, nullptr, &m_RTV);
+ BackBuffer->Release();
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to create render target view in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Set new render target
+ m_DeviceContext->OMSetRenderTargets(1, &m_RTV, nullptr);
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Set new viewport
+//
+void OUTPUTMANAGER::SetViewPort(UINT Width, UINT Height)
+{
+ // Binds a full-window viewport of the given size with the standard 0..1 depth range.
+ D3D11_VIEWPORT VP;
+ VP.Width = static_cast<FLOAT>(Width);
+ VP.Height = static_cast<FLOAT>(Height);
+ VP.MinDepth = 0.0f;
+ VP.MaxDepth = 1.0f;
+ VP.TopLeftX = 0;
+ VP.TopLeftY = 0;
+ m_DeviceContext->RSSetViewports(1, &VP);
+}
+
+//
+// Resize swapchain
+//
+DUPL_RETURN OUTPUTMANAGER::ResizeSwapChain()
+{
+ // Resizes the swapchain buffers to the window's current client area,
+ // then rebuilds the render target view and viewport.
+ // The old RTV must be released before ResizeBuffers can succeed.
+ if (m_RTV)
+ {
+ m_RTV->Release();
+ m_RTV = nullptr;
+ }
+
+ RECT WindowRect;
+ GetClientRect(m_WindowHandle, &WindowRect);
+ UINT Width = WindowRect.right - WindowRect.left;
+ UINT Height = WindowRect.bottom - WindowRect.top;
+
+ // Resize swapchain, preserving buffer count, format and flags
+ DXGI_SWAP_CHAIN_DESC SwapChainDesc;
+ m_SwapChain->GetDesc(&SwapChainDesc);
+ HRESULT hr = m_SwapChain->ResizeBuffers(SwapChainDesc.BufferCount, Width, Height, SwapChainDesc.BufferDesc.Format, SwapChainDesc.Flags);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(m_Device, L"Failed to resize swapchain buffers in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Make new render target view
+ DUPL_RETURN Ret = MakeRTV();
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ return Ret;
+ }
+
+ // Set new viewport
+ SetViewPort(Width, Height);
+
+ return Ret;
+}
+
+//
+// Releases all references
+//
+void OUTPUTMANAGER::CleanRefs()
+{
+ // Releases every COM reference held by this object and nulls the members
+ // so the object can be safely re-initialized or destroyed.
+ if (m_VertexShader)
+ {
+ m_VertexShader->Release();
+ m_VertexShader = nullptr;
+ }
+
+ if (m_PixelShader)
+ {
+ m_PixelShader->Release();
+ m_PixelShader = nullptr;
+ }
+
+ if (m_InputLayout)
+ {
+ m_InputLayout->Release();
+ m_InputLayout = nullptr;
+ }
+
+ if (m_RTV)
+ {
+ m_RTV->Release();
+ m_RTV = nullptr;
+ }
+
+ if (m_SamplerLinear)
+ {
+ m_SamplerLinear->Release();
+ m_SamplerLinear = nullptr;
+ }
+
+ if (m_BlendState)
+ {
+ m_BlendState->Release();
+ m_BlendState = nullptr;
+ }
+
+ if (m_DeviceContext)
+ {
+ m_DeviceContext->Release();
+ m_DeviceContext = nullptr;
+ }
+
+ if (m_Device)
+ {
+ m_Device->Release();
+ m_Device = nullptr;
+ }
+
+ if (m_SwapChain)
+ {
+ m_SwapChain->Release();
+ m_SwapChain = nullptr;
+ }
+
+ if (m_SharedSurf)
+ {
+ m_SharedSurf->Release();
+ m_SharedSurf = nullptr;
+ }
+
+ if (m_KeyMutex)
+ {
+ m_KeyMutex->Release();
+ m_KeyMutex = nullptr;
+ }
+
+ // Unregister the occlusion-status callback before dropping the factory
+ if (m_Factory)
+ {
+ if (m_OcclusionCookie)
+ {
+ m_Factory->UnregisterOcclusionStatus(m_OcclusionCookie);
+ m_OcclusionCookie = 0;
+ }
+ m_Factory->Release();
+ m_Factory = nullptr;
+ }
+}
diff --git a/plugins/pluginWinDD/internals/OutputManager.h b/plugins/pluginWinDD/internals/OutputManager.h
new file mode 100644
index 0000000..cd16e5f
--- /dev/null
+++ b/plugins/pluginWinDD/internals/OutputManager.h
@@ -0,0 +1,61 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#ifndef _OUTPUTMANAGER_H_
+#define _OUTPUTMANAGER_H_
+
+#include <stdio.h>
+
+#include "CommonTypes.h"
+#include "warning.h"
+
+//
+// Handles the task of drawing into a window.
+// Has the functionality to draw the mouse given a mouse shape buffer and position
+//
+class OUTPUTMANAGER
+{
+ public:
+ OUTPUTMANAGER();
+ ~OUTPUTMANAGER();
+ // Sets up D3D device, swapchain and the keyed-mutex shared surface
+ DUPL_RETURN InitOutput(HWND Window, INT SingleOutput, _Out_ UINT* OutCount, _Out_ RECT* DeskBounds);
+ // Copies the shared surface (plus mouse, if visible) into the window
+ DUPL_RETURN UpdateApplicationWindow(_In_ PTR_INFO* PointerInfo, _Inout_ bool* Occluded);
+ void CleanRefs();
+ // Shared handle of the desktop surface, or nullptr on failure
+ HANDLE GetSharedHandle();
+ void WindowResize();
+
+ private:
+ // Methods
+ DUPL_RETURN ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInfo, _Out_ INT* PtrWidth, _Out_ INT* PtrHeight, _Out_ INT* PtrLeft, _Out_ INT* PtrTop, _Outptr_result_bytebuffer_(*PtrHeight * *PtrWidth * BPP) BYTE** InitBuffer, _Out_ D3D11_BOX* Box);
+ DUPL_RETURN MakeRTV();
+ void SetViewPort(UINT Width, UINT Height);
+ DUPL_RETURN InitShaders();
+ DUPL_RETURN InitGeometry();
+ DUPL_RETURN CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCount, _Out_ RECT* DeskBounds);
+ DUPL_RETURN DrawFrame();
+ DUPL_RETURN DrawMouse(_In_ PTR_INFO* PtrInfo);
+ DUPL_RETURN ResizeSwapChain();
+
+ // Vars (all COM pointers are released by CleanRefs())
+ IDXGISwapChain1* m_SwapChain;
+ ID3D11Device* m_Device;
+ IDXGIFactory2* m_Factory;
+ ID3D11DeviceContext* m_DeviceContext;
+ ID3D11RenderTargetView* m_RTV;
+ ID3D11SamplerState* m_SamplerLinear;
+ ID3D11BlendState* m_BlendState;
+ ID3D11VertexShader* m_VertexShader;
+ ID3D11PixelShader* m_PixelShader;
+ ID3D11InputLayout* m_InputLayout;
+ ID3D11Texture2D* m_SharedSurf;
+ IDXGIKeyedMutex* m_KeyMutex;
+ HWND m_WindowHandle;
+ bool m_NeedsResize;
+ DWORD m_OcclusionCookie;
+};
+
+#endif
diff --git a/plugins/pluginWinDD/internals/PixelShader.hlsl b/plugins/pluginWinDD/internals/PixelShader.hlsl
new file mode 100644
index 0000000..db0067d
--- /dev/null
+++ b/plugins/pluginWinDD/internals/PixelShader.hlsl
@@ -0,0 +1,24 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+//----------------------------------------------------------------------
+
+Texture2D tx : register( t0 );
+SamplerState samLinear : register( s0 );
+
+struct PS_INPUT
+{
+ float4 Pos : SV_POSITION;
+ float2 Tex : TEXCOORD;
+};
+
+//--------------------------------------------------------------------------------------
+// Pixel Shader
+//--------------------------------------------------------------------------------------
+// Straight textured pass-through: sample the bound texture at the
+// interpolated coordinate with the linear sampler.
+float4 PS(PS_INPUT input) : SV_Target
+{
+ return tx.Sample( samLinear, input.Tex );
+} \ No newline at end of file
diff --git a/plugins/pluginWinDD/internals/ThreadManager.cxx b/plugins/pluginWinDD/internals/ThreadManager.cxx
new file mode 100644
index 0000000..68fe757
--- /dev/null
+++ b/plugins/pluginWinDD/internals/ThreadManager.cxx
@@ -0,0 +1,261 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#include "ThreadManager.h"
+
+DWORD WINAPI DDProc(_In_ void* Param);
+
+THREADMANAGER::THREADMANAGER() : m_ThreadCount(0),
+ m_ThreadHandles(nullptr),
+ m_ThreadData(nullptr)
+{
+ // Start with no threads and an empty pointer-shape record.
+ RtlZeroMemory(&m_PtrInfo, sizeof(m_PtrInfo));
+}
+
+THREADMANAGER::~THREADMANAGER()
+{
+ // Release thread handles, per-thread DX resources and the pointer buffer.
+ Clean();
+}
+
+//
+// Clean up resources
+//
+void THREADMANAGER::Clean()
+{
+ // Frees the pointer-shape buffer, closes all thread handles and releases
+ // each thread's DX resources. Does NOT wait for the threads to exit;
+ // callers are expected to use WaitForThreadTermination() first.
+ if (m_PtrInfo.PtrShapeBuffer)
+ {
+ delete [] m_PtrInfo.PtrShapeBuffer;
+ m_PtrInfo.PtrShapeBuffer = nullptr;
+ }
+ RtlZeroMemory(&m_PtrInfo, sizeof(m_PtrInfo));
+
+ if (m_ThreadHandles)
+ {
+ for (UINT i = 0; i < m_ThreadCount; ++i)
+ {
+ if (m_ThreadHandles[i])
+ {
+ CloseHandle(m_ThreadHandles[i]);
+ }
+ }
+ delete [] m_ThreadHandles;
+ m_ThreadHandles = nullptr;
+ }
+
+ if (m_ThreadData)
+ {
+ for (UINT i = 0; i < m_ThreadCount; ++i)
+ {
+ CleanDx(&m_ThreadData[i].DxRes);
+ }
+ delete [] m_ThreadData;
+ m_ThreadData = nullptr;
+ }
+
+ m_ThreadCount = 0;
+}
+
+//
+// Clean up DX_RESOURCES
+//
+void THREADMANAGER::CleanDx(_Inout_ DX_RESOURCES* Data)
+{
+ // Releases every COM reference in a per-thread DX_RESOURCES bundle and
+ // nulls the pointers so a second call is a no-op.
+ if (Data->Device)
+ {
+ Data->Device->Release();
+ Data->Device = nullptr;
+ }
+
+ if (Data->Context)
+ {
+ Data->Context->Release();
+ Data->Context = nullptr;
+ }
+
+ if (Data->VertexShader)
+ {
+ Data->VertexShader->Release();
+ Data->VertexShader = nullptr;
+ }
+
+ if (Data->PixelShader)
+ {
+ Data->PixelShader->Release();
+ Data->PixelShader = nullptr;
+ }
+
+ if (Data->InputLayout)
+ {
+ Data->InputLayout->Release();
+ Data->InputLayout = nullptr;
+ }
+
+ if (Data->SamplerLinear)
+ {
+ Data->SamplerLinear->Release();
+ Data->SamplerLinear = nullptr;
+ }
+}
+
+//
+// Start up threads for DDA
+//
+DUPL_RETURN THREADMANAGER::Initialize(INT SingleOutput, UINT OutputCount, HANDLE UnexpectedErrorEvent, HANDLE ExpectedErrorEvent, HANDLE TerminateThreadsEvent, HANDLE SharedHandle, _In_ const struct tmedia_producer_s* Producer, _In_ RECT* DesktopDim)
+{
+    // Spawns one duplication thread per output (or a single thread when
+    // SingleOutput >= 0 selects a specific output). On failure the partially
+    // built state is safe to tear down with Clean().
+    m_ThreadCount = OutputCount;
+    m_ThreadHandles = new (std::nothrow) HANDLE[m_ThreadCount];
+    m_ThreadData = new (std::nothrow) THREAD_DATA[m_ThreadCount];
+    if (!m_ThreadHandles || !m_ThreadData)
+    {
+        return ProcessFailure(nullptr, L"Failed to allocate array for threads", L"Error", E_OUTOFMEMORY);
+    }
+
+    // FIX: zero both arrays up front. The original left them uninitialized,
+    // so an early return from the loop below made Clean() call CloseHandle()
+    // on garbage handles and CleanDx() on garbage COM pointers.
+    RtlZeroMemory(m_ThreadHandles, m_ThreadCount * sizeof(m_ThreadHandles[0]));
+    RtlZeroMemory(m_ThreadData, m_ThreadCount * sizeof(m_ThreadData[0]));
+
+    // Create appropriate # of threads for duplication
+    DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
+    for (UINT i = 0; i < m_ThreadCount; ++i)
+    {
+        m_ThreadData[i].UnexpectedErrorEvent = UnexpectedErrorEvent;
+        m_ThreadData[i].ExpectedErrorEvent = ExpectedErrorEvent;
+        m_ThreadData[i].TerminateThreadsEvent = TerminateThreadsEvent;
+        m_ThreadData[i].Output = (SingleOutput < 0) ? i : SingleOutput;
+        m_ThreadData[i].TexSharedHandle = SharedHandle;
+        m_ThreadData[i].OffsetX = DesktopDim->left;
+        m_ThreadData[i].OffsetY = DesktopDim->top;
+        m_ThreadData[i].PtrInfo = &m_PtrInfo;
+        m_ThreadData[i].Producer = Producer;
+
+        // Per-thread D3D device/context/shaders (DxRes already zeroed above)
+        Ret = InitializeDx(&m_ThreadData[i].DxRes);
+        if (Ret != DUPL_RETURN_SUCCESS)
+        {
+            return Ret;
+        }
+
+        DWORD ThreadId;
+        m_ThreadHandles[i] = CreateThread(nullptr, 0, DDProc, &m_ThreadData[i], 0, &ThreadId);
+        if (m_ThreadHandles[i] == nullptr)
+        {
+            return ProcessFailure(nullptr, L"Failed to create thread", L"Error", E_FAIL);
+        }
+    }
+
+    return Ret;
+}
+
+//
+// Get DX_RESOURCES
+//
+DUPL_RETURN THREADMANAGER::InitializeDx(_Out_ DX_RESOURCES* Data)
+{
+ // Creates a per-thread D3D11 device/context plus the shaders, input
+ // layout and sampler needed by a duplication thread.
+ HRESULT hr = S_OK;
+
+ // Driver types supported, tried in order of preference
+ D3D_DRIVER_TYPE DriverTypes[] =
+ {
+ D3D_DRIVER_TYPE_HARDWARE,
+ D3D_DRIVER_TYPE_WARP,
+ D3D_DRIVER_TYPE_REFERENCE,
+ };
+ UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
+
+ // Feature levels supported
+ D3D_FEATURE_LEVEL FeatureLevels[] =
+ {
+ D3D_FEATURE_LEVEL_11_0,
+ D3D_FEATURE_LEVEL_10_1,
+ D3D_FEATURE_LEVEL_10_0,
+ D3D_FEATURE_LEVEL_9_1
+ };
+ UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
+
+ D3D_FEATURE_LEVEL FeatureLevel;
+
+ // Create device, falling back through the driver types above
+ for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex)
+ {
+ hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr, 0, FeatureLevels, NumFeatureLevels,
+ D3D11_SDK_VERSION, &Data->Device, &FeatureLevel, &Data->Context);
+ if (SUCCEEDED(hr))
+ {
+ // Device creation success, no need to loop anymore
+ break;
+ }
+ }
+ if (FAILED(hr))
+ {
+ return ProcessFailure(nullptr, L"Failed to create device in InitializeDx", L"Error", hr);
+ }
+
+ // VERTEX shader from the precompiled bytecode g_VS
+ UINT Size = ARRAYSIZE(g_VS);
+ hr = Data->Device->CreateVertexShader(g_VS, Size, nullptr, &Data->VertexShader);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(Data->Device, L"Failed to create vertex shader in InitializeDx", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Input layout: float3 position followed by float2 texcoord
+ D3D11_INPUT_ELEMENT_DESC Layout[] =
+ {
+ {"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
+ {"TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0}
+ };
+ UINT NumElements = ARRAYSIZE(Layout);
+ hr = Data->Device->CreateInputLayout(Layout, NumElements, g_VS, Size, &Data->InputLayout);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(Data->Device, L"Failed to create input layout in InitializeDx", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ Data->Context->IASetInputLayout(Data->InputLayout);
+
+ // Pixel shader from the precompiled bytecode g_PS
+ Size = ARRAYSIZE(g_PS);
+ hr = Data->Device->CreatePixelShader(g_PS, Size, nullptr, &Data->PixelShader);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(Data->Device, L"Failed to create pixel shader in InitializeDx", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Set up linear-filtering, clamped sampler
+ D3D11_SAMPLER_DESC SampDesc;
+ RtlZeroMemory(&SampDesc, sizeof(SampDesc));
+ SampDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
+ SampDesc.AddressU = D3D11_TEXTURE_ADDRESS_CLAMP;
+ SampDesc.AddressV = D3D11_TEXTURE_ADDRESS_CLAMP;
+ SampDesc.AddressW = D3D11_TEXTURE_ADDRESS_CLAMP;
+ SampDesc.ComparisonFunc = D3D11_COMPARISON_NEVER;
+ SampDesc.MinLOD = 0;
+ SampDesc.MaxLOD = D3D11_FLOAT32_MAX;
+ hr = Data->Device->CreateSamplerState(&SampDesc, &Data->SamplerLinear);
+ if (FAILED(hr))
+ {
+ return ProcessFailure(Data->Device, L"Failed to create sampler state in InitializeDx", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ return DUPL_RETURN_SUCCESS;
+}
+
+//
+// Getter for the PTR_INFO structure
+//
+PTR_INFO* THREADMANAGER::GetPointerInfo()
+{
+ // Exposes the shared mouse-pointer record; storage is owned by this manager.
+ return &m_PtrInfo;
+}
+
+//
+// Waits for all spawned threads to terminate, up to 'timeout' milliseconds (INFINITE by default)
+//
+bool THREADMANAGER::WaitForThreadTermination(DWORD timeout /*= INFINITE*/)
+{
+ // Waits for ALL spawned threads to exit; returns false on timeout.
+ // NOTE(review): WaitForMultipleObjectsEx is limited to MAXIMUM_WAIT_OBJECTS
+ // (64) handles; m_ThreadCount is presumably well below that — confirm.
+ bool bRet = true;
+ if (m_ThreadCount != 0)
+ {
+ bRet = (WaitForMultipleObjectsEx(m_ThreadCount, m_ThreadHandles, TRUE, timeout, FALSE) != WAIT_TIMEOUT);
+ }
+ return bRet;
+}
diff --git a/plugins/pluginWinDD/internals/ThreadManager.h b/plugins/pluginWinDD/internals/ThreadManager.h
new file mode 100644
index 0000000..d710998
--- /dev/null
+++ b/plugins/pluginWinDD/internals/ThreadManager.h
@@ -0,0 +1,33 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+
+#ifndef _THREADMANAGER_H_
+#define _THREADMANAGER_H_
+
+#include "CommonTypes.h"
+
+class THREADMANAGER
+{
+ public:
+ THREADMANAGER();
+ ~THREADMANAGER();
+ // Closes thread handles and frees per-thread resources (no waiting)
+ void Clean();
+ // Creates one duplication thread per output (or one for SingleOutput >= 0)
+ DUPL_RETURN Initialize(INT SingleOutput, UINT OutputCount, HANDLE UnexpectedErrorEvent, HANDLE ExpectedErrorEvent, HANDLE TerminateThreadsEvent, HANDLE SharedHandle, _In_ const struct tmedia_producer_s* Producer, _In_ RECT* DesktopDim);
+ // Shared mouse-pointer record, owned by this manager
+ PTR_INFO* GetPointerInfo();
+ // Waits for all threads to exit; false on timeout
+ bool WaitForThreadTermination(DWORD timeout = INFINITE);
+
+ private:
+ DUPL_RETURN InitializeDx(_Out_ DX_RESOURCES* Data);
+ void CleanDx(_Inout_ DX_RESOURCES* Data);
+
+ PTR_INFO m_PtrInfo;
+ UINT m_ThreadCount;
+ _Field_size_(m_ThreadCount) HANDLE* m_ThreadHandles;
+ _Field_size_(m_ThreadCount) THREAD_DATA* m_ThreadData;
+};
+
+#endif
diff --git a/plugins/pluginWinDD/internals/VertexShader.hlsl b/plugins/pluginWinDD/internals/VertexShader.hlsl
new file mode 100644
index 0000000..95f9435
--- /dev/null
+++ b/plugins/pluginWinDD/internals/VertexShader.hlsl
@@ -0,0 +1,28 @@
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+//----------------------------------------------------------------------
+
+// Vertex input: position + texture coordinate, as fed by the input layout.
+struct VS_INPUT
+{
+ float4 Pos : POSITION;
+ float2 Tex : TEXCOORD;
+};
+
+// Vertex output: same fields, with the position bound to SV_POSITION.
+struct VS_OUTPUT
+{
+ float4 Pos : SV_POSITION;
+ float2 Tex : TEXCOORD;
+};
+
+
+//--------------------------------------------------------------------------------------
+// Vertex Shader
+//--------------------------------------------------------------------------------------
+// Pure pass-through: VS_INPUT and VS_OUTPUT have identical field layouts, so HLSL's
+// implicit struct conversion lets us return the input unchanged (positions are
+// presumably already in clip space — supplied by the CPU-side quad setup).
+VS_OUTPUT VS(VS_INPUT input)
+{
+ return input;
+} \ No newline at end of file
diff --git a/plugins/pluginWinDD/pluginWinDD.vcproj b/plugins/pluginWinDD/pluginWinDD.vcproj
new file mode 100644
index 0000000..df7152c
--- /dev/null
+++ b/plugins/pluginWinDD/pluginWinDD.vcproj
@@ -0,0 +1,189 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9,00"
+ Name="pluginWinDD"
+ ProjectGUID="{64DABDF0-D9AB-45BD-A269-002EF6E72E27}"
+ RootNamespace="pluginWinDD"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="196613"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;PLUGINWINDD_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="2"
+ EnableIntrinsicFunctions="true"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;PLUGINWINDD_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ DebugInformationFormat="3"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ LinkIncremental="1"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/pluginWinDD/plugin_win_dd_config.h b/plugins/pluginWinDD/plugin_win_dd_config.h
new file mode 100644
index 0000000..a1d3855
--- /dev/null
+++ b/plugins/pluginWinDD/plugin_win_dd_config.h
@@ -0,0 +1,75 @@
+/* Copyright (C) 2015 Mamadou DIOP
+* Copyright (C) 2015 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_DD_CONFIG_H
+#define PLUGIN_WIN_DD_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+# define PLUGIN_WIN_DD_UNDER_WINDOWS 1
+# if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP || WINAPI_FAMILY == WINAPI_FAMILY_APP)
+# define PLUGIN_WIN_DD_UNDER_WINDOWS_RT 1
+# endif
+#endif
+
+// DLL linkage decoration: dllexport when building this plugin (PLUGIN_WIN_DDP_EXPORTS),
+// dllimport when consuming it, and plain/extern on non-Windows platforms.
+#if (PLUGIN_WIN_DD_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(PLUGIN_WIN_DDP_EXPORTS)
+# define PLUGIN_WIN_DDP_API __declspec(dllexport)
+# define PLUGIN_WIN_DDP_GEXTERN extern __declspec(dllexport)
+#elif (PLUGIN_WIN_DD_UNDER_WINDOWS || defined(__SYMBIAN32__)) && !defined(PLUGIN_WIN_DDP_IMPORTS_IGNORE)
+# define PLUGIN_WIN_DDP_API __declspec(dllimport)
+# define PLUGIN_WIN_DDP_GEXTERN __declspec(dllimport)
+#else
+# define PLUGIN_WIN_DDP_API
+# define PLUGIN_WIN_DDP_GEXTERN extern
+#endif
+
+// x86
+#if defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+# define PLUGIN_WIN_DD_UNDER_X86 1
+#endif
+
+// Guards against C++ name mangling
+#ifdef __cplusplus
+# define PLUGIN_WIN_DD_BEGIN_DECLS extern "C" {
+# define PLUGIN_WIN_DD_END_DECLS }
+#else
+# define PLUGIN_WIN_DD_BEGIN_DECLS
+# define PLUGIN_WIN_DD_END_DECLS
+#endif
+
+#ifdef _MSC_VER
+# define inline __inline
+# define _CRT_SECURE_NO_WARNINGS
+# define _ALLOW_KEYWORD_MACROS
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#endif // PLUGIN_WIN_DD_CONFIG_H
diff --git a/plugins/pluginWinDD/plugin_win_dd_producer.cxx b/plugins/pluginWinDD/plugin_win_dd_producer.cxx
new file mode 100644
index 0000000..7c13767
--- /dev/null
+++ b/plugins/pluginWinDD/plugin_win_dd_producer.cxx
@@ -0,0 +1,1074 @@
+/* Copyright (C) 2015 Mamadou DIOP
+* Copyright (C) 2015 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+// Microsoft Duplication Desktop producer for Win8+: https://msdn.microsoft.com/en-us/library/windows/desktop/hh404487(v=VS.85).aspx
+// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
+// PARTICULAR PURPOSE.
+//
+// Copyright (c) Microsoft Corporation. All rights reserved
+#include "plugin_win_dd_config.h"
+
+#include "tinymedia/tmedia_defaults.h"
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_time.h"
+#include "tsk_string.h"
+#include "tsk_thread.h"
+#include "tsk_safeobj.h"
+#include "tsk_debug.h"
+
+#include "internals/DisplayManager.h"
+#include "internals/DuplicationManager.h"
+#include "internals/OutputManager.h"
+#include "internals/ThreadManager.h"
+
+#include <Windows.h>
+
+#define DD_DEBUG_INFO(FMT, ...) TSK_DEBUG_INFO("[DESKTOP DUPLICATION] " FMT, ##__VA_ARGS__)
+#define DD_DEBUG_WARN(FMT, ...) TSK_DEBUG_WARN("[DESKTOP DUPLICATION] " FMT, ##__VA_ARGS__)
+#define DD_DEBUG_ERROR(FMT, ...) TSK_DEBUG_ERROR("[DESKTOP DUPLICATION] " FMT, ##__VA_ARGS__)
+#define DD_DEBUG_FATAL(FMT, ...) TSK_DEBUG_FATAL("[DESKTOP DUPLICATION] " FMT, ##__VA_ARGS__)
+
+#define DD_CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { DD_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
+
+#if !defined(DD_DDPROC_THREAD_TIMEOUT)
+# define DD_DDPROC_THREAD_TIMEOUT 1500
+#endif
+
+//
+// plugin_win_dd_producer_t
+//
+// Desktop Duplication producer context. Extends tmedia_producer_t (first member)
+// with the capture-session state shared between the control functions and the
+// asynchronous DDThread/DDProc threads.
+typedef struct plugin_win_dd_producer_s
+{
+ TMEDIA_DECLARE_PRODUCER;
+
+ bool bStarted, bPrepared, bMuted, bWindowHooked, bThreadTerminationDelayed;
+ tsk_thread_handle_t* ppTread[1]; // single watcher thread (DDThread); note: 'ppTread' is a historical typo kept for compatibility
+
+ OUTPUTMANAGER *pOutMgr; // owns the shared surface / preview rendering
+ THREADMANAGER *pThreadMgr; // owns the per-output DDProc capture threads
+
+ // Window handles
+ HWND hwndPreview; // local preview window (subclassed via HookWindow)
+ WNDPROC wndPreviewProc; // original WNDPROC saved so UnhookWindow can restore it
+ HWND hwndSrc; // source window ("src-hwnd" parameter)
+
+ // Synchronization
+ HANDLE hlUnexpectedErrorEvent; // fatal error -> stop everything
+ HANDLE hlExpectedErrorEvent; // transient error (mode change, TDR...) -> re-init
+ HANDLE hlOcclutionEvent; // preview occlusion status changed (set from WndProc)
+ HANDLE hlTerminateThreadsEvent; // tells DDProc threads to exit
+
+ HCURSOR hcCursor; // simple arrow cursor loaded at prepare()
+}
+plugin_win_dd_producer_t;
+
+
+// Forward declarations
+static int _plugin_win_dd_producer_unprepare(plugin_win_dd_producer_t* pSelf, bool bCleanup = false);
+
+LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam);
+static HRESULT HookWindow(struct plugin_win_dd_producer_s *pSelf, HWND hWnd);
+static HRESULT UnhookWindow(struct plugin_win_dd_producer_s *pSelf);
+
+DWORD WINAPI DDProc(_In_ void* Param);
+void DisplayMsg(_In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr);
+_Post_satisfies_(return != DUPL_RETURN_SUCCESS)
+// FIX: 'ExpectedErrors' needs a default argument — ProcessFailure is called with
+// only four arguments in prepare() and DDProc(), which does not compile without
+// it. The default lives here (first declaration) because C++ forbids repeating
+// a default argument at the later definition.
+DUPL_RETURN ProcessFailure(_In_opt_ ID3D11Device* Device, _In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr, _In_opt_z_ HRESULT* ExpectedErrors = nullptr);
+static void* TSK_STDCALL DDThread(void *pArg);
+
+//
+// Class for progressive waits
+//
+// One band of the progressive back-off table: wait 'WaitTime' ms, at most
+// 'WaitCount' times before moving to the next band.
+typedef struct
+{
+ UINT WaitTime;
+ UINT WaitCount;
+}WAIT_BAND;
+
+#define WAIT_BAND_COUNT 3
+#define WAIT_BAND_STOP 0 // WaitCount value meaning "stay in this band for ever"
+
+// Progressive back-off helper: successive Wait() calls inside the same wait
+// sequence sleep for increasingly long periods (see m_WaitBands) so retry loops
+// do not burn CPU when an error state persists. (Wait() is defined elsewhere.)
+class DYNAMIC_WAIT
+{
+public:
+ DYNAMIC_WAIT();
+ ~DYNAMIC_WAIT();
+
+ void Wait();
+
+private:
+
+ static const WAIT_BAND m_WaitBands[WAIT_BAND_COUNT];
+
+ // Period in seconds that a new wait call is considered part of the same wait sequence
+ static const UINT m_WaitSequenceTimeInSeconds = 2;
+
+ UINT m_CurrentWaitBandIdx; // index into m_WaitBands
+ UINT m_WaitCountInCurrentBand; // how many waits done in the current band
+ LARGE_INTEGER m_QPCFrequency; // QueryPerformanceCounter frequency
+ LARGE_INTEGER m_LastWakeUpTime; // timestamp of the previous Wait() return
+ BOOL m_QPCValid; // whether QPC values above are usable
+};
+const WAIT_BAND DYNAMIC_WAIT::m_WaitBands[WAIT_BAND_COUNT] = {
+ { 250, 20 },
+ { 2000, 60 },
+ { 5000, WAIT_BAND_STOP } // Never move past this band
+};
+
+/* ============ Video DD Producer Interface ================= */
+// Runtime parameter setter. Supported keys:
+//   int64 "local-hwnd"/"preview-hwnd" : preview window to subclass (HookWindow)
+//   int64 "src-hwnd"                  : source window handle
+//   int32 "mute"                      : suspend/resume frame delivery
+// Returns 0 on success, -1 on invalid arguments or unknown key.
+static int plugin_win_dd_producer_set(tmedia_producer_t *p_self, const tmedia_param_t* pc_param)
+{
+ int ret = -1;
+ plugin_win_dd_producer_t* p_dd = (plugin_win_dd_producer_t*)p_self;
+
+ if (!p_dd || !pc_param)
+ {
+ DD_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pc_param->value_type == tmedia_pvt_int64)
+ {
+ if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd"))
+ {
+ HWND hwnd = (HWND)*((int64_t*)pc_param->value); // NOTE(review): assumes 'value' is non-NULL for int64 params — confirm tmedia contract
+ ret = SUCCEEDED(HookWindow(p_dd, hwnd)) ? 0 : -1;
+ }
+ else if (tsk_striequals(pc_param->key, "src-hwnd"))
+ {
+ p_dd->hwndSrc = (HWND)*((int64_t*)pc_param->value);
+ ret = 0;
+ }
+ }
+ else if (pc_param->value_type == tmedia_pvt_int32)
+ {
+ if (tsk_striequals(pc_param->key, "mute"))
+ {
+ p_dd->bMuted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0); // checked by DDProc before sending frames
+ ret = 0;
+ }
+ }
+
+ return ret;
+}
+
+// Configures the producer from the negotiated codec (fps/width/height) and
+// lazily allocates everything the capture session needs: the four sync events,
+// the arrow cursor, the output manager and the thread manager. Safe to call
+// again after a delayed thread termination (finishes that cleanup first).
+// Returns 0 on success (bPrepared set), -1 otherwise.
+static int plugin_win_dd_producer_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
+{
+    plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
+    HRESULT hr = S_OK;
+
+    // FIX: the original condition read "!pSelf || !codec && codec->plugin",
+    // which (a) dereferences 'codec' precisely when it is NULL and (b) jumped
+    // to 'bail' where a NULL 'pSelf' would then be dereferenced. Validate all
+    // three pointers and return early instead.
+    if (!pSelf || !codec || !codec->plugin)
+    {
+        DD_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+
+    if (pSelf->bPrepared)
+    {
+        DD_DEBUG_WARN("DD video producer already prepared");
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+    if (pSelf->bThreadTerminationDelayed)
+    {
+        // A previous stop() timed out waiting for the DDProc threads: finish
+        // that cleanup now (this time waiting for ever) before re-preparing.
+        DD_DEBUG_INFO("Thread termination was delayed ...cleanup now");
+        if (_plugin_win_dd_producer_unprepare(pSelf, true/*cleanup?*/) != 0)
+        {
+            DD_CHECK_HR(hr = E_UNEXPECTED);
+        }
+    }
+
+    TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+    TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+    TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+    TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+
+    DD_DEBUG_INFO("DD video producer: fps=%d, width=%d, height=%d",
+        TMEDIA_PRODUCER(pSelf)->video.fps,
+        TMEDIA_PRODUCER(pSelf)->video.width,
+        TMEDIA_PRODUCER(pSelf)->video.height);
+
+    // Event used by the threads to signal an unexpected error and we want to quit the app
+    if (!pSelf->hlUnexpectedErrorEvent && !(pSelf->hlUnexpectedErrorEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr)))
+    {
+        ProcessFailure(nullptr, L"UnexpectedErrorEvent creation failed", L"Error", E_UNEXPECTED);
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+    // Event for when a thread encounters an expected error
+    if (!pSelf->hlExpectedErrorEvent && !(pSelf->hlExpectedErrorEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr)))
+    {
+        ProcessFailure(nullptr, L"ExpectedErrorEvent creation failed", L"Error", E_UNEXPECTED);
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+    // Event for Occlution
+    if (!pSelf->hlOcclutionEvent && !(pSelf->hlOcclutionEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr)))
+    {
+        ProcessFailure(nullptr, L"OcclutionEvent creation failed", L"Error", E_UNEXPECTED);
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+    // Event to tell spawned threads to quit
+    if (!pSelf->hlTerminateThreadsEvent && !(pSelf->hlTerminateThreadsEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr)))
+    {
+        ProcessFailure(nullptr, L"TerminateThreadsEvent creation failed", L"Error", E_UNEXPECTED);
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+    // Load simple cursor
+    if (!pSelf->hcCursor && !(pSelf->hcCursor = LoadCursor(nullptr, IDC_ARROW)))
+    {
+        ProcessFailure(nullptr, L"Cursor load failed", L"Error", E_UNEXPECTED);
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+    if (!pSelf->pOutMgr && !(pSelf->pOutMgr = new OUTPUTMANAGER()))
+    {
+        ProcessFailure(nullptr, L"Out manager allocation failed", L"Error", E_OUTOFMEMORY);
+        DD_CHECK_HR(hr = E_OUTOFMEMORY);
+    }
+
+    if (!pSelf->pThreadMgr && !(pSelf->pThreadMgr = new THREADMANAGER()))
+    {
+        ProcessFailure(nullptr, L"Thread managed allocation failed", L"Error", E_OUTOFMEMORY);
+        DD_CHECK_HR(hr = E_OUTOFMEMORY);
+    }
+
+bail:
+    pSelf->bPrepared = SUCCEEDED(hr);
+    return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Hooks the preview window and spawns the asynchronous watcher thread
+// (DDThread) which drives the whole duplication session.
+// Returns 0 on success (bStarted set), -1 otherwise (window unhooked again).
+static int plugin_win_dd_producer_start(tmedia_producer_t* self)
+{
+    plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
+    HRESULT hr = S_OK;
+    // FIX: declared (uninitialized) up front — the original declared
+    // "int ret = tsk_thread_create(...)" after DD_CHECK_HR statements whose
+    // "goto bail" jumps over that initialization, which is ill-formed C++
+    // (MSVC error C2362).
+    int ret;
+
+    if (!pSelf)
+    {
+        DD_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+
+    if (pSelf->bStarted)
+    {
+        DD_DEBUG_INFO("Producer already started");
+        goto bail;
+    }
+    if (!pSelf->bPrepared)
+    {
+        DD_DEBUG_ERROR("Producer not prepared");
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+    DD_CHECK_HR(hr = HookWindow(pSelf, pSelf->hwndPreview));
+
+    // Start asynchronous watcher thread. bStarted must be true *before* the
+    // thread runs, since DDThread's main loop exits as soon as it is false.
+    pSelf->bStarted = true;
+    ret = tsk_thread_create(&pSelf->ppTread[0], DDThread, pSelf);
+    if (ret != 0)
+    {
+        TSK_DEBUG_ERROR("Failed to create thread");
+        pSelf->bStarted = false;
+        if (pSelf->ppTread[0])
+        {
+            tsk_thread_join(&pSelf->ppTread[0]);
+        }
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+bail:
+    if (FAILED(hr))
+    {
+        UnhookWindow(pSelf);
+        return -1;
+    }
+    pSelf->bStarted = true;
+    return 0;
+}
+
+// Pause hook. The DD producer has no real pause implementation: this only
+// validates its argument and logs when called on a producer that was never
+// started. Always returns 0 for a valid producer.
+static int plugin_win_dd_producer_pause(tmedia_producer_t* self)
+{
+    plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
+
+    if (!pSelf)
+    {
+        DD_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+    if (!pSelf->bStarted)
+    {
+        // FIX: the message used to say "MF video producer" — a copy/paste
+        // leftover from the Media Foundation plugin; this is the DD producer.
+        DD_DEBUG_INFO("DD video producer not started");
+    }
+
+    return 0;
+}
+
+// Stops the capture session: clears bStarted (which makes DDThread's loop
+// exit), restores the preview window procedure, signals the DDProc threads to
+// terminate, joins the watcher thread and finally unprepares the producer.
+// A later start() must be preceded by prepare().
+static int plugin_win_dd_producer_stop(tmedia_producer_t* self)
+{
+ plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
+
+ if (!pSelf)
+ {
+ DD_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ pSelf->bStarted = false; // DDThread polls this flag and will exit its loop
+
+ UnhookWindow(pSelf);
+
+ if (pSelf->hlTerminateThreadsEvent)
+ {
+ SetEvent(pSelf->hlTerminateThreadsEvent); // tells the DDProc capture threads to quit
+ }
+ if (pSelf->ppTread[0])
+ {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+
+ // next start() will be called after prepare()
+ int ret = _plugin_win_dd_producer_unprepare(pSelf);
+
+ return ret;
+}
+
+// Releases everything prepare() allocated. With bCleanup=true it waits for
+// ever for the DDProc threads; otherwise it waits at most
+// DD_DDPROC_THREAD_TIMEOUT ms and, on timeout, keeps the events and output
+// manager alive (bThreadTerminationDelayed) so the still-running threads do
+// not touch freed resources — the next prepare() retries the cleanup.
+// Returns 0 on success, -1 on invalid argument or when the producer is still
+// started (must be stopped first).
+static int _plugin_win_dd_producer_unprepare(plugin_win_dd_producer_t* pSelf, bool bCleanup /*= false*/)
+{
+    HRESULT hr = S_OK;
+
+    if (!pSelf)
+    {
+        DD_DEBUG_ERROR("Invalid parameter");
+        return -1;
+    }
+
+    if (pSelf->bStarted)
+    {
+        // Caller must stop() first.
+        DD_CHECK_HR(hr = E_UNEXPECTED);
+    }
+
+    pSelf->bThreadTerminationDelayed = false;
+
+    // Thread manager must be destroyed before the events and output manager
+    if (pSelf->pThreadMgr)
+    {
+        // if we are cleaning the producer then all threads must exit only when all threads are destroyed
+        // https://code.google.com/p/sincity/issues/detail?id=7
+        if (pSelf->pThreadMgr->WaitForThreadTermination(bCleanup ? INFINITE : DD_DDPROC_THREAD_TIMEOUT) == true)
+        {
+            delete pSelf->pThreadMgr;
+            pSelf->pThreadMgr = nullptr;
+        }
+        else
+        {
+            // Thread wait timedout
+            DD_DEBUG_WARN("DDProc thread termination delayed");
+            pSelf->bThreadTerminationDelayed = true;
+        }
+    }
+
+    // Only release the shared resources once no capture thread can still use them.
+    if (!pSelf->bThreadTerminationDelayed)
+    {
+        if (pSelf->hlUnexpectedErrorEvent)
+        {
+            CloseHandle(pSelf->hlUnexpectedErrorEvent);
+            pSelf->hlUnexpectedErrorEvent = nullptr;
+        }
+        if (pSelf->hlExpectedErrorEvent)
+        {
+            CloseHandle(pSelf->hlExpectedErrorEvent);
+            pSelf->hlExpectedErrorEvent = nullptr;
+        }
+        if (pSelf->hlOcclutionEvent)
+        {
+            CloseHandle(pSelf->hlOcclutionEvent);
+            pSelf->hlOcclutionEvent = nullptr;
+        }
+        if (pSelf->hlTerminateThreadsEvent)
+        {
+            CloseHandle(pSelf->hlTerminateThreadsEvent);
+            pSelf->hlTerminateThreadsEvent = nullptr;
+        }
+
+        if (pSelf->hcCursor)
+        {
+            DestroyCursor(pSelf->hcCursor);
+            pSelf->hcCursor = nullptr;
+        }
+
+        if (pSelf->pOutMgr)
+        {
+            delete pSelf->pOutMgr;
+            pSelf->pOutMgr = nullptr;
+        }
+    }
+
+    pSelf->bPrepared = false;
+
+bail:
+    // FIX: the original returned 0 unconditionally, hiding the "still started"
+    // error signalled above from callers such as prepare().
+    return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Subclasses 'hWnd' (the preview window) with our WndProc so resize/occlusion
+// notifications reach this producer, and stores 'pSelf' as the "Self" window
+// property for retrieval inside WndProc. A NULL 'hWnd' simply unhooks the
+// previous window. Returns S_OK or a failure HRESULT.
+static HRESULT HookWindow(struct plugin_win_dd_producer_s *pSelf, HWND hWnd)
+{
+    HRESULT hr = S_OK;
+
+    DD_CHECK_HR(hr = UnhookWindow(pSelf));
+
+    if ((pSelf->hwndPreview = hWnd))
+    {
+        // NOTE(review): a zero return can also mean the previous WNDPROC was
+        // zero; the robust pattern is SetLastError(0) before the call and then
+        // checking GetLastError() — confirm whether that edge case matters here.
+        pSelf->wndPreviewProc = (WNDPROC)SetWindowLongPtr(pSelf->hwndPreview, GWLP_WNDPROC, (LONG_PTR)WndProc);
+        if (!pSelf->wndPreviewProc)
+        {
+            DD_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
+            DD_CHECK_HR(hr = E_FAIL);
+        }
+        SetProp(pSelf->hwndPreview, L"Self", pSelf);
+        pSelf->bWindowHooked = true;
+    }
+bail:
+    // FIX: used to "return S_OK" unconditionally, so the E_FAIL set above on a
+    // subclassing failure was silently reported as success to the callers.
+    return hr;
+}
+
+// Restores the preview window's original procedure (saved by HookWindow) and
+// forces a repaint so our last rendered frame is cleared. Always returns S_OK.
+static HRESULT UnhookWindow(struct plugin_win_dd_producer_s *pSelf)
+{
+ if (pSelf->hwndPreview && pSelf->wndPreviewProc)
+ {
+ SetWindowLongPtr(pSelf->hwndPreview, GWLP_WNDPROC, (LONG_PTR)pSelf->wndPreviewProc); // undo the subclassing
+ pSelf->wndPreviewProc = NULL;
+ }
+ if (pSelf->hwndPreview)
+ {
+ ::InvalidateRect(pSelf->hwndPreview, NULL, FALSE); // request a repaint of the (now unhooked) preview
+ }
+ pSelf->bWindowHooked = false;
+ return S_OK;
+}
+
+//
+// Windows Desktop Duplication producer object definition
+//
+/* constructor */
+// tsk_object constructor: initializes the tmedia base then fills in default
+// capture settings (RGB32 raw frames, 15 fps, CIF 352x288) which prepare()
+// later overrides from the negotiated codec.
+static tsk_object_t* plugin_win_dd_producer_ctor(tsk_object_t * self, va_list * app)
+{
+ plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)self;
+ if (pSelf)
+ {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
+
+ /* init self with default values*/
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+ TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_rgb32;
+ TMEDIA_PRODUCER(pSelf)->video.fps = 15;
+ TMEDIA_PRODUCER(pSelf)->video.width = 352;
+ TMEDIA_PRODUCER(pSelf)->video.height = 288;
+
+ DD_DEBUG_INFO("Create Microsoft Desktop Duplication producer");
+ }
+ return self;
+}
+/* destructor */
+// tsk_object destructor: stops the session if still running, deinitializes the
+// tmedia base, then performs the full cleanup (bCleanup=true waits for the
+// capture threads for ever so no thread outlives the object).
+static tsk_object_t* plugin_win_dd_producer_dtor(tsk_object_t * self)
+{
+ plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)self;
+ if (pSelf)
+ {
+ /* stop */
+ if (pSelf->bStarted)
+ {
+ plugin_win_dd_producer_stop(TMEDIA_PRODUCER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
+ /* deinit self */
+ _plugin_win_dd_producer_unprepare(pSelf, true/*cleanup*/);
+
+ DD_DEBUG_INFO("*** WinDD producer destroyed ***");
+ }
+
+ return self;
+}
+/* object definition */
+// tsk_object vtable: size + ctor/dtor (no comparator).
+static const tsk_object_def_t plugin_win_dd_producer_def_s =
+{
+ sizeof(plugin_win_dd_producer_t),
+ plugin_win_dd_producer_ctor,
+ plugin_win_dd_producer_dtor,
+ tsk_null,
+};
+/* plugin definition*/
+static const tmedia_producer_plugin_def_t plugin_win_dd_producer_plugin_def_s =
+{
+ &plugin_win_dd_producer_def_s,
+
+ tmedia_bfcp_video, // BFCP (screen-sharing) video media type
+ "Microsoft Windows Desktop Duplication producer (Video)",
+
+ plugin_win_dd_producer_set,
+ plugin_win_dd_producer_prepare,
+ plugin_win_dd_producer_start,
+ plugin_win_dd_producer_pause,
+ plugin_win_dd_producer_stop
+};
+// Exported definition consumed by the plugin framework.
+const tmedia_producer_plugin_def_t *plugin_win_dd_producer_plugin_def_t = &plugin_win_dd_producer_plugin_def_s;
+
+
+
+
+
+// Below are lists of errors expect from Dxgi API calls when a transition event like mode change, PnpStop, PnpStart
+// desktop switch, TDR or session disconnect/reconnect. In all these cases we want the application to clean up the threads that process
+// the desktop updates and attempt to recreate them.
+// If we get an error that is not on the appropriate list then we exit the application
+
+// These are the errors we expect from general Dxgi API due to a transition
+// These are the errors we expect from general Dxgi API due to a transition
+// (each list below is scanned by ProcessFailure and is S_OK-terminated).
+HRESULT SystemTransitionsExpectedErrors[] = {
+ DXGI_ERROR_DEVICE_REMOVED,
+ DXGI_ERROR_ACCESS_LOST,
+ static_cast<HRESULT>(WAIT_ABANDONED),
+ S_OK // Terminate list with zero valued HRESULT
+};
+
+// These are the errors we expect from IDXGIOutput1::DuplicateOutput due to a transition
+HRESULT CreateDuplicationExpectedErrors[] = {
+ DXGI_ERROR_DEVICE_REMOVED,
+ static_cast<HRESULT>(E_ACCESSDENIED),
+ DXGI_ERROR_UNSUPPORTED,
+ DXGI_ERROR_SESSION_DISCONNECTED,
+ S_OK // Terminate list with zero valued HRESULT
+};
+
+// These are the errors we expect from IDXGIOutputDuplication methods due to a transition
+HRESULT FrameInfoExpectedErrors[] = {
+ DXGI_ERROR_DEVICE_REMOVED,
+ DXGI_ERROR_ACCESS_LOST,
+ S_OK // Terminate list with zero valued HRESULT
+};
+
+// These are the errors we expect from IDXGIAdapter::EnumOutputs methods due to outputs becoming stale during a transition
+HRESULT EnumOutputsExpectedErrors[] = {
+ DXGI_ERROR_NOT_FOUND,
+ S_OK // Terminate list with zero valued HRESULT
+};
+
+
+
+// Classifies a failure HRESULT as expected (transient system transition ->
+// DUPL_RETURN_ERROR_EXPECTED, caller should re-init) or unexpected
+// (DUPL_RETURN_ERROR_UNEXPECTED, message box shown, caller should quit).
+// When 'Device' is given, device-removed reasons are remapped first.
+// 'ExpectedErrors' is an S_OK-terminated array (one of the tables above), or
+// NULL to treat every error as unexpected.
+_Post_satisfies_(return != DUPL_RETURN_SUCCESS)
+ DUPL_RETURN ProcessFailure(_In_opt_ ID3D11Device* Device, _In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr, _In_opt_z_ HRESULT* ExpectedErrors)
+{
+ HRESULT TranslatedHr;
+
+ // On an error check if the DX device is lost
+ if (Device)
+ {
+ HRESULT DeviceRemovedReason = Device->GetDeviceRemovedReason();
+
+ switch (DeviceRemovedReason)
+ {
+ case DXGI_ERROR_DEVICE_REMOVED:
+ case DXGI_ERROR_DEVICE_RESET:
+ case static_cast<HRESULT>(E_OUTOFMEMORY) :
+ {
+ // Our device has been stopped due to an external event on the GPU so map them all to
+ // device removed and continue processing the condition
+ TranslatedHr = DXGI_ERROR_DEVICE_REMOVED;
+ break;
+ }
+
+ case S_OK:
+ {
+ // Device is not removed so use original error
+ TranslatedHr = hr;
+ break;
+ }
+
+ default:
+ {
+ // Device is removed but not a error we want to remap
+ TranslatedHr = DeviceRemovedReason;
+ }
+ }
+ }
+ else
+ {
+ TranslatedHr = hr;
+ }
+
+ // Check if this error was expected or not
+ if (ExpectedErrors)
+ {
+ HRESULT* CurrentResult = ExpectedErrors;
+
+ while (*CurrentResult != S_OK) // lists are S_OK-terminated
+ {
+ if (*(CurrentResult++) == TranslatedHr)
+ {
+ return DUPL_RETURN_ERROR_EXPECTED;
+ }
+ }
+ }
+
+ // Error was not expected so display the message box
+ DisplayMsg(Str, Title, TranslatedHr);
+
+ return DUPL_RETURN_ERROR_UNEXPECTED;
+}
+
+// Window procedure installed on the preview window by HookWindow. Retrieves
+// the owning producer via the "Self" window property (set in HookWindow) and
+// forwards resize/occlusion notifications; everything else goes to
+// DefWindowProc.
+LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
+{
+ switch (message)
+ {
+ case WM_DESTROY:
+ {
+ PostQuitMessage(0);
+ break;
+ }
+ case WM_SIZE:
+ {
+ // Tell output manager that window size has changed
+ plugin_win_dd_producer_t* pSelf = static_cast<plugin_win_dd_producer_t*>(GetProp(hWnd, L"Self"));
+ if (pSelf && pSelf->pOutMgr)
+ {
+ pSelf->pOutMgr->WindowResize();
+ }
+ break;
+ }
+ case OCCLUSION_STATUS_MSG:
+ {
+ // DWM occlusion status changed: wake up DDThread via the occlusion event
+ plugin_win_dd_producer_t* pSelf = static_cast<plugin_win_dd_producer_t*>(GetProp(hWnd, L"Self"));
+ if (pSelf && pSelf->hlOcclutionEvent)
+ {
+ SetEvent(pSelf->hlOcclutionEvent);
+ }
+ break;
+ }
+ default:
+ return DefWindowProc(hWnd, message, wParam, lParam);
+ }
+
+ return 0;
+}
+
+//
+// Entry point for new duplication threads
+//
+// Per-output capture thread. Attaches to the input desktop, opens the shared
+// surface created by the main thread, then loops: acquire a desktop frame via
+// the duplication manager, copy it into the keyed-mutex-protected shared
+// surface, and forward it to the tmedia producer callback at most once per
+// frame period. On exit it signals the expected/unexpected error event so the
+// watcher thread (DDThread) can re-initialize or abort.
+DWORD WINAPI DDProc(_In_ void* Param)
+{
+ DD_DEBUG_INFO("DDProc (producer) - ENTER");
+
+ // Classes
+ DISPLAYMANAGER DispMgr;
+ DUPLICATIONMANAGER DuplMgr;
+
+ // D3D objects
+ ID3D11Texture2D* SharedSurf = nullptr;
+ IDXGIKeyedMutex* KeyMutex = nullptr;
+
+ // Data passed in from thread creation
+ THREAD_DATA* TData = reinterpret_cast<THREAD_DATA*>(Param);
+
+ // Get desktop
+ DUPL_RETURN Ret;
+ HDESK CurrentDesktop = nullptr;
+ CurrentDesktop = OpenInputDesktop(0, FALSE, GENERIC_ALL);
+ if (!CurrentDesktop)
+ {
+ // We do not have access to the desktop so request a retry
+ SetEvent(TData->ExpectedErrorEvent);
+ Ret = DUPL_RETURN_ERROR_EXPECTED;
+ goto Exit;
+ }
+
+ // Attach desktop to this thread
+ bool DesktopAttached = SetThreadDesktop(CurrentDesktop) != 0;
+ CloseDesktop(CurrentDesktop);
+ CurrentDesktop = nullptr;
+ if (!DesktopAttached)
+ {
+ // We do not have access to the desktop so request a retry
+ Ret = DUPL_RETURN_ERROR_EXPECTED;
+ goto Exit;
+ }
+
+ // New display manager
+ DispMgr.InitD3D(&TData->DxRes);
+
+ // FPS manager
+ uint64_t TimeNow, TimeLastFrame = 0;
+ const uint64_t TimeFrameDuration = 1000 / TData->Producer->video.fps; // NOTE(review): divides by fps — assumes fps != 0; confirm upstream validation
+
+ // Obtain handle to sync shared Surface
+ HRESULT hr = TData->DxRes.Device->OpenSharedResource(TData->TexSharedHandle, __uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&SharedSurf));
+ if (FAILED(hr))
+ {
+ Ret = ProcessFailure(TData->DxRes.Device, L"Opening shared texture failed", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto Exit;
+ }
+
+ hr = SharedSurf->QueryInterface(__uuidof(IDXGIKeyedMutex), reinterpret_cast<void**>(&KeyMutex));
+ if (FAILED(hr))
+ {
+ Ret = ProcessFailure(nullptr, L"Failed to get keyed mutex interface in spawned thread", L"Error", hr);
+ goto Exit;
+ }
+
+ // Make duplication manager
+ Ret = DuplMgr.InitDupl(TData->DxRes.Device, TData->DxRes.Context ,TData->Output);
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ goto Exit;
+ }
+
+ // Get output description
+ DXGI_OUTPUT_DESC DesktopDesc;
+ RtlZeroMemory(&DesktopDesc, sizeof(DXGI_OUTPUT_DESC));
+ DuplMgr.GetOutputDesc(&DesktopDesc);
+
+ // Main duplication loop
+ bool WaitToProcessCurrentFrame = false; // true when we hold a frame but could not lock the shared surface
+ FRAME_DATA CurrentData;
+
+ while (TData->Producer->is_started && (WaitForSingleObjectEx(TData->TerminateThreadsEvent, 0, FALSE) == WAIT_TIMEOUT))
+ {
+ if (!WaitToProcessCurrentFrame)
+ {
+ // Get new frame from desktop duplication
+ bool TimeOut;
+ Ret = DuplMgr.GetFrame(&CurrentData, &TimeOut);
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ // An error occurred getting the next frame drop out of loop which
+ // will check if it was expected or not
+ break;
+ }
+
+ // Check for timeout
+ if (TimeOut)
+ {
+ // No new frame at the moment
+ continue;
+ }
+ }
+
+ // We have a new frame so try and process it
+ // Try to acquire keyed mutex in order to access shared surface
+ hr = KeyMutex->AcquireSync(0, 1000);
+ if (hr == static_cast<HRESULT>(WAIT_TIMEOUT))
+ {
+ // Can't use shared surface right now, try again later
+ WaitToProcessCurrentFrame = true;
+ continue;
+ }
+ else if (FAILED(hr))
+ {
+ // Generic unknown failure
+ Ret = ProcessFailure(TData->DxRes.Device, L"Unexpected error acquiring KeyMutex", L"Error", hr, SystemTransitionsExpectedErrors);
+ DuplMgr.DoneWithFrame();
+ break;
+ }
+
+ // We can now process the current frame
+ WaitToProcessCurrentFrame = false;
+
+ // Get mouse info
+ Ret = DuplMgr.GetMouse(TData->PtrInfo, &(CurrentData.FrameInfo), TData->OffsetX, TData->OffsetY);
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ DuplMgr.DoneWithFrame();
+ KeyMutex->ReleaseSync(1);
+ break;
+ }
+
+ // Process new frame
+ Ret = DispMgr.ProcessFrame(&CurrentData, SharedSurf, TData->OffsetX, TData->OffsetY, &DesktopDesc);
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ DuplMgr.DoneWithFrame();
+ KeyMutex->ReleaseSync(1);
+ break;
+ }
+
+ // Release acquired keyed mutex
+ hr = KeyMutex->ReleaseSync(1);
+ if (FAILED(hr))
+ {
+ Ret = ProcessFailure(TData->DxRes.Device, L"Unexpected error releasing the keyed mutex", L"Error", hr, SystemTransitionsExpectedErrors);
+ DuplMgr.DoneWithFrame();
+ break;
+ }
+
+ // Send Frame Over the Network (throttled to the negotiated fps; skipped while muted)
+ TimeNow = tsk_time_now();
+ if ((TimeNow - TimeLastFrame) > TimeFrameDuration)
+ {
+ if (!((const plugin_win_dd_producer_t*)TData->Producer)->bMuted)
+ {
+ hr = DuplMgr.SendData(const_cast<struct tmedia_producer_s*>(TData->Producer), &CurrentData);
+ }
+ if (SUCCEEDED(hr))
+ {
+ TimeLastFrame = TimeNow;
+ }
+ }
+#if 0
+ else
+ {
+ DD_DEBUG_INFO("Skip frame");
+ }
+#endif
+
+ // Release frame back to desktop duplication
+ Ret = DuplMgr.DoneWithFrame();
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ break;
+ }
+ }
+
+Exit:
+ if (Ret != DUPL_RETURN_SUCCESS)
+ {
+ if (Ret == DUPL_RETURN_ERROR_EXPECTED)
+ {
+ // The system is in a transition state so request the duplication be restarted
+ SetEvent(TData->ExpectedErrorEvent);
+ }
+ else
+ {
+ // Unexpected error so exit the application
+ SetEvent(TData->UnexpectedErrorEvent);
+ }
+ }
+
+ if (SharedSurf)
+ {
+ SharedSurf->Release();
+ SharedSurf = nullptr;
+ }
+
+ if (KeyMutex)
+ {
+ KeyMutex->Release();
+ KeyMutex = nullptr;
+ }
+
+ DD_DEBUG_INFO("DDProc (producer) - EXIT");
+
+ return 0;
+}
+
+// Run session async thread
+// Session async thread (producer side).
+// Owns the desktop-duplication lifecycle: (re)initializes the output manager,
+// spawns the per-output capture threads, and restarts the whole pipeline when
+// an "expected" error is signalled (e.g. display-mode change / UAC desktop
+// switch). An "unexpected" error terminates the loop.
+// Runs until pSelf->bStarted is cleared by the owner.
+static void* TSK_STDCALL DDThread(void *pArg)
+{
+    plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)pArg;
+    HRESULT hr = S_OK;
+    INT SingleOutput = -1; // -1 => duplicate all outputs, not a single monitor
+
+    RECT DeskBounds = {};
+    UINT OutputCount = 1;
+
+    bool FirstTime = true;
+    bool Occluded = true;
+    bool PreviewChanged = false;
+    DYNAMIC_WAIT DynamicWait; // progressive back-off while the system is in transition
+    HWND hwndPreview = NULL;  // last preview window we initialized against
+
+    DD_DEBUG_INFO("DDThread (producer) - ENTER");
+
+    while (pSelf->bStarted)
+    {
+        DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
+
+        // Check if Preview window changed
+        PreviewChanged = (hwndPreview != pSelf->hwndPreview);
+
+        // Non-blocking event polls (timeout 0): events act as latched flags.
+        if (WaitForSingleObjectEx(pSelf->hlOcclutionEvent, 0, FALSE) == WAIT_OBJECT_0)
+        {
+            Occluded = false;
+        }
+        if (WaitForSingleObjectEx(pSelf->hlUnexpectedErrorEvent, 0, FALSE) == WAIT_OBJECT_0)
+        {
+            // Unexpected error occurred so exit the application
+            DD_CHECK_HR(hr = E_UNEXPECTED);
+        }
+        else if (FirstTime || PreviewChanged || WaitForSingleObjectEx(pSelf->hlExpectedErrorEvent, 0, FALSE) == WAIT_OBJECT_0)
+        {
+            // (Re)initialization path: first run, new preview window, or an
+            // expected (recoverable) duplication error.
+            if (PreviewChanged)
+            {
+                hwndPreview = pSelf->hwndPreview;
+            }
+
+            if (!FirstTime)
+            {
+                // Terminate other threads
+                SetEvent(pSelf->hlTerminateThreadsEvent);
+                pSelf->pThreadMgr->WaitForThreadTermination();
+                ResetEvent(pSelf->hlTerminateThreadsEvent);
+                ResetEvent(pSelf->hlExpectedErrorEvent);
+
+                // Clean up
+                pSelf->pThreadMgr->Clean();
+                pSelf->pOutMgr->CleanRefs();
+
+                // As we have encountered an error due to a system transition we wait before trying again, using this dynamic wait
+                // the wait periods will get progressively long to avoid wasting too much system resource if this state lasts a long time
+                DynamicWait.Wait();
+            }
+            else
+            {
+                // First time through the loop so nothing to clean up
+                FirstTime = false;
+            }
+
+            // Re-initialize
+            Ret = pSelf->pOutMgr->InitOutput(hwndPreview, SingleOutput, &OutputCount, &DeskBounds);
+            if (Ret == DUPL_RETURN_SUCCESS)
+            {
+                HANDLE SharedHandle = pSelf->pOutMgr->GetSharedHandle();
+                if (SharedHandle)
+                {
+                    // Spawn one capture thread per output, all writing into the shared surface.
+                    Ret = pSelf->pThreadMgr->Initialize(SingleOutput, OutputCount, pSelf->hlUnexpectedErrorEvent, pSelf->hlExpectedErrorEvent, pSelf->hlTerminateThreadsEvent, SharedHandle, TMEDIA_PRODUCER(pSelf), &DeskBounds);
+                }
+                else
+                {
+                    DisplayMsg(L"Failed to get handle of shared surface", L"Error", S_OK);
+                    Ret = DUPL_RETURN_ERROR_UNEXPECTED;
+                }
+            }
+
+
+            // We start off in occluded state and we should immediate get a occlusion status window message
+            Occluded = true;
+        }
+        else
+        {
+            // Nothing else to do, so try to present to write out to window if not occluded
+            if (!Occluded || !pSelf->bWindowHooked)
+            {
+                Ret = pSelf->pOutMgr->UpdateApplicationWindow(pSelf->pThreadMgr->GetPointerInfo(), &Occluded);
+            }
+        }
+
+        // Check if for errors
+        if (Ret != DUPL_RETURN_SUCCESS)
+        {
+            if (Ret == DUPL_RETURN_ERROR_EXPECTED)
+            {
+                // Some type of system transition is occurring so retry
+                SetEvent(pSelf->hlExpectedErrorEvent);
+            }
+            else
+            {
+                // Unexpected error so exit
+                DD_CHECK_HR(hr = E_UNEXPECTED);
+                break;
+            }
+        }
+    }
+
+bail:
+
+    DD_DEBUG_INFO("DDThread (producer) - BAIL");
+
+#if 0 // Done by unprepare()
+    // Make sure all other threads have exited
+    if (SetEvent(pSelf->hlTerminateThreadsEvent))
+    {
+        ThreadMgr.WaitForThreadTermination();
+    }
+
+    // Clean up
+    CloseHandle(pSelf->hlUnexpectedErrorEvent); pSelf->hlUnexpectedErrorEvent = NULL;
+    CloseHandle(pSelf->hlExpectedErrorEvent); pSelf->hlExpectedErrorEvent = NULL;
+    CloseHandle(pSelf->hlTerminateThreadsEvent); pSelf->hlTerminateThreadsEvent = NULL;
+#endif
+
+    DD_DEBUG_INFO("DDThread (producer) - EXIT");
+
+    return NULL;
+}
+
+//
+// Displays a message
+//
+// Formats "<Str> with 0x<hr>." and logs it as "<Title>: <message>" via DD_DEBUG_ERROR.
+void DisplayMsg(_In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr)
+{
+    // Worst-case output: Str + " with 0x" + 8 hex digits + "." + NUL.
+    // sizeof() of the narrow template slightly over-counts, which is safe.
+    const UINT StringLen = (UINT)(wcslen(Str) + sizeof(" with HRESULT 0x########."));
+    // BUGFIX: plain operator new[] throws std::bad_alloc on failure instead of
+    // returning nullptr, so the former "if (!OutStr) return;" guard was
+    // unreachable dead code and has been removed.
+    wchar_t* OutStr = new wchar_t[StringLen];
+
+    INT LenWritten = swprintf_s(OutStr, StringLen, L"%s with 0x%X.", Str, hr);
+    if (LenWritten != -1)
+    {
+        DD_DEBUG_ERROR("%ls: %ls", Title, OutStr);
+    }
+
+    delete[] OutStr;
+}
+
+
+
+ // Initializes the wait-band cursor and caches the QueryPerformanceCounter
+ // frequency used by Wait() to detect whether successive calls belong to the
+ // same wait sequence. m_QPCValid records whether QPC is usable on this host.
+ DYNAMIC_WAIT::DYNAMIC_WAIT() : m_CurrentWaitBandIdx(0), m_WaitCountInCurrentBand(0)
+ {
+     m_QPCValid = QueryPerformanceFrequency(&m_QPCFrequency);
+     m_LastWakeUpTime.QuadPart = 0L;
+ }
+
+ // No owned resources; nothing to release.
+ DYNAMIC_WAIT::~DYNAMIC_WAIT()
+ {
+ }
+
+ // Sleeps for a progressively longer period while repeated waits occur within
+ // the same "wait sequence" (calls closer together than
+ // m_WaitSequenceTimeInSeconds). A gap longer than that, or QPC being
+ // unavailable, resets back to the first (shortest) band.
+ // NOTE(review): assumes the last entry of m_WaitBands has
+ // WaitCount == WAIT_BAND_STOP so m_CurrentWaitBandIdx can never run past the
+ // end of the table — confirm against the class definition.
+ void DYNAMIC_WAIT::Wait()
+ {
+     LARGE_INTEGER CurrentQPC = { 0 };
+
+     // Is this wait being called with the period that we consider it to be part of the same wait sequence
+     QueryPerformanceCounter(&CurrentQPC);
+     if (m_QPCValid && (CurrentQPC.QuadPart <= (m_LastWakeUpTime.QuadPart + (m_QPCFrequency.QuadPart * m_WaitSequenceTimeInSeconds))))
+     {
+         // We are still in the same wait sequence, lets check if we should move to the next band
+         if ((m_WaitBands[m_CurrentWaitBandIdx].WaitCount != WAIT_BAND_STOP) && (m_WaitCountInCurrentBand > m_WaitBands[m_CurrentWaitBandIdx].WaitCount))
+         {
+             m_CurrentWaitBandIdx++;
+             m_WaitCountInCurrentBand = 0;
+         }
+     }
+     else
+     {
+         // Either we could not get the current time or we are starting a new wait sequence
+         m_WaitCountInCurrentBand = 0;
+         m_CurrentWaitBandIdx = 0;
+     }
+
+     // Sleep for the required period of time
+     Sleep(m_WaitBands[m_CurrentWaitBandIdx].WaitTime);
+
+     // Record the time we woke up so we can detect wait sequences
+     QueryPerformanceCounter(&m_LastWakeUpTime);
+     m_WaitCountInCurrentBand++;
+ }
diff --git a/plugins/pluginWinDD/version.aps b/plugins/pluginWinDD/version.aps
new file mode 100644
index 0000000..d380036
--- /dev/null
+++ b/plugins/pluginWinDD/version.aps
Binary files differ
diff --git a/plugins/pluginWinDD/version.rc b/plugins/pluginWinDD/version.rc
new file mode 100644
index 0000000..d7f1e0b
--- /dev/null
+++ b/plugins/pluginWinDD/version.rc
@@ -0,0 +1,102 @@
+// Microsoft Visual C++ generated resource script.
+//
+// #include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""afxres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 2.0.0.1156
+ PRODUCTVERSION 2.0.0.1156
+ FILEFLAGSMASK 0x17L
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904b0"
+ BEGIN
+ VALUE "CompanyName", "Doubango Telecom"
+ VALUE "FileDescription", "Doubango IMS Framework Desktop Duplication Plugin"
+ VALUE "FileVersion", "2.0.0.1156"
+ VALUE "InternalName", "pluginWinDD.dll"
+ VALUE "LegalCopyright", "(c) 2010-2015 Doubango Telecom. All rights reserved."
+ VALUE "OriginalFilename", "pluginWinDD.dll"
+ VALUE "ProductName", "Doubango IMS Framework Desktop Duplication Plugin"
+ VALUE "ProductVersion", "2.0.0.1156"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
diff --git a/plugins/pluginWinIPSecVista/AStyle.sh b/plugins/pluginWinIPSecVista/AStyle.sh
new file mode 100644
index 0000000..e736f47
--- /dev/null
+++ b/plugins/pluginWinIPSecVista/AStyle.sh
@@ -0,0 +1 @@
+AStyle.exe --style=k/r --lineend=linux --mode=c --add-brackets --break-closing-brackets --recursive "*.c" "*.h" \ No newline at end of file
diff --git a/plugins/pluginWinIPSecVista/dllmain_ipsec_vista.c b/plugins/pluginWinIPSecVista/dllmain_ipsec_vista.c
new file mode 100644
index 0000000..d61752e
--- /dev/null
+++ b/plugins/pluginWinIPSecVista/dllmain_ipsec_vista.c
@@ -0,0 +1,100 @@
+/* Copyright (C) 2013-2014 Mamadou DIOP
+* Copyright (C) 2013-2014 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_ipsec_vista_config.h"
+
+#include "tipsec.h"
+#include "tsk_plugin.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+
+extern const tipsec_plugin_def_t *plugin_win_ipsec_vista_plugin_def_t;
+
+PLUGIN_WIN_IPSEC_VISTA_BEGIN_DECLS /* BEGIN */
+PLUGIN_WIN_IPSEC_VISTA_API int __plugin_get_def_count();
+PLUGIN_WIN_IPSEC_VISTA_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+PLUGIN_WIN_IPSEC_VISTA_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+PLUGIN_WIN_IPSEC_VISTA_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+PLUGIN_WIN_IPSEC_VISTA_END_DECLS /* END */
+
+// Index of each plugin definition exported by this DLL; PLUGIN_INDEX_COUNT is
+// the sentinel returned by __plugin_get_def_count().
+typedef enum PLUGIN_INDEX_E {
+    PLUGIN_INDEX_WFP, // Windows Filtering Platform (IPSec) backend
+
+    PLUGIN_INDEX_COUNT
+}
+PLUGIN_INDEX_T;
+
+// Standard DLL entry point. This plugin needs no per-process or per-thread
+// initialization, so every notification is accepted as-is.
+BOOL APIENTRY DllMain( HMODULE hModule,
+                       DWORD ul_reason_for_call,
+                       LPVOID lpReserved
+                     )
+{
+    (void)hModule;
+    (void)ul_reason_for_call;
+    (void)lpReserved;
+    return TRUE;
+}
+
+
+// Number of plugin definitions exported by this module (one per
+// PLUGIN_INDEX_E entry).
+int __plugin_get_def_count()
+{
+    return (int)PLUGIN_INDEX_COUNT;
+}
+
+// Returns the definition type at 'index', or tsk_plugin_def_type_none (with a
+// logged error) when the index is out of range.
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+    if (index == PLUGIN_INDEX_WFP) {
+        return tsk_plugin_def_type_ipsec;
+    }
+    TSK_DEBUG_ERROR("No plugin at index %d", index);
+    return tsk_plugin_def_type_none;
+}
+
+// Returns the media type covered by the definition at 'index', or
+// tsk_plugin_def_media_type_none (with a logged error) for a bad index.
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+    if (index == PLUGIN_INDEX_WFP) {
+        return tsk_plugin_def_media_type_all;
+    }
+    TSK_DEBUG_ERROR("No plugin at index %d", index);
+    return tsk_plugin_def_media_type_none;
+}
+
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+ switch(index) {
+ case PLUGIN_INDEX_WFP: {
+ return plugin_win_ipsec_vista_plugin_def_t;
+ }
+ }
+
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+}
diff --git a/plugins/pluginWinIPSecVista/pluginWinIPSecVista.vcproj b/plugins/pluginWinIPSecVista/pluginWinIPSecVista.vcproj
new file mode 100644
index 0000000..7428f2b
--- /dev/null
+++ b/plugins/pluginWinIPSecVista/pluginWinIPSecVista.vcproj
@@ -0,0 +1,212 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9.00"
+ Name="pluginWinIPSecVista"
+ ProjectGUID="{AAD0B2B5-8D8C-4DE0-BB20-1AB6CFAA617D}"
+ RootNamespace="pluginWinIPSecVista"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="196613"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories="..\..\thirdparties\common\include;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyIPSec\src"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;PLUGIN_WIN_IPSEC_VISTA_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyIPSec.lib Ws2_32.lib"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="2"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories="..\..\thirdparties\common\include;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyIPSec\src"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;PLUGIN_WIN_IPSEC_VISTA_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="0"
+ CompileAs="1"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyIPSec.lib Ws2_32.lib"
+ LinkIncremental="1"
+ GenerateDebugInformation="false"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\dllmain_ipsec_vista.c"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_ipsec_vista.c"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\plugin_win_ipsec_vista_config.h"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ <File
+ RelativePath=".\version.rc"
+ >
+ </File>
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c b/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c
new file mode 100644
index 0000000..ac65b94
--- /dev/null
+++ b/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c
@@ -0,0 +1,711 @@
+/* Copyright (C) 2013-2014 Mamadou DIOP
+* Copyright (C) 2013-2014 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_ipsec_vista_config.h"
+
+#include "tipsec.h" /* From tinyIPSec project. Requires linking against "tinyIPSec.lib" */
+
+#include "tsk_memory.h"
+#include "tsk_object.h"
+#include "tsk_debug.h"
+
+#include <ws2tcpip.h>
+#include <Fwpmu.h>
+#include <Rpc.h>
+
+#if defined(_MSC_VER)
+# pragma comment(lib, "Fwpuclnt.lib")
+# pragma comment(lib, "Rpcrt4.lib")
+#endif
+
+typedef FWP_BYTE_BLOB* PFWP_BYTE_BLOB;
+
+/* WFP does not provide a null-encryption transform, so define our own
+   (both cipher type fields zeroed). */
+static const IPSEC_CIPHER_TRANSFORM_ID0 IPSEC_CIPHER_TRANSFORM_ID_NULL_NULL= {
+    (IPSEC_CIPHER_TYPE)NULL,
+    (IPSEC_CIPHER_TYPE)NULL
+};
+
+#define TINYIPSEC_FILTER_NAME TEXT("Doubango Telecom tinyIPSec (Windows Vista)")
+#define TINYIPSEC_PROVIDER_KEY NULL
+
+#define TINYIPSEC_SA_NUM_ENTRIES_TO_REQUEST INT_MAX
+#define TINYIPSEC_SA_MAX_LIFETIME 172799
+
+/* tipsec_* enum -> WFP constant mappings. These expand to unparenthesized
+   conditional expressions; use them only as full right-hand sides. */
+#define TINYIPSEC_VISTA_GET_ALGO(algo) (algo == tipsec_alg_hmac_md5_96) ? IPSEC_AUTH_TRANSFORM_ID_HMAC_MD5_96 : IPSEC_AUTH_TRANSFORM_ID_HMAC_SHA_1_96
+#define TINYIPSEC_VISTA_GET_EALGO(ealg) (ealg == tipsec_ealg_des_ede3_cbc) ? IPSEC_CIPHER_TRANSFORM_ID_CBC_3DES : ( (ealg == tipsec_ealg_aes) ? IPSEC_CIPHER_TRANSFORM_ID_AES_128 : IPSEC_CIPHER_TRANSFORM_ID_NULL_NULL )
+#define TINYIPSEC_VISTA_GET_MODE(mode) (mode == tipsec_mode_tun) ? IPSEC_TRAFFIC_TYPE_TUNNEL : IPSEC_TRAFFIC_TYPE_TRANSPORT
+#define TINYIPSEC_VISTA_GET_IPPROTO(ipproto) (ipproto == tipsec_ipproto_tcp) ? IPPROTO_TCP : ((ipproto == tipsec_ipproto_icmp) ? IPPROTO_ICMP : IPPROTO_UDP)
+#define TINYIPSEC_VISTA_GET_IPVER(ipv6) (ipv6) ? FWP_IP_VERSION_V6 : FWP_IP_VERSION_V4
+/* BUGFIX: removed the stray trailing ';' from this macro's expansion; it
+   injected an empty statement at every use and would break expression
+   contexts (e.g. as a function argument). */
+#define TINYIPSEC_VISTA_GET_PROTO(proto, ealg) (proto == tipsec_proto_ah) ? IPSEC_TRANSFORM_AH : ( (proto == tipsec_proto_esp) ? (ealg == tipsec_ealg_null ? IPSEC_TRANSFORM_ESP_AUTH : IPSEC_TRANSFORM_ESP_AUTH_AND_CIPHER) : IPSEC_TRANSFORM_ESP_AUTH_AND_CIPHER )
+
+/* IPSec context for the Windows Vista+ WFP (Windows Filtering Platform)
+   backend. One instance per secured client/server association.
+   NOTE(review): the uc/us suffixes presumably follow the IMS naming
+   (UE-client / UE-server ports) used elsewhere in tinyIPSec — confirm. */
+typedef struct plugin_win_ipsec_vista_ctx_s {
+    TIPSEC_DECLARE_CTX;
+
+    tipsec_ctx_t* pc_base;   // base (shared) tinyIPSec context
+    UINT64 saId_us;          // SA context id for the "us" leg
+    UINT64 saId_uc;          // SA context id for the "uc" leg
+    UINT64 filterId_in_us;   // WFP filter LUID: inbound, "us"
+    UINT64 filterId_out_us;  // WFP filter LUID: outbound, "us"
+    UINT64 filterId_in_uc;   // WFP filter LUID: inbound, "uc"
+    UINT64 filterId_out_uc;  // WFP filter LUID: outbound, "uc"
+    WCHAR filter_name[256];  // unique display name for the filters (UUID-based)
+
+    HANDLE engine;           // WFP engine handle opened by FwpmEngineOpen0
+}
+plugin_win_ipsec_vista_ctx_t;
+
+static int _vista_createLocalSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx, __in tipsec_port_t local_port, __out tipsec_spi_t *spi, __out UINT64 *saId, __out UINT64 *filterId_in, __out UINT64 *filterId_out);
+static int _vista_boundSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx, __in UINT64 local_saId, __in tipsec_spi_t remote_spi, __in BOOLEAN toInbound);
+
+static int _vista_flushAll(const plugin_win_ipsec_vista_ctx_t* p_ctx);
+static void _vista_deleteSaContextAndFilters(__in HANDLE engine, __in UINT64 inFilterId, __in UINT64 outFilterId, __in UINT64 saId);
+
+//
+// Plugin implementation
+//
+
+// Initializes the context: builds a unique WFP filter display name
+// (base name + fresh UUID + monotonic counter) and opens the WFP engine.
+// Returns tipsec_error_success on success, tipsec_error_sys on UUID/engine
+// failure, tipsec_error_invalid_state if already initialized.
+static tipsec_error_t _plugin_win_ipsec_vista_ctx_init(tipsec_ctx_t* _p_ctx)
+{
+    plugin_win_ipsec_vista_ctx_t* p_ctx = (plugin_win_ipsec_vista_ctx_t*)_p_ctx;
+    DWORD code;
+    UUID uuid;
+    RPC_STATUS status;
+    // NOTE(review): __guard is a plain static counter — not thread-safe if
+    // contexts can be initialized concurrently; confirm single-threaded use.
+    static uint64_t __guard = 0;
+
+    if (p_ctx->pc_base->initialized) {
+        TSK_DEBUG_ERROR("Already initialized");
+        return tipsec_error_invalid_state;
+    }
+
+    /* Create filter name */
+    status = UuidCreate(&uuid);
+    if (status == RPC_S_OK) {
+        WCHAR* wszUuid = NULL;
+        UuidToStringW(&uuid, (RPC_WSTR*)&wszUuid);
+        if (!wszUuid) {
+            TSK_DEBUG_ERROR("Failed to convert the UUID");
+            return tipsec_error_sys;
+        }
+        // Buffer size is correctly expressed in wide characters (element count).
+        swprintf(p_ctx->filter_name, sizeof(p_ctx->filter_name)/sizeof(p_ctx->filter_name[0]), L"%s//%s//%llu", TINYIPSEC_FILTER_NAME, wszUuid, __guard++);
+        RpcStringFree((RPC_WSTR*)&wszUuid);
+    }
+    else {
+        TSK_DEBUG_ERROR("Failed to create new UUID");
+        return tipsec_error_sys;
+    }
+
+
+
+    /* Open engine */
+    if ((code = FwpmEngineOpen0(NULL, RPC_C_AUTHN_WINNT, NULL, NULL, &p_ctx->engine))) {
+        p_ctx->pc_base->initialized = tsk_false;
+        TSK_DEBUG_ERROR("FwpmEngineOpen0 failed with error code [%x].", code);
+        return tipsec_error_sys;
+    }
+    else {
+        p_ctx->pc_base->initialized = tsk_true;
+        p_ctx->pc_base->state = tipsec_state_initial;
+        return tipsec_error_success;
+    }
+}
+
+// Stores the local/remote addresses (binary, IPv4 host-order or IPv6 bytes)
+// and the two local ports, then creates the two local SA contexts + WFP
+// filters (one per local port). On success the context moves to the
+// "inbound" state. Returns tipsec_error_success / _outofmemory / _sys.
+static tipsec_error_t _plugin_win_ipsec_vista_ctx_set_local(tipsec_ctx_t* _p_ctx, const char* addr_local, const char* addr_remote, tipsec_port_t port_uc, tipsec_port_t port_us)
+{
+    plugin_win_ipsec_vista_ctx_t* p_ctx = (plugin_win_ipsec_vista_ctx_t*)_p_ctx;
+    int ret;
+
+    // (Re)allocate binary address storage: 16 bytes for IPv6, 4 for IPv4.
+    _p_ctx->addr_local = tsk_realloc(_p_ctx->addr_local, _p_ctx->use_ipv6 ? 16 : 4);
+    if (!_p_ctx->addr_local) {
+        return tipsec_error_outofmemory;
+    }
+    _p_ctx->addr_remote = tsk_realloc(_p_ctx->addr_remote, _p_ctx->use_ipv6 ? 16 : 4);
+    if (!_p_ctx->addr_remote) {
+        return tipsec_error_outofmemory;
+    }
+
+    /* Set local IP */
+    if (_p_ctx->use_ipv6) {
+        if ((ret = inet_pton(AF_INET6, addr_local, _p_ctx->addr_local)) != 1 ) {
+            TSK_DEBUG_ERROR("inet_pton(%s) have failed with error code [%x].", addr_local, ret);
+            return tipsec_error_sys;
+        }
+        if ((ret = inet_pton(AF_INET6, addr_remote, _p_ctx->addr_remote)) != 1 ) {
+            TSK_DEBUG_ERROR("inet_pton(%s) have failed with error code [%x].", addr_remote, ret);
+            return tipsec_error_sys;
+        }
+    }
+    else {
+        if ((ret = inet_pton(AF_INET, addr_local, _p_ctx->addr_local)) != 1 ) {
+            TSK_DEBUG_ERROR("inet_pton(%s) have failed with error code [%x].", addr_local, ret);
+            return tipsec_error_sys;
+        }
+        else {
+            // WFP expects IPv4 addresses as host-order UINT32.
+            *((UINT32*)_p_ctx->addr_local) = ntohl(*((UINT32*)_p_ctx->addr_local));
+        }
+        if ((ret = inet_pton(AF_INET, addr_remote, _p_ctx->addr_remote)) != 1 ) {
+            TSK_DEBUG_ERROR("inet_pton(%s) have failed with error code [%x].", addr_remote, ret);
+            return tipsec_error_sys;
+        }
+        else {
+            *((UINT32*)_p_ctx->addr_remote) = ntohl(*((UINT32*)_p_ctx->addr_remote));
+        }
+    }
+
+    /* Set ports */
+    _p_ctx->port_uc = port_uc;
+    _p_ctx->port_us = port_us;
+
+    // Create SA1: (UC -> PS)
+    if ((ret = _vista_createLocalSA(p_ctx, _p_ctx->port_uc, &_p_ctx->spi_uc, &p_ctx->saId_uc, &p_ctx->filterId_in_uc, &p_ctx->filterId_out_uc))) {
+        return tipsec_error_sys;
+    }
+
+    // Create SA2: (US <- PC)
+    // BUGFIX: the outbound filter id was previously written to
+    // &p_ctx->filterId_out_uc (copy-paste), clobbering SA1's LUID and leaving
+    // filterId_out_us unset (so it could never be cleaned up).
+    if ((ret = _vista_createLocalSA(p_ctx, _p_ctx->port_us, &_p_ctx->spi_us, &p_ctx->saId_us, &p_ctx->filterId_in_us, &p_ctx->filterId_out_us))) {
+        return tipsec_error_sys;
+    }
+
+    _p_ctx->state = tipsec_state_inbound;
+
+    return tipsec_error_success;
+}
+
+// Records the negotiated remote parameters (proxy-client/proxy-server SPIs
+// and ports) plus the SA lifetime, then marks the context "full".
+// Always succeeds.
+static tipsec_error_t _plugin_win_ipsec_vista_ctx_set_remote(tipsec_ctx_t* _p_ctx, tipsec_spi_t spi_pc, tipsec_spi_t spi_ps, tipsec_port_t port_pc, tipsec_port_t port_ps, tipsec_lifetime_t lifetime)
+{
+    plugin_win_ipsec_vista_ctx_t* p_ctx = (plugin_win_ipsec_vista_ctx_t*)_p_ctx;
+
+    /* Remote SPIs */
+    _p_ctx->spi_pc = spi_pc;
+    _p_ctx->spi_ps = spi_ps;
+
+    /* Remote ports */
+    _p_ctx->port_pc = port_pc;
+    _p_ctx->port_ps = port_ps;
+
+    /* SA lifetime */
+    _p_ctx->lifetime = lifetime;
+
+    _p_ctx->state = tipsec_state_full;
+
+    return tipsec_error_success;
+}
+
+// Copies the integrity key (ik) and cipher key (ck) into FWP_BYTE_BLOB
+// structures stored on the base context (ik/ck fields are reused as blobs).
+// NOTE(review): data buffers are tsk_calloc'ed to TIPSEC_IK_LEN/TIPSEC_CK_LEN
+// but only TIPSEC_KEY_LEN bytes are copied and 'size' is set to
+// TIPSEC_KEY_LEN — _vista_boundSA later pads up to the full length; confirm
+// the length constants are consistent. Also: on a second call the old
+// blob->data pointer is overwritten by a fresh allocation (potential leak on
+// re-key) — verify the caller's lifecycle prevents this.
+static tipsec_error_t _plugin_win_ipsec_vista_ctx_set_keys(tipsec_ctx_t* _p_ctx, const tipsec_key_t* ik, const tipsec_key_t* ck)
+{
+    plugin_win_ipsec_vista_ctx_t* p_ctx = (plugin_win_ipsec_vista_ctx_t*)_p_ctx;
+    PFWP_BYTE_BLOB _ik, _ck;
+
+    /* Compute ik and ck */
+    _p_ctx->ik = tsk_realloc(_p_ctx->ik, sizeof(FWP_BYTE_BLOB));
+    if (!_p_ctx->ik) {
+        return tipsec_error_outofmemory;
+    }
+    _ik = (PFWP_BYTE_BLOB)_p_ctx->ik;
+    _p_ctx->ck = tsk_realloc(_p_ctx->ck, sizeof(FWP_BYTE_BLOB));
+    if (!_p_ctx->ck) {
+        return tipsec_error_outofmemory;
+    }
+    _ck = (PFWP_BYTE_BLOB)_p_ctx->ck;
+
+    _ik->data = tsk_calloc(TIPSEC_IK_LEN, 1);
+    if (!_ik->data) {
+        return tipsec_error_outofmemory;
+    }
+    memcpy(_ik->data, ik, TIPSEC_KEY_LEN);
+    _ik->size = TIPSEC_KEY_LEN;
+
+    _ck->data = tsk_calloc(TIPSEC_CK_LEN, 1);
+    if (!_ck->data) {
+        return tipsec_error_outofmemory;
+    }
+    memcpy(_ck->data, ck, TIPSEC_KEY_LEN);
+    _ck->size = TIPSEC_KEY_LEN;
+
+    return tipsec_error_success;
+}
+
+// Activates the four SAs (two inbound, two outbound) created earlier.
+// Inbound SAs must be added before outbound ones — see the ordering contract
+// quoted below. On success the context becomes "active"/started.
+static tipsec_error_t _plugin_win_ipsec_vista_ctx_start(tipsec_ctx_t* _p_ctx)
+{
+    plugin_win_ipsec_vista_ctx_t* p_ctx = (plugin_win_ipsec_vista_ctx_t*)_p_ctx;
+    int ret;
+
+    /* VERY IMPORTANT: The SA context functions must be called in a specific order:
+       (http://msdn.microsoft.com/en-us/library/bb540652(VS.85).aspx).
+
+       IPsecSaContextCreate0
+       IPsecSaContextGetSpi0
+       IPsecSaContextAddInbound0
+       IPsecSaContextAddOutbound0
+    */
+
+    /* US <- PC */
+    if ((ret = _vista_boundSA(p_ctx, p_ctx->saId_us, _p_ctx->spi_us, TRUE))) {
+        TSK_DEBUG_ERROR("Failed to setup [US <- PC] SA. Error code = %d", ret);
+        return tipsec_error_sys;
+    }
+    /* UC <- PS */
+    if ((ret = _vista_boundSA(p_ctx, p_ctx->saId_uc, _p_ctx->spi_uc, TRUE))) {
+        TSK_DEBUG_ERROR("Failed to setup [UC <- PS] SA. Error code = %d", ret);
+        return tipsec_error_sys;
+    }
+
+    /* UC -> PS */
+    if ((ret = _vista_boundSA(p_ctx, p_ctx->saId_uc, _p_ctx->spi_ps, FALSE))) {
+        TSK_DEBUG_ERROR("Failed to setup [UC -> PS] SA. Error code = %d", ret);
+        return tipsec_error_sys;
+    }
+    /* US -> PC */
+    if ((ret = _vista_boundSA(p_ctx, p_ctx->saId_us, _p_ctx->spi_pc, FALSE))) {
+        TSK_DEBUG_ERROR("Failed to setup [US -> PC] SA. Error code = %d", ret);
+        return tipsec_error_sys;
+    }
+
+    _p_ctx->state = tipsec_state_active;
+    _p_ctx->started = 1;
+
+    return tipsec_error_success;
+}
+
+// Tears down every SA/filter owned by this context and resets it to the
+// initial state. Intentionally best-effort: _vista_flushAll logs its own
+// errors and this function always reports success so teardown never blocks
+// the caller. (Removed an unused 'err' local and stale commented-out code.)
+static tipsec_error_t _plugin_win_ipsec_vista_ctx_stop(tipsec_ctx_t* _p_ctx)
+{
+    plugin_win_ipsec_vista_ctx_t* p_ctx = (plugin_win_ipsec_vista_ctx_t*)_p_ctx;
+
+    /* Flush (delete) all SAs associated to tinyIPSEC */
+    _vista_flushAll(p_ctx);
+
+    _p_ctx->started = 0;
+    _p_ctx->state = tipsec_state_initial;
+
+    return tipsec_error_success;
+}
+
+//
+// Private functions
+//
+// Creates, for one local port: the inbound + outbound WFP transport filters
+// (conditions: local addr, remote addr, local port, optional ip proto), the
+// SA context bound to the outbound filter, and the inbound SPI.
+// On success writes the three LUIDs to the out parameters and returns 0; on
+// any failure rolls back everything created so far and returns -1.
+static int _vista_createLocalSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx, __in tipsec_port_t local_port, __out tipsec_spi_t *spi, __out UINT64 *saId, __out UINT64 *filterId_in, __out UINT64 *filterId_out)
+{
+    DWORD result = NO_ERROR;
+    UINT64 tmpInFilterId = 0, tmpOutFilterId = 0, tmpSaId = 0;
+    FWPM_FILTER0 filter;
+    IPSEC_TRAFFIC0 outTraffic;
+    IPSEC_GETSPI0 getSpi;
+    int ret = -1; // pessimistic default; set to 0 only on full success
+    // NOTE(review): conds[] is not zero-initialized — only the fields set
+    // below are defined; confirm FwpmFilterAdd0 ignores the rest for these
+    // condition types.
+    FWPM_FILTER_CONDITION0 conds[6];
+    UINT32 numFilterConditions = 3;
+
+    *spi = 0;
+    *saId = 0;
+    *filterId_in = 0;
+    *filterId_out = 0;
+
+    // Conditions 0/1: local and remote address (byte-array for v6, host-order
+    // UINT32 for v4 — set_local() already converted with ntohl).
+    conds[0].fieldKey = FWPM_CONDITION_IP_LOCAL_ADDRESS;
+    conds[0].matchType = FWP_MATCH_EQUAL;
+    conds[1].fieldKey = FWPM_CONDITION_IP_REMOTE_ADDRESS;
+    conds[1].matchType = FWP_MATCH_EQUAL;
+    if (p_ctx->pc_base->use_ipv6) {
+        conds[0].conditionValue.type = FWP_BYTE_ARRAY16_TYPE;
+        conds[0].conditionValue.byteArray16 = (FWP_BYTE_ARRAY16*)p_ctx->pc_base->addr_local;
+        conds[1].conditionValue.type = FWP_BYTE_ARRAY16_TYPE;
+        conds[1].conditionValue.byteArray16 = (FWP_BYTE_ARRAY16*)p_ctx->pc_base->addr_remote;
+    }
+    else {
+        conds[0].conditionValue.type = FWP_UINT32;
+        conds[0].conditionValue.uint32 = *((UINT32*)p_ctx->pc_base->addr_local);
+        conds[1].conditionValue.type = FWP_UINT32;
+        conds[1].conditionValue.uint32 = *((UINT32*)p_ctx->pc_base->addr_remote);
+    }
+
+    // Condition 2: the local port this SA protects.
+    conds[2].fieldKey = FWPM_CONDITION_IP_LOCAL_PORT;
+    conds[2].matchType = FWP_MATCH_EQUAL;
+    conds[2].conditionValue.type = FWP_UINT16;
+    conds[2].conditionValue.uint16 = local_port;
+
+    // Optional condition 3: restrict to a single transport protocol.
+    if (p_ctx->pc_base->ipproto != tipsec_ipproto_all) {
+        conds[numFilterConditions].fieldKey = FWPM_CONDITION_IP_PROTOCOL;
+        conds[numFilterConditions].matchType = FWP_MATCH_EQUAL;
+        conds[numFilterConditions].conditionValue.type = FWP_UINT8;
+        conds[numFilterConditions].conditionValue.uint8 = TINYIPSEC_VISTA_GET_IPPROTO(p_ctx->pc_base->ipproto);
+        ++numFilterConditions;
+    }
+
+    // Fill in the common fields shared by both filters.
+    memset(&filter, 0, sizeof(filter));
+    // For MUI compatibility, object names should be indirect strings. See
+    // SHLoadIndirectString for details.
+    filter.displayData.name = (PWCHAR)p_ctx->filter_name;
+    // Link all objects to our provider. When multiple providers are installed
+    // on a computer, this makes it easy to determine who added what.
+    filter.providerKey = (GUID*)TINYIPSEC_PROVIDER_KEY;
+    filter.numFilterConditions = numFilterConditions;
+    filter.filterCondition = conds;
+    filter.action.type = FWP_ACTION_CALLOUT_TERMINATING;
+    filter.flags = FWPM_FILTER_FLAG_NONE;
+    filter.weight.type = FWP_EMPTY;
+
+    // Add the inbound filter.
+    filter.layerKey = (p_ctx->pc_base->use_ipv6) ? FWPM_LAYER_INBOUND_TRANSPORT_V6 : FWPM_LAYER_INBOUND_TRANSPORT_V4;
+    if (p_ctx->pc_base->mode == tipsec_mode_tun) {
+        filter.action.calloutKey = (p_ctx->pc_base->use_ipv6) ? FWPM_CALLOUT_IPSEC_INBOUND_TUNNEL_V6 : FWPM_CALLOUT_IPSEC_INBOUND_TUNNEL_V4;
+    }
+    else {
+        filter.action.calloutKey = (p_ctx->pc_base->use_ipv6) ? FWPM_CALLOUT_IPSEC_INBOUND_TRANSPORT_V6 : FWPM_CALLOUT_IPSEC_INBOUND_TRANSPORT_V4;
+    }
+    if ((result = FwpmFilterAdd0(p_ctx->engine, &filter, NULL, &tmpInFilterId)) != ERROR_SUCCESS) {
+        TSK_DEBUG_ERROR("FwpmFilterAdd0 (inbound) failed with error code [%x]", result);
+        goto CLEANUP;
+    }
+
+    // Add the outbound filter.
+    filter.layerKey = (p_ctx->pc_base->use_ipv6) ? FWPM_LAYER_OUTBOUND_TRANSPORT_V6 : FWPM_LAYER_OUTBOUND_TRANSPORT_V4;
+    if (p_ctx->pc_base->mode == tipsec_mode_tun) {
+        filter.action.calloutKey = (p_ctx->pc_base->use_ipv6) ? FWPM_CALLOUT_IPSEC_OUTBOUND_TUNNEL_V6 : FWPM_CALLOUT_IPSEC_OUTBOUND_TUNNEL_V4;
+    }
+    else {
+        filter.action.calloutKey = (p_ctx->pc_base->use_ipv6) ? FWPM_CALLOUT_IPSEC_OUTBOUND_TRANSPORT_V6 : FWPM_CALLOUT_IPSEC_OUTBOUND_TRANSPORT_V4;
+    }
+    if ((result = FwpmFilterAdd0(p_ctx->engine, &filter, NULL, &tmpOutFilterId)) != ERROR_SUCCESS) {
+        TSK_DEBUG_ERROR("FwpmFilterAdd0(outbound) failed with error code [%x]", result);
+        goto CLEANUP;
+    }
+
+    // Create the SA context using the outbound traffic descriptor.
+    memset(&outTraffic, 0, sizeof(outTraffic));
+    outTraffic.ipVersion = TINYIPSEC_VISTA_GET_IPVER(p_ctx->pc_base->use_ipv6);
+    if (p_ctx->pc_base->use_ipv6) {
+        memcpy(outTraffic.localV6Address, p_ctx->pc_base->addr_local, 16);
+        memcpy(outTraffic.remoteV6Address, p_ctx->pc_base->addr_remote, 16);
+    }
+    else {
+        outTraffic.localV4Address = *((UINT32*)p_ctx->pc_base->addr_local);
+        outTraffic.remoteV4Address = *((UINT32*)p_ctx->pc_base->addr_remote);
+    }
+    outTraffic.trafficType = TINYIPSEC_VISTA_GET_MODE(p_ctx->pc_base->mode);
+    outTraffic.ipsecFilterId = tmpOutFilterId;
+    if ((result = IPsecSaContextCreate0(p_ctx->engine, &outTraffic, NULL, &tmpSaId)) != ERROR_SUCCESS) {
+        TSK_DEBUG_ERROR("IPsecSaContextCreate0 failed with error code [%x]", result);
+        goto CLEANUP;
+    }
+
+    // Get the inbound SPI using the inbound traffic descriptor.
+    memset(&getSpi, 0, sizeof(getSpi));
+    getSpi.inboundIpsecTraffic.ipVersion = TINYIPSEC_VISTA_GET_IPVER(p_ctx->pc_base->use_ipv6);
+    if (p_ctx->pc_base->use_ipv6) {
+        memcpy(getSpi.inboundIpsecTraffic.localV6Address, p_ctx->pc_base->addr_local, 16);
+        memcpy(getSpi.inboundIpsecTraffic.remoteV6Address, p_ctx->pc_base->addr_remote, 16);
+    }
+    else {
+        getSpi.inboundIpsecTraffic.localV4Address = *((UINT32*)p_ctx->pc_base->addr_local);
+        getSpi.inboundIpsecTraffic.remoteV4Address = *((UINT32*)p_ctx->pc_base->addr_remote);
+    }
+    getSpi.inboundIpsecTraffic.trafficType = TINYIPSEC_VISTA_GET_MODE(p_ctx->pc_base->mode);
+    getSpi.inboundIpsecTraffic.ipsecFilterId = tmpInFilterId;
+    getSpi.ipVersion = TINYIPSEC_VISTA_GET_IPVER(p_ctx->pc_base->use_ipv6);
+    // Relies on ERROR_SUCCESS == 0 (truthiness check, unlike the explicit
+    // comparisons above).
+    if ((result = IPsecSaContextGetSpi0(p_ctx->engine, tmpSaId, &getSpi, spi))) {
+        TSK_DEBUG_ERROR("IPsecSaContextGetSpi0 failed with error code [%x]", result);
+        goto CLEANUP;
+    }
+
+    //// Return the various LUIDs to the caller, so he can clean up.
+    *filterId_in = tmpInFilterId;
+    *filterId_out = tmpOutFilterId;
+    *saId = tmpSaId;
+
+CLEANUP:
+    if (result != NO_ERROR) {
+        // Roll back whatever was created (LUIDs still 0 are skipped downstream).
+        _vista_deleteSaContextAndFilters(p_ctx->engine, tmpInFilterId, tmpOutFilterId, tmpSaId);
+    }
+    else {
+        ret = 0;
+    }
+
+    return ret;
+}
+
+/* Binds the locally-created SA context (local_saId) to the remote SPI by
+ * building an IPSEC_SA_BUNDLE0 and adding it inbound or outbound.
+ * @param p_ctx plugin context (engine handle + negotiated parameters).
+ * @param local_saId SA context LUID returned by IPsecSaContextCreate0.
+ * @param remote_spi SPI negotiated with the remote peer.
+ * @param toInbound TRUE to add the bundle inbound (remote->local), FALSE outbound.
+ * @retval 0 on success, -1 otherwise. */
+static int _vista_boundSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx, __in UINT64 local_saId, __in tipsec_spi_t remote_spi, __in BOOLEAN toInbound)
+{
+    UINT32 i = 0, j = 0;
+    DWORD result = NO_ERROR;
+    IPSEC_SA0 sa;
+    IPSEC_SA_BUNDLE0 bundle;
+    /* The three structures below must stay alive until IPsecSaContextAdd*0()
+       returns because "sa" only stores raw pointers into them (sa.xxx = &info).
+       "cipherInfo" was previously declared inside an if-block, which left
+       sa.espCipherInformation dangling by the time the SA was added. */
+    IPSEC_SA_AUTH_INFORMATION0 authInfo;
+    IPSEC_SA_CIPHER_INFORMATION0 cipherInfo;
+    IPSEC_SA_AUTH_AND_CIPHER_INFORMATION0 cipherAuthInfo;
+    PFWP_BYTE_BLOB ik = (PFWP_BYTE_BLOB)p_ctx->pc_base->ik;
+    PFWP_BYTE_BLOB ck = (PFWP_BYTE_BLOB)p_ctx->pc_base->ck;
+
+    memset(&sa, 0, sizeof(sa));
+    sa.spi = remote_spi;
+    sa.saTransformType = TINYIPSEC_VISTA_GET_PROTO(p_ctx->pc_base->protocol, p_ctx->pc_base->ealg);
+
+    //
+    // Keys padding
+    //
+    if (p_ctx->pc_base->alg == tipsec_alg_hmac_sha_1_96) {
+        if (ik->size < TIPSEC_IK_LEN) {
+            /* Zero-pad the integrity key up to TIPSEC_IK_LEN. The loop bound
+               previously used TIPSEC_KEY_LEN, which did not match the size set
+               below. (Bytes are already zeroed by "tsk_calloc" but be explicit.) */
+            for (i = ik->size; i < TIPSEC_IK_LEN; i++) {
+                ik->data[i] = 0x00;
+            }
+            ik->size = TIPSEC_IK_LEN;
+        }
+    }
+    if (p_ctx->pc_base->ealg == tipsec_ealg_des_ede3_cbc) {
+        if (ck->size < TIPSEC_CK_LEN) {
+            /* Expand the cipher key by cyclically repeating its bytes until it
+               reaches the 3DES key length. */
+            for (i = ck->size; i < TIPSEC_CK_LEN; i++) {
+                ck->data[i] = ck->data[j++];
+            }
+            ck->size = TIPSEC_CK_LEN;
+        }
+    }
+
+    //
+    // In all case create Authentication info
+    //
+    memset(&authInfo, 0, sizeof(authInfo));
+    authInfo.authTransform.authTransformId = TINYIPSEC_VISTA_GET_ALGO(p_ctx->pc_base->alg);
+    authInfo.authKey = *ik;
+
+    if ( sa.saTransformType == IPSEC_TRANSFORM_AH ) {
+        sa.ahInformation = &authInfo;
+    }
+    else if ( sa.saTransformType == IPSEC_TRANSFORM_ESP_AUTH ) {
+        sa.espAuthInformation = &authInfo;
+    }
+    else if ( sa.saTransformType == IPSEC_TRANSFORM_ESP_CIPHER ) {
+        memset(&cipherInfo, 0, sizeof(cipherInfo));
+        cipherInfo.cipherTransform.cipherTransformId = TINYIPSEC_VISTA_GET_EALGO(p_ctx->pc_base->ealg);
+        cipherInfo.cipherKey = *ck;
+
+        sa.espCipherInformation = &cipherInfo;
+    }
+    else if ( sa.saTransformType == IPSEC_TRANSFORM_ESP_AUTH_AND_CIPHER ) {
+        memset(&cipherInfo, 0, sizeof(cipherInfo));
+        cipherInfo.cipherTransform.cipherTransformId = TINYIPSEC_VISTA_GET_EALGO(p_ctx->pc_base->ealg);
+        cipherInfo.cipherKey = *ck;
+
+        memset(&cipherAuthInfo, 0, sizeof(cipherAuthInfo));
+        cipherAuthInfo.saAuthInformation = authInfo;
+        cipherAuthInfo.saCipherInformation = cipherInfo;
+
+        sa.espAuthAndCipherInformation = &cipherAuthInfo;
+    }
+
+    memset(&bundle, 0, sizeof(bundle));
+    bundle.numSAs = 1;
+    bundle.saList = &sa;
+    bundle.ipVersion = TINYIPSEC_VISTA_GET_IPVER(p_ctx->pc_base->use_ipv6);
+    /* Clamp the requested lifetime to the supported maximum. */
+    bundle.lifetime.lifetimeSeconds = (UINT32)((p_ctx->pc_base->lifetime > TINYIPSEC_SA_MAX_LIFETIME) ? TINYIPSEC_SA_MAX_LIFETIME : p_ctx->pc_base->lifetime);
+
+    /* From remote to local (inbound) ? */
+    if (toInbound) {
+        if((result = IPsecSaContextAddInbound0(p_ctx->engine, local_saId, &bundle)) != ERROR_SUCCESS) {
+            TSK_DEBUG_ERROR("IPsecSaContextAddInbound0 failed with error code [%x]", result);
+            goto CLEANUP;
+        }
+    }
+    else {
+        if ((result = IPsecSaContextAddOutbound0(p_ctx->engine, local_saId, &bundle)) != ERROR_SUCCESS) {
+            TSK_DEBUG_ERROR("IPsecSaContextAddOutbound0 failed with error code [%x]", result);
+            goto CLEANUP;
+        }
+    }
+
+CLEANUP:
+    return (result == ERROR_SUCCESS) ? 0 : -1;
+}
+
+/* Removes the WFP transport filters installed by this context (best effort).
+ * @retval 0 on success, -1 when the context/engine is unavailable.
+ * NOTE(review): the "#else" branch (enumerate SAs and delete filters matching
+ * this context's display name) is kept for reference but compiled out. */
+static int _vista_flushAll(const plugin_win_ipsec_vista_ctx_t* p_ctx)
+{
+#if 1
+    int ret = -1;
+    if (p_ctx && p_ctx->engine) {
+        DWORD result;
+        /* Delete the four filters (inbound/outbound x client/server ports).
+           FWP_E_FILTER_NOT_FOUND is tolerated: the filter may be gone already. */
+        result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_in_uc);
+        if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
+            TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
+        }
+        result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_in_us);
+        if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
+            TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
+        }
+        result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_out_uc);
+        if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
+            TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
+        }
+        result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_out_us);
+        if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
+            TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
+        }
+        return 0;
+    }
+    //
+    return ret;
+#else
+    UINT32 i;
+    int ret = -1;
+
+    if (p_ctx && p_ctx->engine) {
+        HANDLE enumHandle = NULL;
+        IPSEC_SA_DETAILS0** entries = NULL;
+        UINT32 numEntriesReturned = 0;
+        DWORD result;
+
+        /* Enumerate all SAs, then delete the transport filters whose display
+           name matches the one this context registered. */
+        if ((result = IPsecSaCreateEnumHandle0(p_ctx->engine, NULL, &enumHandle)) != ERROR_SUCCESS) {
+            TSK_DEBUG_ERROR("IPsecSaCreateEnumHandle0 failed with error code [%x].", result);
+            goto CLEANUP;
+        }
+
+        if ((result = IPsecSaEnum0(p_ctx->engine, enumHandle, TINYIPSEC_SA_NUM_ENTRIES_TO_REQUEST, &entries, &numEntriesReturned)) != ERROR_SUCCESS) {
+            TSK_DEBUG_ERROR("IPsecSaEnum0 failed with error code [%x].", result);
+            goto CLEANUP;
+        }
+
+        for (i = 0; i<numEntriesReturned; i++) {
+            IPSEC_SA_DETAILS0* entry = (entries)[i];
+            if ( !wcscmp(entry->transportFilter->displayData.name, p_ctx->filter_name)) {
+                if ((result = FwpmFilterDeleteById0(p_ctx->engine, entry->transportFilter->filterId)) != ERROR_SUCCESS) {
+                    TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x].", result);
+                    goto CLEANUP;
+                }
+            }
+        }
+
+        TSK_DEBUG_INFO("All SAs have been flushed.");
+        ret = 0;
+
+CLEANUP:
+        /* Memory returned by IPsecSaEnum0 must be released with FwpmFreeMemory0. */
+        if (entries) {
+            FwpmFreeMemory0((void**)entries);
+        }
+        if (enumHandle) {
+            if ((result = IPsecSaDestroyEnumHandle0(p_ctx->engine, enumHandle)) != ERROR_SUCCESS) {
+                TSK_DEBUG_ERROR("IPsecSaDestroyEnumHandle0 failed with error code [%x].", result);
+            }
+        }
+    }
+
+    return ret;
+#endif
+}
+
+/* Deletes the SA context and both transport filters identified by the given
+ * LUIDs. A zero LUID is skipped, so this can also clean up partial results.
+ * Failures are logged and the remaining objects are still deleted. */
+static void _vista_deleteSaContextAndFilters(__in HANDLE engine, __in UINT64 inFilterId, __in UINT64 outFilterId, __in UINT64 saId)
+{
+    DWORD code;
+
+    if (saId) {
+        if ((code = IPsecSaContextDeleteById0(engine, saId)) != ERROR_SUCCESS) {
+            /* Nothing more can be done on failure; keep deleting the rest. */
+            TSK_DEBUG_ERROR("IPsecSaContextDeleteById0 = 0x%08X\n", code);
+        }
+    }
+    if (outFilterId) {
+        if ((code = FwpmFilterDeleteById0(engine, outFilterId)) != ERROR_SUCCESS) {
+            TSK_DEBUG_ERROR("FwpmFilterDeleteById0 = 0x%08X\n", code);
+        }
+    }
+    if (inFilterId) {
+        if ((code = FwpmFilterDeleteById0(engine, inFilterId)) != ERROR_SUCCESS) {
+            TSK_DEBUG_ERROR("FwpmFilterDeleteById0 = 0x%08X\n", code);
+        }
+    }
+}
+
+//
+// Windows Vista IPSec Plugin definition
+//
+
+/* constructor */
+static tsk_object_t* _plugin_win_ipsec_vista_ctx_ctor(tsk_object_t * self, va_list * app)
+{
+    plugin_win_ipsec_vista_ctx_t *p_ctx = (plugin_win_ipsec_vista_ctx_t *)self;
+    if (!p_ctx) {
+        return self;
+    }
+    /* Cache a typed pointer to the embedded base context. */
+    p_ctx->pc_base = TIPSEC_CTX(p_ctx);
+    return self;
+}
+/* destructor */
+/* Destructor: stops the context if still running, closes the WFP engine
+ * handle, then frees all heap-allocated members of the base context. */
+static tsk_object_t* _plugin_win_ipsec_vista_ctx_dtor(tsk_object_t * self)
+{
+    plugin_win_ipsec_vista_ctx_t *p_ctx = (plugin_win_ipsec_vista_ctx_t *)self;
+    if (p_ctx) {
+        DWORD code;
+
+        /* Stop first (removes SAs/filters) while the engine handle is valid. */
+        if (p_ctx->pc_base->started) {
+            tipsec_ctx_stop(p_ctx->pc_base);
+        }
+
+        /* Close engine */
+        if (p_ctx->engine) {
+            if ((code = FwpmEngineClose0(p_ctx->engine))) {
+                TSK_DEBUG_ERROR("FwpmEngineClose0 failed with error code [%x].", code);
+            }
+        }
+
+        TSK_FREE(p_ctx->pc_base->addr_local);
+        TSK_FREE(p_ctx->pc_base->addr_remote);
+
+        /* ik/ck are FWP_BYTE_BLOBs: free the payload before the blob itself. */
+        if (p_ctx->pc_base->ik) {
+            TSK_FREE(((PFWP_BYTE_BLOB)p_ctx->pc_base->ik)->data);
+            TSK_FREE(p_ctx->pc_base->ik);
+        }
+        if (p_ctx->pc_base->ck) {
+            TSK_FREE(((PFWP_BYTE_BLOB)p_ctx->pc_base->ck)->data);
+            TSK_FREE(p_ctx->pc_base->ck);
+        }
+
+        TSK_DEBUG_INFO("*** Windows Vista IPSec plugin (Windows Filtering Platform) context destroyed ***");
+    }
+
+    return self;
+}
+/* object definition */
+static const tsk_object_def_t plugin_win_ipsec_vista_ctx_def_s = {
+    sizeof(plugin_win_ipsec_vista_ctx_t),   // object size
+    _plugin_win_ipsec_vista_ctx_ctor,       // constructor
+    _plugin_win_ipsec_vista_ctx_dtor,       // destructor
+    tsk_null,                               // no comparator
+};
+/* plugin definition*/
+static const tipsec_plugin_def_t plugin_win_ipsec_vista_plugin_def_s = {
+    &plugin_win_ipsec_vista_ctx_def_s,
+
+    tipsec_impl_type_vista,
+    "Windows Vista IPSec (Windows Filtering Platform)",
+
+    /* Callbacks invoked by the tinyIPSec framework. */
+    _plugin_win_ipsec_vista_ctx_init,
+    _plugin_win_ipsec_vista_ctx_set_local,
+    _plugin_win_ipsec_vista_ctx_set_remote,
+    _plugin_win_ipsec_vista_ctx_set_keys,
+    _plugin_win_ipsec_vista_ctx_start,
+    _plugin_win_ipsec_vista_ctx_stop,
+};
+/* Public handle to the plugin definition. */
+const tipsec_plugin_def_t *plugin_win_ipsec_vista_plugin_def_t = &plugin_win_ipsec_vista_plugin_def_s;
diff --git a/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista_config.h b/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista_config.h
new file mode 100644
index 0000000..61bc822
--- /dev/null
+++ b/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista_config.h
@@ -0,0 +1,75 @@
+/* Copyright (C) 2013-2014 Mamadou DIOP
+* Copyright (C) 2013-2014 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_IPSEC_VISTA_CONFIG_H
+#define PLUGIN_WIN_IPSEC_VISTA_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+# define PLUGIN_WIN_IPSEC_VISTA_UNDER_WINDOWS 1
+# if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP || WINAPI_FAMILY == WINAPI_FAMILY_APP)
+# define PLUGIN_WIN_IPSEC_VISTA_UNDER_WINDOWS_RT 1
+# endif
+#endif
+
+#if (PLUGIN_WIN_IPSEC_VISTA_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(PLUGIN_WIN_IPSEC_VISTA_EXPORTS)
+# define PLUGIN_WIN_IPSEC_VISTA_API __declspec(dllexport)
+# define PLUGIN_WIN_IPSEC_VISTA_GEXTERN extern __declspec(dllexport)
+#elif (PLUGIN_WIN_IPSEC_VISTA_UNDER_WINDOWS || defined(__SYMBIAN32__)) && !defined(PLUGIN_WIN_IPSEC_VISTA_IMPORTS_IGNORE)
+# define PLUGIN_WIN_IPSEC_VISTA_API __declspec(dllimport)
+# define PLUGIN_WIN_IPSEC_VISTA_GEXTERN __declspec(dllimport)
+#else
+# define PLUGIN_WIN_IPSEC_VISTA_API
+# define PLUGIN_WIN_IPSEC_VISTA_GEXTERN extern
+#endif
+
+// x86
+#if defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+# define PLUGIN_WIN_IPSEC_VISTA_UNDER_X86 1
+#endif
+
+// Guards against C++ name mangling
+#ifdef __cplusplus
+# define PLUGIN_WIN_IPSEC_VISTA_BEGIN_DECLS extern "C" {
+# define PLUGIN_WIN_IPSEC_VISTA_END_DECLS }
+#else
+# define PLUGIN_WIN_IPSEC_VISTA_BEGIN_DECLS
+# define PLUGIN_WIN_IPSEC_VISTA_END_DECLS
+#endif
+
+#ifdef _MSC_VER
+# define inline __inline
+# define _CRT_SECURE_NO_WARNINGS
+# define _ALLOW_KEYWORD_MACROS
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#endif // PLUGIN_WIN_IPSEC_VISTA_CONFIG_H
diff --git a/plugins/pluginWinIPSecVista/version.rc b/plugins/pluginWinIPSecVista/version.rc
new file mode 100644
index 0000000..26bdf78
--- /dev/null
+++ b/plugins/pluginWinIPSecVista/version.rc
@@ -0,0 +1,102 @@
+// Microsoft Visual C++ generated resource script.
+//
+// #include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""afxres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 2.0.0.1156
+ PRODUCTVERSION 2.0.0.1156
+ FILEFLAGSMASK 0x17L
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904b0"
+ BEGIN
+ VALUE "CompanyName", "Doubango Telecom"
+            VALUE "FileDescription", "Doubango IMS Framework IPSec implementation for Windows Vista and later"
+ VALUE "FileVersion", "2.0.0.1156"
+            VALUE "InternalName", "pluginWinIPSecVista.dll"
+ VALUE "LegalCopyright", "(c) 2010-2013 Doubango Telecom. All rights reserved."
+ VALUE "OriginalFilename", "pluginWinIPSecVista.dll"
+            VALUE "ProductName", "Doubango IMS Framework IPSec implementation"
+ VALUE "ProductVersion", "2.0.0.1156"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
diff --git a/plugins/pluginWinMF/dllmain_mf.cxx b/plugins/pluginWinMF/dllmain_mf.cxx
new file mode 100644
index 0000000..aeeb863
--- /dev/null
+++ b/plugins/pluginWinMF/dllmain_mf.cxx
@@ -0,0 +1,244 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+
+#include "tinymedia/tmedia_producer.h"
+#include "tinymedia/tmedia_consumer.h"
+#include "tinymedia/tmedia_converter_video.h"
+
+#include "tsk_plugin.h"
+#include "tsk_debug.h"
+
+#include <windows.h>
+
+#if defined(_MSC_VER)
+# pragma comment(lib, "mfplat")
+# pragma comment(lib, "mf")
+# pragma comment(lib, "mfuuid")
+# pragma comment(lib, "shlwapi")
+# pragma comment(lib, "Strmiids")
+#endif
+
+#if !defined(PLUGIN_MF_ENABLE_AUDIO_IO)
+# define PLUGIN_MF_ENABLE_AUDIO_IO 0 /* audio not good as DirectSound */
+#endif
+#if !defined(PLUGIN_MF_ENABLE_VIDEO_CONVERTER)
+# define PLUGIN_MF_ENABLE_VIDEO_CONVERTER 1
+#endif
+#if !defined(PLUGIN_MF_ENABLE_VIDEO_IO)
+# define PLUGIN_MF_ENABLE_VIDEO_IO 1
+#endif
+
+extern const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t;
+extern const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t;
+
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+extern const tmedia_converter_video_plugin_def_t *plugin_win_mf_converter_video_ms_plugin_def_t;
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+extern const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t;
+extern const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t;
+#endif
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+extern const tmedia_producer_plugin_def_t *plugin_win_mf_producer_audio_plugin_def_t;
+extern const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_audio_plugin_def_t;
+#endif
+
+PLUGIN_WIN_MF_BEGIN_DECLS /* BEGIN */
+PLUGIN_WIN_MFP_API int __plugin_get_def_count();
+PLUGIN_WIN_MFP_API tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
+PLUGIN_WIN_MFP_API tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
+PLUGIN_WIN_MFP_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
+PLUGIN_WIN_MF_END_DECLS /* END */
+
+// Standard DLL entry point. This plugin performs no per-process or
+// per-thread initialization, so every notification is accepted as-is.
+BOOL APIENTRY DllMain(HMODULE hModule, DWORD ul_reason_for_call, LPVOID lpReserved)
+{
+    (void)hModule;
+    (void)ul_reason_for_call;
+    (void)lpReserved;
+    return TRUE;
+}
+
+
+/* Index of each plugin slot exposed by __plugin_get_def_at(). Entries are
+ * compiled in/out with the PLUGIN_MF_ENABLE_* flags, so the numeric values
+ * depend on the build configuration. */
+typedef enum PLUGIN_INDEX_E
+{
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+    PLUGIN_INDEX_AUDIO_CONSUMER,
+    PLUGIN_INDEX_AUDIO_PRODUCER,
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+    PLUGIN_INDEX_VIDEO_PRODUCER,
+    PLUGIN_INDEX_VIDEO_CONSUMER,
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+    PLUGIN_INDEX_VIDEO_CONVERTER,
+#endif
+
+    PLUGIN_INDEX_CODEC_H264_MAIN,
+    PLUGIN_INDEX_CODEC_H264_BASE,
+
+    PLUGIN_INDEX_COUNT /* always last: total number of slots */
+}
+PLUGIN_INDEX_T;
+
+
+int __plugin_get_def_count()
+{
+    // The two H.264 codec slots are only exposed when the Media Foundation
+    // low-latency H.264 codec is available on this system.
+    return MFUtils::IsLowLatencyH264Supported() ? PLUGIN_INDEX_COUNT : (PLUGIN_INDEX_COUNT - 2);
+}
+
+/* Returns the plugin type (consumer/producer/converter/codec) at @index, or
+ * tsk_plugin_def_type_none when the feature is unavailable at runtime. */
+tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
+{
+    switch(index){
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+    case PLUGIN_INDEX_AUDIO_CONSUMER:
+    case PLUGIN_INDEX_AUDIO_PRODUCER:
+        {
+            return (index == PLUGIN_INDEX_AUDIO_CONSUMER) ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_producer;
+        }
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+    case PLUGIN_INDEX_VIDEO_CONSUMER:
+        {
+            // The video consumer requires Direct3D9 support.
+            return MFUtils::IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
+        }
+    case PLUGIN_INDEX_VIDEO_PRODUCER:
+        {
+            return tsk_plugin_def_type_producer;
+        }
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+    case PLUGIN_INDEX_VIDEO_CONVERTER:
+        {
+            return tsk_plugin_def_type_converter;
+        }
+#endif
+    case PLUGIN_INDEX_CODEC_H264_MAIN:
+    case PLUGIN_INDEX_CODEC_H264_BASE:
+        {
+            // H.264 slots are only valid when the MF low-latency codec exists.
+            return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
+        }
+    default:
+        {
+            TSK_DEBUG_ERROR("No plugin at index %d", index);
+            return tsk_plugin_def_type_none;
+        }
+    }
+}
+
+/* Returns the media type (audio/video) of the plugin at @index, or
+ * tsk_plugin_def_media_type_none when the feature is unavailable at runtime. */
+tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
+{
+    switch(index){
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+    case PLUGIN_INDEX_AUDIO_CONSUMER:
+    case PLUGIN_INDEX_AUDIO_PRODUCER:
+        {
+            return tsk_plugin_def_media_type_audio;
+        }
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+    case PLUGIN_INDEX_VIDEO_CONSUMER:
+        {
+            // Mirrors __plugin_get_def_type_at(): consumer needs Direct3D9.
+            return MFUtils::IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+        }
+    case PLUGIN_INDEX_VIDEO_PRODUCER:
+        {
+            return tsk_plugin_def_media_type_video;
+        }
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+    case PLUGIN_INDEX_VIDEO_CONVERTER:
+        {
+            return tsk_plugin_def_media_type_video;
+        }
+#endif
+    case PLUGIN_INDEX_CODEC_H264_MAIN:
+    case PLUGIN_INDEX_CODEC_H264_BASE:
+        {
+            return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+        }
+    default:
+        {
+            TSK_DEBUG_ERROR("No plugin at index %d", index);
+            return tsk_plugin_def_media_type_none;
+        }
+    }
+}
+
+/* Returns the plugin definition at @index, or tsk_null when the corresponding
+ * runtime capability (D3D9, low-latency H.264) is missing. */
+tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
+{
+    switch(index){
+#if PLUGIN_MF_ENABLE_VIDEO_IO
+    case PLUGIN_INDEX_VIDEO_PRODUCER:
+        {
+            return plugin_win_mf_producer_video_plugin_def_t;
+        }
+    case PLUGIN_INDEX_VIDEO_CONSUMER:
+        {
+            return MFUtils::IsD3D9Supported() ? plugin_win_mf_consumer_video_plugin_def_t : tsk_null;
+        }
+#endif
+#if PLUGIN_MF_ENABLE_AUDIO_IO
+    case PLUGIN_INDEX_AUDIO_PRODUCER:
+        {
+            return plugin_win_mf_producer_audio_plugin_def_t;
+        }
+    case PLUGIN_INDEX_AUDIO_CONSUMER:
+        {
+            return plugin_win_mf_consumer_audio_plugin_def_t;
+        }
+#endif
+#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
+    case PLUGIN_INDEX_VIDEO_CONVERTER:
+        {
+            return plugin_win_mf_converter_video_ms_plugin_def_t;
+        }
+#endif
+    case PLUGIN_INDEX_CODEC_H264_MAIN:
+        {
+            return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_main_plugin_def_t : tsk_null;
+        }
+    case PLUGIN_INDEX_CODEC_H264_BASE:
+        {
+            return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_base_plugin_def_t : tsk_null;
+        }
+    default:
+        {
+            TSK_DEBUG_ERROR("No plugin at index %d", index);
+            return tsk_null;
+        }
+    }
+}
diff --git a/plugins/pluginWinMF/internals/mf_codec.cxx b/plugins/pluginWinMF/internals/mf_codec.cxx
new file mode 100644
index 0000000..e2968f4
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_codec.cxx
@@ -0,0 +1,888 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_codec.h"
+#include "mf_utils.h"
+#include "mf_sample_queue.h"
+
+#include "tinymedia/tmedia_common.h"
+
+#include "tsk_debug.h"
+
+#include <KS.h>
+#include <Codecapi.h>
+#include <assert.h>
+#include <initguid.h>
+
+// NV12 is the only format supported by all HW encoders and decoders
+#if !defined(kMFCodecUncompressedFormat)
+# define kMFCodecUncompressedFormat MFVideoFormat_NV12
+#endif
+
+// Max frames allowed in the queue
+#if !defined(kMFCodecQueuedFramesMax)
+# define kMFCodecQueuedFramesMax (30 << 1)
+#endif
+
+// Make sure usable on Win7 SDK targeting Win8 OS
+#if !defined(CODECAPI_AVLowLatencyMode)
+DEFINE_GUID(CODECAPI_AVLowLatencyMode,
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+#endif
+#if !defined(CODECAPI_AVDecVideoH264ErrorConcealment)
+DEFINE_GUID(CODECAPI_AVDecVideoH264ErrorConcealment,
+0xececace8, 0x3436, 0x462c, 0x92, 0x94, 0xcd, 0x7b, 0xac, 0xd7, 0x58, 0xa9);
+#endif
+
+//
+// MFCodec
+//
+
+/* Builds a codec wrapper around an IMFTransform. If @pMFT is NULL the best
+ * available MFT is located via MFUtils::GetBestCodec(); otherwise the provided
+ * transform is AddRef'd and used directly. On failure the MFT/ICodecAPI
+ * pointers are released and IsValid() will return false. */
+MFCodec::MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+: m_nRefCount(1)
+, m_eId(eId)
+, m_eType(eType)
+, m_pMFT(NULL)
+, m_pCodecAPI(NULL)
+, m_pOutputType(NULL)
+, m_pInputType(NULL)
+, m_dwInputID(0)
+, m_dwOutputID(0)
+, m_rtStart(0)
+, m_rtDuration(0)
+, m_pSampleIn(NULL)
+, m_pSampleOut(NULL)
+, m_pEventGenerator(NULL)
+, m_bIsAsync(FALSE)
+, m_bIsFirstFrame(TRUE)
+, m_bIsBundled(FALSE)
+, m_nMETransformNeedInputCount(0)
+, m_nMETransformHaveOutputCount(0)
+, m_pSampleQueueAsyncInput(NULL)
+{
+    MFUtils::Startup();
+
+    HRESULT hr = S_OK;
+
+    /* Derive media type (audio/video) and compressed format from the codec id. */
+    switch(eId)
+    {
+    case MFCodecId_H264Base:
+    case MFCodecId_H264Main:
+        {
+            m_eMediaType = MFCodecMediaType_Video;
+            m_guidCompressedFormat = MFVideoFormat_H264;
+            break;
+        }
+    case MFCodecId_AAC:
+        {
+            m_eMediaType = MFCodecMediaType_Audio;
+            m_guidCompressedFormat = MFAudioFormat_AAC;
+            break;
+        }
+    default:
+        {
+            assert(false);
+            break;
+        }
+    }
+    CHECK_HR(hr = MFCreateMediaType(&m_pOutputType));
+    CHECK_HR(hr = MFCreateMediaType(&m_pInputType));
+    if(pMFT) // up to the caller to make sure all parameters are correct
+    {
+        m_pMFT = pMFT;
+        m_pMFT->AddRef();
+    }
+    else
+    {
+        /* Encoders take the uncompressed format as input and produce the
+           compressed format; decoders do the opposite. */
+        CHECK_HR(hr = MFUtils::GetBestCodec(
+            (m_eType == MFCodecType_Encoder) ? TRUE : FALSE, // Encoder ?
+            (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio, // Media Type
+            (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat/*GUID_NULL*/, // Input
+            (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat, // Output
+            &m_pMFT));
+    }
+    hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pCodecAPI));
+    if(FAILED(hr) && m_eType == MFCodecType_Encoder) // Required only for Encoders
+    {
+        CHECK_HR(hr);
+    }
+
+
+    /* Asynchronous MFTs must be unlocked and drive input/output through
+       media events; set up the queue and the event generator for that case. */
+    CHECK_HR(hr = MFUtils::IsAsyncMFT(m_pMFT, &m_bIsAsync));
+    if(m_bIsAsync)
+    {
+        m_pSampleQueueAsyncInput = new MFSampleQueue();
+        if(!m_pSampleQueueAsyncInput)
+        {
+            CHECK_HR(hr = E_OUTOFMEMORY);
+        }
+        CHECK_HR(hr = MFUtils::UnlockAsyncMFT(m_pMFT));
+        CHECK_HR(hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pEventGenerator)));
+    }
+
+bail:
+    if(FAILED(hr))
+    {
+        SafeRelease(&m_pMFT);
+        SafeRelease(&m_pCodecAPI);
+    }
+    if(!IsValid())
+    {
+        TSK_DEBUG_ERROR("Failed to create codec with id = %d", m_eId);
+    }
+}
+
+/* Destructor: drains asynchronous MFTs before releasing all COM members. */
+MFCodec::~MFCodec()
+{
+    assert(m_nRefCount == 0);
+
+    /* For async MFTs, signal end-of-stream and drain pending samples before
+       tearing down the transform. */
+    if(m_bIsAsync && m_pMFT)
+    {
+        m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
+        m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
+    }
+
+    SafeRelease(&m_pMFT);
+    SafeRelease(&m_pCodecAPI);
+    SafeRelease(&m_pOutputType);
+    SafeRelease(&m_pInputType);
+    SafeRelease(&m_pSampleIn);
+    SafeRelease(&m_pSampleOut);
+    SafeRelease(&m_pEventGenerator);
+    SafeRelease(&m_pSampleQueueAsyncInput);
+}
+
+// COM-style reference counting: atomically bump and return the new count.
+ULONG MFCodec::AddRef()
+{
+    ULONG uNewCount = InterlockedIncrement(&m_nRefCount);
+    return uNewCount;
+}
+
+ULONG MFCodec::Release()
+{
+    // Snapshot the decremented count before a potential "delete this" so the
+    // return value never reads freed memory.
+    const ULONG uRemaining = InterlockedDecrement(&m_nRefCount);
+    if (!uRemaining)
+    {
+        delete this;
+    }
+    return uRemaining;
+}
+
+HRESULT MFCodec::QueryInterface(REFIID iid, void** ppv)
+{
+    // Delegate to the wrapped MFT; fail when construction did not succeed.
+    return IsValid() ? m_pMFT->QueryInterface(iid, ppv) : E_FAIL;
+}
+
+// IMFAsyncCallback
+STDMETHODIMP MFCodec::GetParameters(DWORD *pdwFlags, DWORD *pdwQueue)
+{
+    // Optional IMFAsyncCallback hook: default flags/queue are acceptable.
+    return E_NOTIMPL;
+}
+
+/* IMFAsyncCallback::Invoke — called by the async MFT's event generator.
+ * Counts METransformNeedInput/METransformHaveOutput events (consumed by
+ * ProcessInput/ProcessOutput) and re-arms BeginGetEvent for the next event. */
+STDMETHODIMP MFCodec::Invoke(IMFAsyncResult *pAsyncResult)
+{
+    HRESULT hr = S_OK, hrStatus = S_OK;
+    IMFMediaEvent* pEvent = NULL;
+    MediaEventType meType = MEUnknown;
+
+    CHECK_HR(hr = m_pEventGenerator->EndGetEvent(pAsyncResult, &pEvent));
+    CHECK_HR(hr = pEvent->GetType(&meType));
+    CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+
+    if (SUCCEEDED(hrStatus))
+    {
+        switch(meType)
+        {
+        case METransformNeedInput:
+            {
+                InterlockedIncrement(&m_nMETransformNeedInputCount);
+                break;
+            }
+
+        case METransformHaveOutput:
+            {
+                InterlockedIncrement(&m_nMETransformHaveOutputCount);
+                break;
+            }
+        }
+    }
+
+    /* Request the next event; without this no further events are delivered. */
+    CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
+
+bail:
+    SafeRelease(&pEvent);
+    return hr;
+}
+
+/* Feeds @pSample to the transform. Synchronous MFTs get the sample directly.
+ * Asynchronous MFTs only accept input after a METransformNeedInput event, so
+ * the sample's payload is deep-copied (the caller's buffer is shared memory)
+ * and queued until the MFT asks for it. */
+HRESULT MFCodec::ProcessInput(IMFSample* pSample)
+{
+    assert(IsReady());
+
+    HRESULT hr = S_OK;
+
+    if(m_bIsFirstFrame)
+    {
+        /* First sample: start the stream and arm the event callback (async only). */
+        if(m_bIsAsync && !m_bIsBundled)
+        {
+            CHECK_HR(hr = m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL));
+            CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
+        }
+        m_bIsFirstFrame = FALSE;
+    }
+
+    if(m_bIsAsync)
+    {
+        /* Fast path: exactly one pending NeedInput and nothing queued —
+           hand the sample over without copying. */
+        if(m_nMETransformNeedInputCount == 1 && m_pSampleQueueAsyncInput->IsEmpty())
+        {
+            InterlockedDecrement(&m_nMETransformNeedInputCount);
+            return m_pMFT->ProcessInput(m_dwInputID, pSample, 0);
+        }
+
+        /* Drop everything if the MFT is falling too far behind. */
+        if(m_pSampleQueueAsyncInput->Count() > kMFCodecQueuedFramesMax)
+        {
+            m_pSampleQueueAsyncInput->Clear();
+            CHECK_HR(hr = E_UNEXPECTED);
+        }
+
+        // Input sample holds shared memory (also used by other samples)
+        IMFSample *pSampleCopy = NULL;
+        IMFMediaBuffer *pMediaBuffer = NULL, *pMediaBufferCopy = NULL;
+        BYTE *pBufferPtr = NULL, *pBufferPtrCopy = NULL;
+        DWORD dwDataLength = 0;
+        BOOL bMediaBufferLocked = FALSE, bMediaBufferLockedCopy = FALSE;
+
+        /* Deep-copy the sample payload; track lock state so both buffers are
+           unlocked on every exit path ("endofcopy"). */
+        CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+        hr = pMediaBuffer->GetCurrentLength(&dwDataLength);
+        if(FAILED(hr))
+        {
+            goto endofcopy;
+        }
+        hr = pMediaBuffer->Lock(&pBufferPtr, NULL, NULL);
+        if(FAILED(hr))
+        {
+            goto endofcopy;
+        }
+        bMediaBufferLocked = TRUE;
+
+        hr = MFUtils::CreateMediaSample(dwDataLength, &pSampleCopy);
+        if(FAILED(hr))
+        {
+            goto endofcopy;
+        }
+        hr = pSampleCopy->GetBufferByIndex(0, &pMediaBufferCopy);
+        if(FAILED(hr))
+        {
+            goto endofcopy;
+        }
+        hr = pMediaBufferCopy->Lock(&pBufferPtrCopy, NULL, NULL);
+        if(FAILED(hr))
+        {
+            goto endofcopy;
+        }
+        bMediaBufferLockedCopy = TRUE;
+
+        memcpy(pBufferPtrCopy, pBufferPtr, dwDataLength);
+        hr = pMediaBufferCopy->SetCurrentLength(dwDataLength);
+        if(FAILED(hr))
+        {
+            goto endofcopy;
+        }
+
+        /* Propagate time/duration when present (failures are non-fatal). */
+        LONGLONG hnsSampleTime = 0;
+        LONGLONG hnsSampleDuration = 0;
+        hr = pSample->GetSampleTime(&hnsSampleTime);
+        if(SUCCEEDED(hr))
+        {
+            hr = pSampleCopy->SetSampleTime(hnsSampleTime);
+        }
+        hr = pSample->GetSampleDuration(&hnsSampleDuration);
+        if(SUCCEEDED(hr))
+        {
+            hr = pSampleCopy->SetSampleDuration(hnsSampleDuration);
+        }
+
+        // EnQueue
+        hr = m_pSampleQueueAsyncInput->Queue(pSampleCopy);
+endofcopy:
+        if(pMediaBuffer && bMediaBufferLocked)
+        {
+            pMediaBuffer->Unlock();
+        }
+        if(pMediaBufferCopy && bMediaBufferLockedCopy)
+        {
+            pMediaBufferCopy->Unlock();
+        }
+        SafeRelease(&pSampleCopy);
+        SafeRelease(&pMediaBuffer);
+        CHECK_HR(hr);
+
+        /* Satisfy as many pending NeedInput requests as the queue allows. */
+        while(m_nMETransformNeedInputCount > 0)
+        {
+            if(m_pSampleQueueAsyncInput->IsEmpty())
+            {
+                break;
+            }
+            IMFSample *_pSample = NULL;
+            hr = m_pSampleQueueAsyncInput->Dequeue(&_pSample);
+            if(SUCCEEDED(hr))
+            {
+                InterlockedDecrement(&m_nMETransformNeedInputCount);
+                hr = m_pMFT->ProcessInput(m_dwInputID, _pSample, 0);
+            }
+            SafeRelease(&_pSample);
+            CHECK_HR(hr);
+        }
+    }
+    else
+    {
+        CHECK_HR(hr = m_pMFT->ProcessInput(m_dwInputID, pSample, 0));
+    }
+
+bail:
+    return hr;
+}
+
+/* Pulls one output sample from the transform into *ppSample (AddRef'd for the
+ * caller; NULL when no output is available). When the MFT does not provide
+ * its own samples, m_pSampleOut is (re)used and grown as needed.
+ * Returns S_OK with *ppSample == NULL when more input is required. */
+HRESULT MFCodec::ProcessOutput(IMFSample **ppSample)
+{
+    assert(IsReady());
+
+    if(m_bIsAsync)
+    {
+        /* Async MFTs only produce output after a METransformHaveOutput event. */
+        if(m_nMETransformHaveOutputCount == 0)
+        {
+            return S_OK;
+        }
+        InterlockedDecrement(&m_nMETransformHaveOutputCount);
+    }
+
+    *ppSample = NULL;
+
+    IMFMediaBuffer* pBufferOut = NULL;
+
+    DWORD dwStatus;
+
+    HRESULT hr = S_OK;
+
+    MFT_OUTPUT_STREAM_INFO mftStreamInfo = { 0 };
+    MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
+
+    CHECK_HR(hr = m_pMFT->GetOutputStreamInfo(m_dwOutputID, &mftStreamInfo));
+
+    BOOL bOutputStreamProvidesSamples = (mftStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES;
+
+    if(!bOutputStreamProvidesSamples)
+    {
+        /* Caller-allocated sample: create it once, then grow its buffer if the
+           stream's required size (cbSize) has increased. */
+        if(!m_pSampleOut)
+        {
+            CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &m_pSampleOut));
+            hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut);
+            if(FAILED(hr))
+            {
+                SafeRelease(&m_pSampleOut);
+                CHECK_HR(hr);
+            }
+        }
+        else
+        {
+            DWORD dwMaxLength = 0;
+            CHECK_HR(hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut));
+            CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
+            if(dwMaxLength < mftStreamInfo.cbSize)
+            {
+                CHECK_HR(hr = m_pSampleOut->RemoveAllBuffers());
+                SafeRelease(&pBufferOut);
+                CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
+                CHECK_HR(hr = m_pSampleOut->AddBuffer(pBufferOut));
+            }
+        }
+    }
+
+    if(pBufferOut)
+    {
+        CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
+    }
+
+    //Set the output sample
+    mftOutputData.pSample = bOutputStreamProvidesSamples ? NULL : m_pSampleOut;
+    //Set the output id
+    mftOutputData.dwStreamID = m_dwOutputID;
+
+    //Generate the output sample
+    hr = m_pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus);
+    /* "Need more input" is not an error: report success with no sample. */
+    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
+    {
+        hr = S_OK;
+        goto bail;
+    }
+
+    if (FAILED(hr))
+    {
+        goto bail;
+    }
+
+    *ppSample = mftOutputData.pSample;
+    if(*ppSample)
+    {
+        (*ppSample)->AddRef();
+    }
+
+bail:
+    /* MFT-provided samples must be released by us; m_pSampleOut is reused. */
+    if(bOutputStreamProvidesSamples)
+    {
+        SafeRelease(&mftOutputData.pSample);
+    }
+    SafeRelease(&pBufferOut);
+    return hr;
+}
+
+bool MFCodec::IsValid()
+{
+    // Decoders only need the MFT itself; encoders also require ICodecAPI.
+    if (!m_pMFT) {
+        return false;
+    }
+    return (m_eType == MFCodecType_Decoder) || (m_pCodecAPI != NULL);
+}
+
+bool MFCodec::IsReady()
+{
+    // Ready once both media types exist on a validly-constructed codec.
+    return IsValid() && (m_pOutputType != NULL) && (m_pInputType != NULL);
+}
+
+// Pushes one input buffer through the MFT and tries to pull one output sample.
+// On success *ppSampleOut receives an AddRef'd sample; it may legitimately stay
+// NULL when the transform needs more input before producing output.
+HRESULT MFCodec::Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut)
+{
+	if(!pcInputPtr || !nInputSize || !ppSampleOut)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return E_INVALIDARG;
+	}
+
+	*ppSampleOut = NULL;
+
+	HRESULT hr = S_OK;
+
+	IMFMediaBuffer* pBufferIn = NULL;
+	BYTE* pBufferPtr = NULL;
+	BOOL bMediaChangeHandled = FALSE; // Endless loop guard
+
+	// Lazily create the reusable input sample, or grow its buffer when the
+	// incoming payload no longer fits (avoids per-call allocations).
+	if(!m_pSampleIn)
+	{
+		CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &m_pSampleIn));
+		hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn);
+		if(FAILED(hr))
+		{
+			SafeRelease(&m_pSampleIn);
+			CHECK_HR(hr);
+		}
+	}
+	else
+	{
+		DWORD dwMaxLength = 0;
+		CHECK_HR(hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn));
+		CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
+		if(dwMaxLength < nInputSize)
+		{
+			CHECK_HR(hr = m_pSampleIn->RemoveAllBuffers());
+			SafeRelease(&pBufferIn);
+			CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
+			CHECK_HR(hr = m_pSampleIn->AddBuffer(pBufferIn));
+		}
+	}
+
+	// Copy the caller's payload into the sample buffer.
+	CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
+	memcpy(pBufferPtr, pcInputPtr, nInputSize);
+	CHECK_HR(hr = pBufferIn->Unlock());
+	CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
+
+	if(m_eType == MFCodecType_Encoder)
+	{
+		CHECK_HR(hr = m_pSampleIn->SetSampleDuration(m_rtDuration));
+		CHECK_HR(hr = m_pSampleIn->SetSampleTime(m_rtStart)); // FIXME: use clock(), Same for custom source
+	}
+Label_ProcessInput:
+	hr = ProcessInput(m_pSampleIn);
+	// MF_E_NOTACCEPTING: the MFT is full; drain one output sample, then re-submit
+	// the same input (flagged as a discontinuity).
+	while(hr == MF_E_NOTACCEPTING)
+	{
+		TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
+		IMFSample* pSample = NULL;
+		hr = ProcessOutput(&pSample);
+		if(SUCCEEDED(hr) && pSample)
+		{
+			SafeRelease(ppSampleOut);
+			*ppSampleOut = pSample, pSample = NULL;
+
+			hr = m_pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
+			hr = ProcessInput(m_pSampleIn);
+		}
+	}
+	if(!*ppSampleOut)
+	{
+		hr = ProcessOutput(ppSampleOut);
+		if(hr == MF_E_TRANSFORM_STREAM_CHANGE) /* Handling Stream Changes: http://msdn.microsoft.com/en-us/library/windows/desktop/ee663587(v=vs.85).aspx */
+		{
+			TSK_DEBUG_INFO("[MF Codec] Stream changed");
+			if(m_eType == MFCodecType_Decoder)
+			{
+				// Re-negotiate the output type (typically a new frame size),
+				// then retry the input exactly once (guarded by bMediaChangeHandled).
+				IMFMediaType *pTypeOut = NULL;
+				hr = m_pMFT->GetOutputAvailableType(m_dwOutputID, 0, &pTypeOut);
+				if(SUCCEEDED(hr))
+				{
+					UINT32 uWidth = 0, uHeight = 0;
+					hr = MFGetAttributeSize(pTypeOut, MF_MT_FRAME_SIZE, &uWidth, &uHeight);
+					if(SUCCEEDED(hr))
+					{
+						TSK_DEBUG_INFO("[MF Decoder] New size: width=%u, height=%u", uWidth, uHeight);
+						hr = m_pMFT->SetOutputType(m_dwOutputID, pTypeOut, 0);
+						if(SUCCEEDED(hr))
+						{
+							SafeRelease(&m_pOutputType);
+							pTypeOut->AddRef();
+							m_pOutputType = pTypeOut;
+							if(m_eMediaType == MFCodecMediaType_Video)
+							{
+								dynamic_cast<MFCodecVideo*>(this)->m_nWidth = uWidth;
+								dynamic_cast<MFCodecVideo*>(this)->m_nHeight = uHeight;
+							}
+						}
+					}
+				}
+				SafeRelease(&pTypeOut);
+				if(SUCCEEDED(hr))
+				{
+					if(!bMediaChangeHandled)
+					{
+						bMediaChangeHandled = TRUE;
+						goto Label_ProcessInput;
+					}
+				}
+			}
+		}
+	}
+
+	// Advance the running timestamp for the next encoded sample.
+	m_rtStart += m_rtDuration;
+
+bail:
+	SafeRelease(&pBufferIn);
+	return hr;
+}
+
+enum tmedia_chroma_e MFCodec::GetUncompressedChroma()
+{
+	// NV12 is the only raw pixel format this pipeline is wired for.
+	if(kMFCodecUncompressedFormat != MFVideoFormat_NV12)
+	{
+		assert(false);
+		return tmedia_chroma_none;
+	}
+	return tmedia_chroma_nv12;
+}
+
+//
+// MFCodecVideo
+//
+
+// Video-codec base: forwards to MFCodec and zeroes the cached frame geometry
+// until Initialize() fills it in.
+MFCodecVideo::MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+: MFCodec(eId, eType, pMFT)
+, m_nFrameRate(0)
+, m_nWidth(0)
+, m_nHeight(0)
+{
+	// The base class derives m_eMediaType from the codec id; it must be video here.
+	assert(m_eMediaType == MFCodecMediaType_Video);
+}
+
+MFCodecVideo::~MFCodecVideo()
+{
+	// Nothing to do: all COM resources are released by the MFCodec base class.
+}
+
+// Negotiates the input/output media types on the MFT and applies the H.264
+// low-latency / CBR tuning. Must be called once before Process().
+// nOutputBitRateInBps is only honoured by encoders (0 = leave default).
+HRESULT MFCodecVideo::Initialize(
+	UINT32 nFrameRate,
+	UINT32 nWidth,
+	UINT32 nHeight,
+	UINT32 nOutputBitRateInBps /*= 0*/
+	)
+{
+	assert(IsValid());
+
+	HRESULT hr = S_OK;
+
+	VARIANT var = {0};
+
+	// make sure identifiers are zero-based (other layouts not supported yet)
+	hr = m_pMFT->GetStreamIDs(1, &m_dwInputID, 1, &m_dwOutputID);
+	if (hr == E_NOTIMPL)
+	{
+		// E_NOTIMPL means the MFT uses fixed zero-based stream ids.
+		m_dwInputID = 0;
+		m_dwOutputID = 0;
+		hr = S_OK;
+	}
+	else if (FAILED(hr))
+	{
+		TSK_DEBUG_ERROR("The stream identifiers are not zero-based");
+		return hr;
+	}
+
+	m_rtStart = 0;
+	CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(nFrameRate, 1, &m_rtDuration));
+
+	// Mirror every attribute on both types: the compressed side gets the codec
+	// subtype, the uncompressed side gets kMFCodecUncompressedFormat.
+	CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
+	CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
+
+	CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat));
+	CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat));
+
+	CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
+	CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+
+	CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
+	CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+
+	// Set bitrate
+	// Set (MF_MT_AVG_BITRATE) for MediaType
+	// Set (CODECAPI_AVEncCommonMeanBitRate) for H.264
+	hr = SetBitRate(nOutputBitRateInBps); // best-effort: result deliberately not checked
+
+	CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+	CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+	CHECK_HR(hr = MFSetAttributeSize(m_pOutputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
+	CHECK_HR(hr = MFSetAttributeSize(m_pInputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
+
+	CHECK_HR(hr = MFSetAttributeRatio(m_pOutputType, MF_MT_FRAME_RATE, nFrameRate, 1));
+	CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_FRAME_RATE, nFrameRate, 1));
+
+	CHECK_HR(hr = MFSetAttributeRatio(m_pOutputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+	CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+
+	// Encoder: Output format must be set before input
+	// Decoder: Input format must be set before output
+	if(m_eType == MFCodecType_Encoder)
+	{
+		CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
+		CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
+	}
+	else
+	{
+		CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
+		CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
+	}
+
+	// H.264-specific tuning (all best-effort, see 'hr = S_OK' below).
+	if(m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)
+	{
+		if(m_eType == MFCodecType_Decoder)
+		{
+			// Only decoder support GetAttributes()
+			IMFAttributes* pAttributes = NULL;
+			hr = m_pMFT->GetAttributes(&pAttributes);
+			if(SUCCEEDED(hr))
+			{
+				// FIXME: Very strange that "CODECAPI_AVLowLatencyMode" only works with "IMFAttributes->" and not "ICodecAPI->SetValue()"
+				hr = pAttributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
+			}
+			SafeRelease(&pAttributes);
+		}
+		else
+		{
+			var.vt = VT_BOOL;
+			var.boolVal = VARIANT_TRUE;
+			hr = m_pCodecAPI->SetValue(&CODECAPI_AVLowLatencyMode, &var);
+
+			var.vt = VT_BOOL;
+			var.boolVal = VARIANT_TRUE;
+			hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonLowLatency, &var); // Correct for the decoder
+
+			// Disable B-Frames
+			var.vt = VT_UI4;
+			var.ulVal = 0;
+			hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVDefaultBPictureCount, &var);
+
+			// Constant bitrate (updated using RTCP)
+			var.vt = VT_UI4;
+			var.ulVal = eAVEncCommonRateControlMode_CBR;
+			hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
+		}
+
+		hr = S_OK; // Not mandatory features
+	}
+
+bail:
+
+	// Cache the negotiated geometry only when the whole setup succeeded.
+	if(SUCCEEDED(hr))
+	{
+		m_nFrameRate = nFrameRate;
+		m_nWidth = nWidth;
+		m_nHeight = nHeight;
+	}
+
+	return hr;
+}
+
+// Sets the GOP length (distance between IDR frames) on the H.264 encoder.
+// No-op for decoders and non-H.264 codecs.
+HRESULT MFCodecVideo::SetGOPSize(UINT32 nFramesCount)
+{
+	assert(IsValid());
+
+	HRESULT hr = S_OK;
+
+	if(m_eType == MFCodecType_Encoder && (m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+	{
+		VARIANT var = {0};
+		var.vt = VT_UI4;
+		// BUGFIX: VT_UI4 maps to 'ulVal'; the original wrote 'ullVal' (the VT_UI8 member).
+		var.ulVal = nFramesCount;
+		CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVGOPSize, &var));
+	}
+
+bail:
+	return hr;
+}
+
+// Applies the target bitrate: MF_MT_AVG_BITRATE on the output media type, and
+// CODECAPI_AVEncCommonMeanBitRate for the H.264 encoders. No-op when
+// nBitRateInBps is 0 or the codec is a decoder.
+HRESULT MFCodecVideo::SetBitRate(UINT32 nBitRateInBps)
+{
+	assert(IsValid());
+
+	HRESULT hr = S_OK;
+
+	if(nBitRateInBps > 0 && m_eType == MFCodecType_Encoder)
+	{
+		CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_AVG_BITRATE, nBitRateInBps));
+
+		if((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+		{
+			VARIANT var = {0};
+
+			// Set BitRate
+			var.vt = VT_UI4;
+			// BUGFIX: VT_UI4 maps to 'ulVal'; the original wrote 'ullVal' (the VT_UI8 member).
+			var.ulVal = nBitRateInBps;
+			CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var));
+		}
+	}
+
+bail:
+	return hr;
+}
+
+// Reports whether SetSliceMaxSizeInBytes() can work: requires an H.264 codec
+// and CodecAPI support for both slice-control properties.
+HRESULT MFCodecVideo::IsSetSliceMaxSizeInBytesSupported(BOOL &supported)
+{
+	supported = FALSE;
+
+	if (m_eId != MFCodecId_H264Base && m_eId != MFCodecId_H264Main) {
+		return S_OK;
+	}
+#if defined(CODECAPI_AVEncSliceControlMode) && defined(CODECAPI_AVEncSliceControlSize)
+	const bool bModeOk = (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK);
+	const bool bSizeOk = (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK);
+	if (bModeOk && bSizeOk) {
+		supported = TRUE;
+	}
+#endif
+	return S_OK;
+}
+
+// Caps the size of each encoded H.264 slice. The CodecAPI expresses the limit
+// in bits, hence the << 3 conversion from bytes.
+HRESULT MFCodecVideo::SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes)
+{
+	assert(IsValid() && nSliceMaxSizeInBytes > 0);
+
+	HRESULT hr = S_OK;
+
+	if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+	{
+#if defined(CODECAPI_AVEncSliceControlMode) && defined(CODECAPI_AVEncSliceControlSize)
+		if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
+			VARIANT var = { 0 };
+			var.vt = VT_UI4;
+
+			var.ulVal = 1; // Slice-control mode 1: size expressed in Bits
+			CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlMode, &var));
+
+			var.ulVal = (nSliceMaxSizeInBytes << 3); // From Bytes to Bits
+			CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlSize, &var));
+		}
+#else
+		CHECK_HR(hr = S_OK); // keeps the 'bail' label referenced when the API is unavailable
+#endif
+	}
+
+bail:
+	return hr;
+}
+
+// Asks the H.264 encoder to emit an IDR frame on the next sample (best-effort:
+// silently skipped when the CodecAPI property is unsupported).
+HRESULT MFCodecVideo::RequestKeyFrame()
+{
+	assert(IsValid());
+
+	HRESULT hr = S_OK;
+
+	if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
+	{
+#if defined(CODECAPI_AVEncVideoForceKeyFrame)
+		if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncVideoForceKeyFrame) == S_OK) {
+			VARIANT var = { 0 };
+
+			var.vt = VT_UI4;
+			var.ulVal = 1; // force a key frame on the next input
+			CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var));
+		}
+#else
+		CHECK_HR(hr = S_OK); // keeps the 'bail' label referenced when the API is unavailable
+#endif
+	}
+
+bail:
+	return hr;
+}
+
+//
+// MFCodecVideoH264
+//
+// H.264 codec: pins the requested profile (Baseline or Main) on the output
+// media type right after base-class construction.
+MFCodecVideoH264::MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+: MFCodecVideo(eId, eType, pMFT)
+{
+	assert(eId == MFCodecId_H264Base || eId == MFCodecId_H264Main);
+
+	HRESULT hr = S_OK;
+
+	// m_pOutputType may be NULL when base-class construction failed; the
+	// factory functions detect that via IsValid().
+	if(m_pOutputType)
+	{
+		CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_MPEG2_PROFILE, (m_eId == MFCodecId_H264Base) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
+	}
+
+bail:
+	assert(SUCCEEDED(hr));
+}
+
+MFCodecVideoH264::~MFCodecVideoH264()
+{
+	// Nothing to do: resources are released by the base classes.
+}
+
+// Factory: builds a Baseline-profile H.264 codec, returning NULL on failure.
+MFCodecVideoH264* MFCodecVideoH264::CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+{
+	MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Base, eType, pMFT);
+	if(pCodec)
+	{
+		if(!pCodec->IsValid())
+		{
+			SafeRelease(&pCodec); // construction failed: drop the instance
+		}
+	}
+	return pCodec;
+}
+
+// Factory: builds a Main-profile H.264 codec, returning NULL on failure.
+MFCodecVideoH264* MFCodecVideoH264::CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
+{
+	MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Main, eType, pMFT);
+	if(pCodec)
+	{
+		if(!pCodec->IsValid())
+		{
+			SafeRelease(&pCodec); // construction failed: drop the instance
+		}
+	}
+	return pCodec;
+}
diff --git a/plugins/pluginWinMF/internals/mf_codec.h b/plugins/pluginWinMF/internals/mf_codec.h
new file mode 100644
index 0000000..51b06dc
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_codec.h
@@ -0,0 +1,158 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_CODEC_H
+#define PLUGIN_WIN_MF_CODEC_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+#include <strmif.h>
+
+class MFSampleQueue;
+
+// Identifier of the wrapped codec.
+typedef enum MFCodecId_e
+{
+	MFCodecId_H264Base, // H.264 Baseline profile
+	MFCodecId_H264Main, // H.264 Main profile
+	MFCodecId_AAC       // AAC audio
+}
+MFCodecId_t;
+
+// Direction of the transform.
+typedef enum MFCodecType_e
+{
+	MFCodecType_Encoder,
+	MFCodecType_Decoder
+}
+MFCodecType_t;
+
+// Media category of the codec.
+typedef enum MFCodecMediaType_e
+{
+	MFCodecMediaType_Audio,
+	MFCodecMediaType_Video
+}
+MFCodecMediaType_t;
+
+// Base wrapper around a Media Foundation transform (encoder or decoder).
+// Owns the MFT, its negotiated input/output media types, and the reusable
+// input/output samples used by Process().
+class MFCodec : IMFAsyncCallback
+{
+protected:
+	MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+	virtual ~MFCodec();
+	HRESULT ProcessInput(IMFSample* pSample);
+	HRESULT ProcessOutput(IMFSample **ppSample);
+
+public:
+	virtual bool IsValid();  // MFT created (and CodecAPI available for encoders)
+	virtual bool IsReady();  // IsValid() and both media types created
+	virtual HRESULT Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
+	static enum tmedia_chroma_e GetUncompressedChroma();
+	inline IMFTransform* GetMFT(){ return m_pMFT; }
+	inline MFCodecId_t GetId() { return m_eId; }
+	inline MFCodecType_t GetType() { return m_eType; }
+	inline void setBundled(BOOL bBundled) { m_bIsBundled = bBundled; }
+
+	// IUnknown
+	STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
+	STDMETHODIMP_(ULONG) AddRef();
+	STDMETHODIMP_(ULONG) Release();
+
+	// IMFAsyncCallback
+	STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue);
+	STDMETHODIMP Invoke(IMFAsyncResult *pAsyncResult);
+
+private:
+	long m_nRefCount; // COM reference count
+
+protected:
+	MFCodecId_t m_eId; // Codec Id
+	MFCodecType_t m_eType; // Codec type.
+	MFCodecMediaType_t m_eMediaType; // Codec Media type.
+	DWORD m_dwInputID; // Input stream ID.
+	DWORD m_dwOutputID; // Output stream ID.
+
+	GUID m_guidCompressedFormat; // Compressed Media format (e.g. MFVideoFormat_H264)
+	IMFTransform *m_pMFT; // Pointer to the encoder MFT.
+	ICodecAPI *m_pCodecAPI; // Pointer to CodecAPI.
+	IMFMediaType *m_pOutputType; // Output media type of the codec.
+	IMFMediaType *m_pInputType; // Input media type of the codec.
+
+	LONGLONG m_rtStart;   // Timestamp for the next encoded sample
+	UINT64 m_rtDuration;  // Per-frame duration used when encoding
+
+	IMFSample *m_pSampleIn;  // Reusable input sample (grown on demand)
+	IMFSample *m_pSampleOut; // Reusable output sample (grown on demand)
+
+	MFSampleQueue *m_pSampleQueueAsyncInput; // Pending input for asynchronous MFTs
+	BOOL m_bIsBundled; // Bundled with a producer or consumer -> do not monitor events
+	BOOL m_bIsAsync;   // Whether the MFT is asynchronous
+	IMFMediaEventGenerator *m_pEventGenerator; // Event source for asynchronous MFTs
+	BOOL m_bIsFirstFrame;
+	long m_nMETransformNeedInputCount, m_nMETransformHaveOutputCount; // Async event counters
+};
+
+
+// Video specialization: adds frame geometry/rate negotiation plus the
+// H.264-oriented tuning knobs (GOP size, bitrate, slice size, key frames).
+class MFCodecVideo : public MFCodec
+{
+	friend class MFCodec; // Process() updates m_nWidth/m_nHeight on stream change
+protected:
+	MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+	virtual ~MFCodecVideo();
+
+public:
+	// Negotiates media types on the MFT; must be called before Process().
+	virtual HRESULT Initialize(
+			UINT32 nFrameRate,
+			UINT32 nWidth,
+			UINT32 nHeight,
+			UINT32 nOutputBitRateInBps = 0 // Only for encoders
+		);
+	virtual HRESULT SetGOPSize(UINT32 nFramesCount);
+	virtual HRESULT SetBitRate(UINT32 nBitRateInBps);
+	virtual HRESULT SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes);
+	virtual HRESULT RequestKeyFrame();
+
+	virtual HRESULT IsSetSliceMaxSizeInBytesSupported(BOOL &supported);
+	virtual inline UINT32 GetFrameRate() { return m_nFrameRate; }
+	virtual inline UINT32 GetWidth() { return m_nWidth; }
+	virtual inline UINT32 GetHeight() { return m_nHeight; }
+
+protected:
+	// Cached negotiated values (updated by Initialize() and on stream change).
+	UINT32 m_nFrameRate;
+	UINT32 m_nWidth;
+	UINT32 m_nHeight;
+};
+
+// H.264 codec (Baseline or Main profile). Instances are created through the
+// static factories, which return NULL when the underlying MFT is unavailable.
+class MFCodecVideoH264 : public MFCodecVideo
+{
+protected:
+	MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+
+public:
+	virtual ~MFCodecVideoH264();
+	static MFCodecVideoH264* CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT = NULL);
+	static MFCodecVideoH264* CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT = NULL);
+
+protected:
+
+};
+
+
+#endif /* PLUGIN_WIN_MF_CODEC_H */
diff --git a/plugins/pluginWinMF/internals/mf_codec_topology.cxx b/plugins/pluginWinMF/internals/mf_codec_topology.cxx
new file mode 100644
index 0000000..1ee2a16
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_codec_topology.cxx
@@ -0,0 +1,473 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_codec_topology.h"
+#include "mf_utils.h"
+
+#include "tsk_debug.h"
+
+//
+// MFCodecTopologySampleGrabberCB
+//
+
+// Sample-grabber sink callback: copies every sample delivered by the media
+// session into the owning MFCodecTopology's thread-safe queue.
+class MFCodecTopologySampleGrabberCB : public IMFSampleGrabberSinkCallback
+{
+	long m_cRef;                       // COM reference count
+	MFCodecTopology *m_pCodecTopology; // owning topology (AddRef'd)
+
+	MFCodecTopologySampleGrabberCB(MFCodecTopology *pCodecTopology)
+	: m_cRef(1)
+	{
+		m_pCodecTopology = pCodecTopology;
+		m_pCodecTopology->AddRef();
+	}
+	virtual ~MFCodecTopologySampleGrabberCB()
+	{
+		SafeRelease(&m_pCodecTopology);
+	}
+
+public:
+	// Create a new instance of the object.
+	static HRESULT MFCodecTopologySampleGrabberCB::CreateInstance(MFCodecTopology *pCodecTopology, MFCodecTopologySampleGrabberCB **ppCB)
+	{
+		*ppCB = new (std::nothrow) MFCodecTopologySampleGrabberCB(pCodecTopology);
+
+		// BUGFIX: was 'if (ppCB == NULL)', which tested the out-parameter
+		// pointer itself and therefore never detected allocation failure.
+		if (*ppCB == NULL)
+		{
+			return E_OUTOFMEMORY;
+		}
+		return S_OK;
+	}
+
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::QueryInterface(REFIID riid, void** ppv)
+	{
+		static const QITAB qit[] =
+		{
+			QITABENT(MFCodecTopologySampleGrabberCB, IMFSampleGrabberSinkCallback),
+			QITABENT(MFCodecTopologySampleGrabberCB, IMFClockStateSink),
+			{ 0 }
+		};
+		return QISearch(this, qit, riid, ppv);
+	}
+
+	STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::AddRef()
+	{
+		return InterlockedIncrement(&m_cRef);
+	}
+
+	STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::Release()
+	{
+		// For thread safety, return a local copy of the decremented count.
+		ULONG cRef = InterlockedDecrement(&m_cRef);
+		if (cRef == 0)
+		{
+			delete this;
+		}
+		return cRef;
+
+	}
+
+	// IMFClockStateSink methods (log-only: clock transitions need no action here)
+
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
+	{
+		TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
+		return S_OK;
+	}
+
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStop(MFTIME hnsSystemTime)
+	{
+		TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
+		return S_OK;
+	}
+
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockPause(MFTIME hnsSystemTime)
+	{
+		TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
+		return S_OK;
+	}
+
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime)
+	{
+		TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
+		return S_OK;
+	}
+
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
+	{
+		TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
+		return S_OK;
+	}
+
+	// IMFSampleGrabberSink methods.
+
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock)
+	{
+		TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnSetPresentationClock");
+		return S_OK;
+	}
+
+	// Wraps the grabbed bytes into a fresh IMFSample and queues it on the topology.
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::OnProcessSample(
+		REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+		LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+		DWORD dwSampleSize)
+	{
+		HRESULT hr = S_OK;
+		IMFSample *pSample = NULL;
+		IMFMediaBuffer* pMediaBuffer = NULL;
+		BYTE* _pcBufferPtr = NULL;
+
+		CHECK_HR(hr = MFUtils::CreateMediaSample(dwSampleSize, &pSample));
+		CHECK_HR(hr = pSample->SetSampleTime(llSampleTime));
+		CHECK_HR(hr = pSample->SetSampleDuration(llSampleDuration));
+		CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+		CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
+		memcpy(_pcBufferPtr, pSampleBuffer, dwSampleSize);
+		CHECK_HR(hr = pMediaBuffer->SetCurrentLength(dwSampleSize));
+		CHECK_HR(hr = pMediaBuffer->Unlock());
+
+		m_pCodecTopology->m_SampleQueue.Queue(pSample); // thread-safe
+
+bail:
+		SafeRelease(&pSample);
+		SafeRelease(&pMediaBuffer);
+		return hr;
+	}
+
+	STDMETHODIMP MFCodecTopologySampleGrabberCB::OnShutdown()
+	{
+		TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnShutdown");
+		return S_OK;
+	}
+};
+
+//
+// MFCodecTopology
+//
+
+
+// Builds a topology wrapper around an existing codec. 'hr' receives the
+// construction status (constructors cannot return one); E_POINTER when
+// pCodec is NULL.
+MFCodecTopology::MFCodecTopology(MFCodec* pCodec, HRESULT &hr)
+: m_nRefCount(1)
+, m_bInitialized(FALSE)
+, m_bStarted(FALSE)
+, m_pCodec(NULL)
+, m_pSource(NULL)
+, m_pSession(NULL)
+, m_pTopologyFull(NULL)
+, m_pTopologyPartial(NULL)
+, m_pOutputType(NULL)
+, m_pInputType(NULL)
+, m_pGrabberCallback(NULL)
+, m_pGrabberActivate(NULL)
+, m_pTread(NULL)
+{
+	hr = S_OK;
+
+	if(!pCodec)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	// Keep the codec alive for the lifetime of the topology.
+	m_pCodec = pCodec;
+	m_pCodec->AddRef();
+
+bail: ;
+}
+
+MFCodecTopology::~MFCodecTopology()
+{
+	// Stops the session and releases every owned resource.
+	DeInitialize();
+}
+
+// IUnknown: thread-safe reference counting.
+ULONG MFCodecTopology::AddRef()
+{
+	return InterlockedIncrement(&m_nRefCount);
+}
+
+// IUnknown: decrements the reference count and destroys the object at zero.
+ULONG MFCodecTopology::Release()
+{
+	// Copy the decremented value before any destruction, for thread safety.
+	const ULONG nNewCount = InterlockedDecrement(&m_nRefCount);
+	if (nNewCount == 0)
+	{
+		delete this;
+	}
+	return nNewCount;
+}
+
+// IUnknown: interface querying is not needed by the internal users of this
+// class, so it is deliberately left unimplemented.
+HRESULT MFCodecTopology::QueryInterface(REFIID iid, void** ppv)
+{
+	return E_NOTIMPL;
+}
+
+// Runs the resolved topology on the media session and spawns the event-watcher
+// thread. No-op when already started; fails when Initialize() was not called.
+HRESULT MFCodecTopology::Start()
+{
+	HRESULT hr = S_OK;
+
+	if(m_bStarted)
+	{
+		return S_OK;
+	}
+
+	if(!m_bInitialized)
+	{
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	CHECK_HR(hr = MFUtils::RunSession(m_pSession, m_pTopologyFull));
+
+	// Start asynchronous watcher thread
+	m_bStarted = TRUE;
+	int ret = tsk_thread_create(&m_pTread, MFCodecTopology::RunSessionThread, this);
+	if(ret != 0)
+	{
+		// Roll back: clear the flag (so the thread, if any, exits), join it,
+		// and shut the session down.
+		TSK_DEBUG_ERROR("Failed to create thread");
+		m_bStarted = FALSE;
+		if(m_pTread)
+		{
+			tsk_thread_join(&m_pTread);
+		}
+		MFUtils::ShutdownSession(m_pSession, m_pSource);
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	// FIXME: arbitrary delay giving the session time to spin up.
+	Sleep(2000);
+
+bail:
+	return hr;
+}
+
+// Stops the media session and joins the watcher thread. No-op when not started.
+HRESULT MFCodecTopology::Stop()
+{
+	HRESULT hr = S_OK;
+
+	if(!m_bStarted)
+	{
+		return S_OK;
+	}
+
+	m_bStarted = FALSE;
+	hr = MFUtils::ShutdownSession(m_pSession, NULL); // stop session to wakeup the asynchronous thread
+	if(m_pTread)
+	{
+		tsk_thread_join(&m_pTread);
+	}
+	// Shut the media source down only after the watcher thread has exited.
+	hr = MFUtils::ShutdownSession(NULL, m_pSource);
+
+	return hr;
+}
+
+// Builds the whole pipeline: custom source -> codec MFT -> sample-grabber sink,
+// wrapped in a media session. On any failure everything built so far is torn
+// down via DeInitialize().
+HRESULT MFCodecTopology::Initialize()
+{
+	HRESULT hr = S_OK;
+	IMFAttributes* pSessionAttributes = NULL;
+
+	// Double initialization is an error.
+	if(m_bInitialized)
+	{
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	// Set session attributes
+	CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+	CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+	// Get input and output type
+	CHECK_HR(hr = m_pCodec->GetInputType(&m_pInputType));
+	CHECK_HR(hr = m_pCodec->GetOutputType(&m_pOutputType));
+
+	// Create custom source
+	CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&m_pSource, m_pInputType));
+
+	// Create the sample grabber sink.
+	CHECK_HR(hr = MFCodecTopologySampleGrabberCB::CreateInstance(this, &m_pGrabberCallback));
+	CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(m_pOutputType, m_pGrabberCallback, &m_pGrabberActivate));
+
+	// To run as fast as possible, set this attribute (requires Windows 7 or later):
+	CHECK_HR(hr = m_pGrabberActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+	// Create the Media Session.
+	CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &m_pSession));
+
+	// Create the topology.
+	CHECK_HR(hr = MFUtils::CreateTopology(
+		m_pSource,
+		m_pCodec->GetMFT(),
+		m_pGrabberActivate,
+		NULL, // no preview
+		m_pOutputType,
+		&m_pTopologyPartial));
+	// Resolve topology (adds video processors if needed).
+	CHECK_HR(hr = MFUtils::ResolveTopology(m_pTopologyPartial, &m_pTopologyFull));
+
+	m_bInitialized = TRUE;
+
+bail:
+	SafeRelease(&pSessionAttributes);
+
+	if(FAILED(hr))
+	{
+		DeInitialize();
+	}
+
+	return hr;
+}
+
+// Watcher thread: pumps media-session events until the session ends, the
+// topology is stopped, or an event reports an error.
+void* TSK_STDCALL MFCodecTopology::RunSessionThread(void *pArg)
+{
+	MFCodecTopology *pSelf = (MFCodecTopology *)pArg;
+	HRESULT hrStatus = S_OK;
+	HRESULT hr = S_OK;
+	IMFMediaEvent *pEvent = NULL;
+	MediaEventType met;
+
+	TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - ENTER");
+
+	while(pSelf->isStarted())
+	{
+		CHECK_HR(hr = pSelf->m_pSession->GetEvent(0, &pEvent)); // blocking call
+		CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+		CHECK_HR(hr = pEvent->GetType(&met));
+
+		if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
+		{
+			TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+			hr = hrStatus;
+			goto bail;
+		}
+		if (met == MESessionEnded)
+		{
+			break;
+		}
+		SafeRelease(&pEvent);
+	}
+
+bail:
+	// BUGFIX: the last event was leaked when leaving the loop through 'break'
+	// (MESessionEnded) or 'goto bail' (session error).
+	SafeRelease(&pEvent);
+	TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - EXIT");
+
+	return NULL;
+}
+
+// Releases every resource owned by the topology. Safe to call repeatedly
+// (also invoked from the destructor and from Initialize() on failure).
+HRESULT MFCodecTopology::DeInitialize()
+{
+	Stop();
+
+	// BUGFIX: m_pCodec was released twice (copy/paste slip); once is enough.
+	SafeRelease(&m_pCodec);
+	SafeRelease(&m_pSource);
+	SafeRelease(&m_pSession);
+	SafeRelease(&m_pTopologyFull);
+	SafeRelease(&m_pTopologyPartial);
+	SafeRelease(&m_pOutputType);
+	SafeRelease(&m_pInputType);
+	SafeRelease(&m_pGrabberCallback);
+	SafeRelease(&m_pGrabberActivate);
+
+	// Stop() already joins the watcher thread; this is a safety net in case
+	// the thread handle is still set.
+	if(m_pTread)
+	{
+		tsk_thread_join(&m_pTread);
+	}
+
+	m_SampleQueue.Clear();
+
+	m_bInitialized = FALSE;
+
+	return S_OK;
+}
+
+// Feeds one video sample into the topology's custom source (starting the
+// session lazily on first use).
+HRESULT MFCodecTopology::ProcessInput(IMFSample* pSample)
+{
+	HRESULT hr = S_OK;
+	IMFMediaBuffer* pMediaBuffer = NULL;
+	BYTE* _pcBufferPtr = NULL;
+	// Declared (and zeroed) before the first CHECK_HR: the original declared
+	// these after possible 'goto bail' jumps, so 'bLocked' could be read
+	// uninitialized in the cleanup path.
+	BOOL bLocked = FALSE;
+	DWORD dwDataLength = 0;
+
+	if(!pSample)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(m_pCodec->GetMediaType() != MFCodecMediaType_Video)
+	{
+		CHECK_HR(hr = E_NOTIMPL);
+	}
+
+	if(!m_bStarted)
+	{
+		CHECK_HR(hr = Start());
+	}
+
+	CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+	CHECK_HR(hr = pMediaBuffer->GetCurrentLength(&dwDataLength));
+	if(dwDataLength > 0)
+	{
+		CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
+		// BUGFIX: mark the buffer locked only after Lock() succeeded; the
+		// original set the flag earlier, making the cleanup path call
+		// Unlock() on a buffer that was never locked (empty buffer or
+		// Lock() failure).
+		bLocked = TRUE;
+		CHECK_HR(hr = m_pSource->CopyVideoBuffer(
+			dynamic_cast<MFCodecVideo*>(m_pCodec)->GetWidth(),
+			dynamic_cast<MFCodecVideo*>(m_pCodec)->GetHeight(),
+			_pcBufferPtr, dwDataLength));
+	}
+
+bail:
+	if(bLocked)
+	{
+		pMediaBuffer->Unlock();
+	}
+	SafeRelease(&pMediaBuffer);
+	return hr;
+}
+
+// Pops the next grabbed sample, if any. *ppSample is left untouched when the
+// queue is empty and the call still succeeds.
+HRESULT MFCodecTopology::ProcessOutput(IMFSample **ppSample)
+{
+	HRESULT hr = S_OK;
+
+	if(!ppSample)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+	else if(!m_SampleQueue.IsEmpty())
+	{
+		CHECK_HR(hr = m_SampleQueue.Dequeue(ppSample)); // thread-safe
+	}
+
+bail:
+	return hr;
+}
+
+//
+// MFCodecVideoTopology
+//
+
+
+// Video topology: forwards to the base class and requires a video codec.
+MFCodecVideoTopology::MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr)
+: MFCodecTopology(pCodec, hr)
+, m_nWidth(0)
+, m_nHeight(0)
+{
+	assert(pCodec->GetMediaType() == MFCodecMediaType_Video);
+}
+
+MFCodecVideoTopology::~MFCodecVideoTopology()
+{
+	// Nothing to do: teardown happens in the MFCodecTopology destructor.
+}
+
+
diff --git a/plugins/pluginWinMF/internals/mf_codec_topology.h b/plugins/pluginWinMF/internals/mf_codec_topology.h
new file mode 100644
index 0000000..c5d2f34
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_codec_topology.h
@@ -0,0 +1,87 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_CODEC_TOPOLOGY_H
+#define PLUGIN_WIN_MF_CODEC_TOPOLOGY_H
+
+#include "mf_codec.h"
+#include "mf_custom_src.h"
+
+#include "tsk_thread.h"
+
+class MFCodecTopologySampleGrabberCB;
+
+// Runs a codec MFT inside a full media session (custom source -> MFT ->
+// sample-grabber sink) so that samples can be pushed in with ProcessInput()
+// and pulled out of a thread-safe queue with ProcessOutput().
+class MFCodecTopology : IUnknown
+{
+	friend class MFCodecTopologySampleGrabberCB; // queues grabbed samples into m_SampleQueue
+public:
+	MFCodecTopology(MFCodec* pCodec, HRESULT &hr);
+	virtual ~MFCodecTopology();
+
+	virtual HRESULT Initialize();   // builds source, sink, session and topology
+	virtual HRESULT DeInitialize(); // releases everything; safe to call repeatedly
+
+	virtual HRESULT ProcessInput(IMFSample* pSample);
+	virtual HRESULT ProcessOutput(IMFSample **ppSample);
+
+	// IUnknown
+	STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
+	STDMETHODIMP_(ULONG) AddRef();
+	STDMETHODIMP_(ULONG) Release();
+
+	inline BOOL isStarted() { return m_bStarted; }
+	inline BOOL isInitialized() { return m_bInitialized; }
+
+private:
+	static void* TSK_STDCALL RunSessionThread(void *pArg); // session event pump
+
+protected:
+	HRESULT Start();
+	HRESULT Stop();
+
+private:
+	long m_nRefCount; // COM reference count
+
+protected:
+	BOOL m_bInitialized;
+	BOOL m_bStarted;
+	MFCodec* m_pCodec;                 // wrapped codec (AddRef'd)
+	CMFSource *m_pSource;              // custom push source fed by ProcessInput()
+	IMFMediaSession *m_pSession;
+	IMFTopology *m_pTopologyFull;      // resolved topology actually run
+	IMFTopology *m_pTopologyPartial;   // topology before resolution
+	IMFMediaType *m_pOutputType;
+	IMFMediaType *m_pInputType;
+	MFCodecTopologySampleGrabberCB *m_pGrabberCallback;
+	IMFActivate *m_pGrabberActivate;
+	tsk_thread_handle_t* m_pTread;     // watcher thread handle
+	SampleQueue m_SampleQueue;         // thread-safe output queue
+};
+
+// Topology specialization for video codecs.
+class MFCodecVideoTopology : public MFCodecTopology
+{
+public:
+	MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr);
+	virtual ~MFCodecVideoTopology();
+
+private:
+	// Cached frame geometry (currently unused after construction).
+	UINT32 m_nWidth, m_nHeight;
+};
+
+
+#endif /* PLUGIN_WIN_MF_CODEC_TOPOLOGY_H */
diff --git a/plugins/pluginWinMF/internals/mf_custom_src.cxx b/plugins/pluginWinMF/internals/mf_custom_src.cxx
new file mode 100644
index 0000000..1de9904
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_custom_src.cxx
@@ -0,0 +1,1722 @@
+/*
+* Copyright (C) Microsoft Corporation. All rights reserved.
+* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+
+// Implementing custom source: http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134(v=vs.85).aspx
+
+#include "mf_custom_src.h"
+#include "mf_utils.h"
+
+#include "tsk_debug.h"
+
+#include <assert.h>
+
+//
+// Locking:
+// The source and stream objects both have critical sections. If you
+// hold both locks, the source lock must be held FIRST, to avoid
+// deadlocks.
+//
+// Shutdown:
+// Most methods start by calling CheckShutdown(). This method
+// fails if the source was shut down.
+//
+
+
+
// Round 'num' up to the next multiple of 'mult'.
// Requires num >= 0 and mult > 0 (e.g. AlignUp(5, 4) == 8, AlignUp(8, 4) == 8).
template <class T>
T AlignUp(T num, T mult)
{
    assert(num >= 0);
    const T rounded = num + (mult - 1);
    return rounded - (rounded % mult);
}
+
+
+
+// Helper Functions
+
+HRESULT QueueEventWithIUnknown(
+ IMFMediaEventGenerator *pMEG,
+ MediaEventType meType,
+ HRESULT hrStatus,
+ IUnknown *pUnk);
+
+LONGLONG BufferSizeFromAudioDuration(const WAVEFORMATEX *pWav, LONGLONG duration);
+
// Plain-function wrapper around CMFSource::CreateInstance() (C-friendly entry point).
HRESULT CMFSource_CreateInstance(REFIID iid, void **ppMFT)
{
    return CMFSource::CreateInstance(iid, ppMFT);
}
+
+
+//-------------------------------------------------------------------
+// Name: CreateInstance
+// Description: Static method to create an instance of the source.
+//
+// iid: IID of the requested interface on the source.
+// ppSource: Receives a ref-counted pointer to the source.
+//-------------------------------------------------------------------
+
// Static factory for the plugin path: creates a source with no preset media
// type and returns the requested interface in *ppSource.
HRESULT CMFSource::CreateInstance(REFIID iid, void **ppSource) // Called when source used as plugin
{
    return CreateInstanceEx(iid, ppSource, NULL);
}
+
// Static factory: creates a CMFSource (optionally with a preset media type)
// and returns the interface named by 'iid' in *ppSource.
HRESULT CMFSource::CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType) // Called when source directly called
{
    if (ppSource == NULL)
    {
        return E_POINTER;
    }

    HRESULT hr = S_OK;
    CMFSource *pSource = new (std::nothrow) CMFSource(hr, pMediaType); // Created with ref count = 1.
    if (pSource == NULL)
    {
        return E_OUTOFMEMORY;
    }

    if (SUCCEEDED(hr))
    {
        hr = pSource->QueryInterface(iid, ppSource);
        if(SUCCEEDED(hr))
        {
            // NOTE(review): QueryInterface() has already AddRef'd *ppSource, so
            // this extra AddRef leaves the object with one more reference than
            // the caller holds. Presumably intentional (keeps the source alive
            // during session teardown) — confirm; otherwise it is a ref leak.
            ((CMFSource*)(*ppSource))->AddRef();
        }
    }

    SafeRelease(&pSource); // drops the constructor's initial reference
    return hr;
}
+
+
+//-------------------------------------------------------------------
+// CMFSource constructor.
+//
+// hr: If the constructor fails, this value is set to a failure code.
+//-------------------------------------------------------------------
+
+CMFSource::CMFSource(HRESULT& hr, IMFMediaType *pMediaType)
+ : m_nRefCount(1),
+ m_pEventQueue(NULL),
+ m_pPresentationDescriptor(NULL),
+ m_IsShutdown(FALSE),
+ m_state(STATE_STOPPED),
+ m_pStream(NULL),
+ m_pMediaType(NULL)
+{
+ // Create the media event queue.
+ hr = MFCreateEventQueue(&m_pEventQueue);
+
+ if(pMediaType)
+ {
+ m_pMediaType = pMediaType;
+ pMediaType->AddRef();
+ }
+
+ InitializeCriticalSection(&m_critSec);
+}
+
+
+//-------------------------------------------------------------------
+// CMFSource destructor.
+//-------------------------------------------------------------------
+
+
// Destructor. Shutdown() must have run first and all references must be
// released (both asserted); releases the media type and the lock.
CMFSource::~CMFSource()
{
    assert(m_IsShutdown);
    assert(m_nRefCount == 0);
    SafeRelease(&m_pMediaType);

    DeleteCriticalSection(&m_critSec);
}
+
+// IMFCustomSource methods
+
+HRESULT CMFSource::CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize)
+{
+ if(!pBufferPtr)
+ {
+ TSK_DEBUG_ERROR("Invalid buffer pointer");
+ return E_POINTER;
+ }
+
+ if(!nWidth || !nHeight || !nBufferSize)
+ {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return E_INVALIDARG;
+ }
+ if(m_pStream)
+ {
+ return m_pStream->CopyVideoBuffer(nWidth, nHeight, pBufferPtr, nBufferSize);
+ }
+ else
+ {
+ TSK_DEBUG_ERROR("No stream associated to this source");
+ return E_NOT_VALID_STATE;
+ }
+}
+
+// IUnknown methods
+
+ULONG CMFSource::AddRef()
+{
+ return InterlockedIncrement(&m_nRefCount);
+}
+
+ULONG CMFSource::Release()
+{
+ ULONG uCount = InterlockedDecrement(&m_nRefCount);
+ if (uCount == 0)
+ {
+ delete this;
+ }
+ // For thread safety, return a temporary variable.
+ return uCount;
+}
+
+HRESULT CMFSource::QueryInterface(REFIID iid, void** ppv)
+{
+ static const QITAB qit[] =
+ {
+ QITABENT(CMFSource, IMFMediaEventGenerator),
+ QITABENT(CMFSource, IMFMediaSource),
+ { 0 }
+ };
+ return QISearch(this, qit, iid, ppv);
+}
+
+
+// IMFMediaEventGenerator methods
+//
+// All of the IMFMediaEventGenerator methods do the following:
+// 1. Check for shutdown status.
+// 2. Call the event generator helper object.
+
+HRESULT CMFSource::BeginGetEvent(IMFAsyncCallback* pCallback, IUnknown* punkState)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->BeginGetEvent(pCallback, punkState);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+HRESULT CMFSource::EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->EndGetEvent(pResult, ppEvent);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+HRESULT CMFSource::GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent)
+{
+ // NOTE: GetEvent can block indefinitely, so we don't hold the
+ // CMFSource lock. This requires some juggling with the
+ // event queue pointer.
+
+ HRESULT hr = S_OK;
+
+ IMFMediaEventQueue *pQueue = NULL;
+
+ EnterCriticalSection(&m_critSec);
+
+ // Check shutdown
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ pQueue = m_pEventQueue;
+ pQueue->AddRef();
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pQueue->GetEvent(dwFlags, ppEvent);
+ }
+
+ SafeRelease(&pQueue);
+ return hr;
+}
+
+HRESULT CMFSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT* pvValue)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+
+// IMFMediaSource methods
+
+
+//-------------------------------------------------------------------
+// Name: CreatePresentationDescriptor
+// Description: Returns a copy of the default presentation descriptor.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::CreatePresentationDescriptor(IMFPresentationDescriptor** ppPresentationDescriptor)
+{
+ if (ppPresentationDescriptor == NULL)
+ {
+ return E_POINTER;
+ }
+
+ EnterCriticalSection(&m_critSec);
+
+ HRESULT hr = S_OK;
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ if (m_pPresentationDescriptor == NULL)
+ {
+ hr = CreatePresentationDescriptor();
+ }
+ }
+
+ // Clone our default presentation descriptor.
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pPresentationDescriptor->Clone(ppPresentationDescriptor);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: GetCharacteristics
+// Description: Returns flags the describe the source.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::GetCharacteristics(DWORD* pdwCharacteristics)
+{
+ if (pdwCharacteristics == NULL)
+ {
+ return E_POINTER;
+ }
+
+ EnterCriticalSection(&m_critSec);
+
+ HRESULT hr = S_OK;
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ *pdwCharacteristics = MFMEDIASOURCE_CAN_PAUSE | MFMEDIASOURCE_IS_LIVE;
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+//-------------------------------------------------------------------
+// Name: Start
+// Description: Switches to running state.
+//-------------------------------------------------------------------
+
// Transitions the source to the started state (or seeks while running).
//
// pPresentationDescriptor: must be (a clone of) the PD this source handed out;
//     validated by ValidatePresentationDescriptor().
// pguidTimeFormat: must be NULL or GUID_NULL (100-ns reference time only).
// pvarStartPosition: VT_I8 = absolute start position (seek when not stopped);
//     VT_EMPTY = current position (0 when stopped).
//
// On success queues MESourceStarted/MESourceSeeked plus the matching stream
// event. If a failure happens AFTER the start event was queued with S_OK,
// an MEError event is raised instead of just returning the error.
HRESULT CMFSource::Start(
    IMFPresentationDescriptor* pPresentationDescriptor,
    const GUID* pguidTimeFormat,
    const PROPVARIANT* pvarStartPosition
    )
{
    HRESULT hr = S_OK;
    LONGLONG llStartOffset = 0;
    BOOL bIsSeek = FALSE;
    BOOL bIsRestartFromCurrentPosition = FALSE;
    BOOL bQueuedStartEvent = FALSE;

    IMFMediaEvent *pEvent = NULL;

    PROPVARIANT var;
    PropVariantInit(&var);

    // Check parameters.
    // Start position and presentation descriptor cannot be NULL.
    if (pvarStartPosition == NULL || pPresentationDescriptor == NULL)
    {
        return E_INVALIDARG;
    }

    // Check the time format. Must be "reference time" units.
    if ((pguidTimeFormat != NULL) && (*pguidTimeFormat != GUID_NULL))
    {
        // Unrecognized time format GUID.
        return MF_E_UNSUPPORTED_TIME_FORMAT;
    }

    EnterCriticalSection(&m_critSec);

    // Fail if the source is shut down.
    CHECK_HR(hr = CheckShutdown());

    // Check the start position.
    if (pvarStartPosition->vt == VT_I8)
    {
        // Start position is given in pvarStartPosition in 100-ns units.
        llStartOffset = pvarStartPosition->hVal.QuadPart;

        if (m_state != STATE_STOPPED)
        {
            // Source is running or paused, so this is a seek.
            bIsSeek = TRUE;
        }
    }
    else if (pvarStartPosition->vt == VT_EMPTY)
    {
        // Start position is "current position".
        // For stopped, that means 0. Otherwise, use the current position.
        if (m_state == STATE_STOPPED)
        {
            llStartOffset = 0;
        }
        else
        {
            llStartOffset = GetCurrentPosition();
            bIsRestartFromCurrentPosition = TRUE;
        }
    }
    else
    {
        // We don't support this time format.
        hr = MF_E_UNSUPPORTED_TIME_FORMAT;
        goto bail;
    }

    // Validate the caller's presentation descriptor.
    CHECK_HR(hr = ValidatePresentationDescriptor(pPresentationDescriptor));

    // Sends the MENewStream or MEUpdatedStream event.
    // Also creates m_pStream on first start, so m_pStream is non-NULL below.
    CHECK_HR(hr = QueueNewStreamEvent(pPresentationDescriptor));

    // Notify the stream of the new start time.
    CHECK_HR(hr = m_pStream->SetPosition(llStartOffset));

    // Send Started or Seeked events.

    var.vt = VT_I8;
    var.hVal.QuadPart = llStartOffset;

    // Send the source event.
    if (bIsSeek)
    {
        CHECK_HR(hr = QueueEvent(MESourceSeeked, GUID_NULL, hr, &var));
    }
    else
    {
        // For starting, if we are RESTARTING from the current position and our
        // previous state was running/paused, then we need to add the
        // MF_EVENT_SOURCE_ACTUAL_START attribute to the event. This requires
        // creating the event object first.

        // Create the event.
        CHECK_HR(hr = MFCreateMediaEvent(MESourceStarted, GUID_NULL, hr, &var, &pEvent));

        // For restarts, set the actual start time as an attribute.
        if (bIsRestartFromCurrentPosition)
        {
            CHECK_HR(hr = pEvent->SetUINT64(MF_EVENT_SOURCE_ACTUAL_START, llStartOffset));
        }

        // Now queue the event.
        CHECK_HR(hr = m_pEventQueue->QueueEvent(pEvent));
    }

    bQueuedStartEvent = TRUE;

    // Send the stream event.
    if (m_pStream)
    {
        if (bIsSeek)
        {
            CHECK_HR(hr = m_pStream->QueueEvent(MEStreamSeeked, GUID_NULL, hr, &var));
        }
        else
        {
            CHECK_HR(hr = m_pStream->QueueEvent(MEStreamStarted, GUID_NULL, hr, &var));
        }
    }

    if (bIsSeek)
    {
        // For seek requests, flush any queued samples.
        CHECK_HR(hr = m_pStream->Flush());
    }
    else
    {
        // Otherwise, deliver any queued samples.
        CHECK_HR(hr = m_pStream->DeliverQueuedSamples());
    }

    // Initialize Stream parameters
    CHECK_HR(hr = m_pStream->InitializeParams());

    m_state = STATE_STARTED;

bail:

    // If a failure occurred and we have not sent the
    // MESourceStarted/MESourceSeeked event yet, then it is
    // OK just to return an error code from Start().

    // If a failure occurred and we have already sent the
    // event (with a success code), then we need to raise an
    // MEError event.

    if (FAILED(hr) && bQueuedStartEvent)
    {
        hr = QueueEvent(MEError, GUID_NULL, hr, &var);
    }

    PropVariantClear(&var);
    SafeRelease(&pEvent);

    LeaveCriticalSection(&m_critSec);

    return hr;
}
+
+
+//-------------------------------------------------------------------
+// Name: Pause
+// Description: Switches to paused state.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::Pause()
+{
+ EnterCriticalSection(&m_critSec);
+
+ HRESULT hr = S_OK;
+
+ hr = CheckShutdown();
+
+ // Pause is only allowed from started state.
+ if (SUCCEEDED(hr))
+ {
+ if (m_state != STATE_STARTED)
+ {
+ hr = MF_E_INVALID_STATE_TRANSITION;
+ }
+ }
+
+ // Send the appropriate events.
+ if (SUCCEEDED(hr))
+ {
+ if (m_pStream)
+ {
+ hr = m_pStream->QueueEvent(MEStreamPaused, GUID_NULL, S_OK, NULL);
+ }
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = QueueEvent(MESourcePaused, GUID_NULL, S_OK, NULL);
+ }
+
+ // Update our state.
+ if (SUCCEEDED(hr))
+ {
+ m_state = STATE_PAUSED;
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: Stop
+// Description: Switches to stopped state.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::Stop()
+{
+ EnterCriticalSection(&m_critSec);
+
+ HRESULT hr = S_OK;
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ // Update our state.
+ m_state = STATE_STOPPED;
+
+ // Flush all queued samples.
+ hr = m_pStream->Flush();
+ }
+
+ //
+ // Queue events.
+ //
+
+ if (SUCCEEDED(hr))
+ {
+ if (m_pStream)
+ {
+ hr = m_pStream->QueueEvent(MEStreamStopped, GUID_NULL, S_OK, NULL);
+ }
+ }
+ if (SUCCEEDED(hr))
+ {
+ hr = QueueEvent(MESourceStopped, GUID_NULL, S_OK, NULL);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: Shutdown
+// Description: Releases resources.
+//
+// The source and stream objects hold reference counts on each other.
+// To avoid memory leaks caused by circular ref. counts, the Shutdown
+// method releases the pointer to the stream.
+//-------------------------------------------------------------------
+
// Releases resources and marks the source as shut down (idempotent: a second
// call fails CheckShutdown()). The source and stream hold references to each
// other; releasing m_pStream here breaks that cycle so both can be destroyed.
HRESULT CMFSource::Shutdown()
{
    EnterCriticalSection(&m_critSec);

    HRESULT hr = S_OK;

    hr = CheckShutdown();

    if (SUCCEEDED(hr))
    {
        // Shut down the stream object.
        if (m_pStream)
        {
            (void)m_pStream->Shutdown();
        }

        // Shut down the event queue.
        if (m_pEventQueue)
        {
            (void)m_pEventQueue->Shutdown();
        }

        // Release objects.
        SafeRelease(&m_pStream);
        SafeRelease(&m_pEventQueue);
        SafeRelease(&m_pPresentationDescriptor);

        // Set our shutdown flag.
        m_IsShutdown = TRUE;
    }

    LeaveCriticalSection(&m_critSec);

    return hr;
}
+
+/////////////// Private CMFSource methods
+
+// NOTE: These private methods do not hold the source's critical
+// section. The caller must ensure the critical section is held.
+// Also, these methods do not check for shut-down.
+
+
+//-------------------------------------------------------------------
+// Name: CreatePresentationDescriptor
+// Description: Creates the default presentation descriptor.
+//-------------------------------------------------------------------
+
// Builds the default presentation descriptor (stored in
// m_pPresentationDescriptor): one stream descriptor wrapping m_pMediaType,
// stream 0 selected, duration set to "infinite" (live source).
// Caller holds the lock; does not check for shutdown.
// NOTE(review): assumes m_pMediaType is non-NULL — only guaranteed on the
// CreateInstanceEx(..., pMediaType) path; confirm for the plugin path.
HRESULT CMFSource::CreatePresentationDescriptor()
{
    HRESULT hr = S_OK;

    IMFStreamDescriptor *pStreamDescriptor = NULL;
    IMFMediaTypeHandler *pHandler = NULL;

    // Create the stream descriptor.
    hr = MFCreateStreamDescriptor(
        0,              // stream identifier
        1,              // Number of media types.
        &m_pMediaType,  // Array of media types
        &pStreamDescriptor
        );

    // Set the default media type on the media type handler.
    if (SUCCEEDED(hr))
    {
        hr = pStreamDescriptor->GetMediaTypeHandler(&pHandler);
    }

    if (SUCCEEDED(hr))
    {
        hr = pHandler->SetCurrentMediaType(m_pMediaType);
    }

    // Create the presentation descriptor.
    if (SUCCEEDED(hr))
    {
        hr = MFCreatePresentationDescriptor(
            1,                  // Number of stream descriptors
            &pStreamDescriptor, // Array of stream descriptors
            &m_pPresentationDescriptor
            );
    }
    // Select the first stream
    if (SUCCEEDED(hr))
    {
        hr = m_pPresentationDescriptor->SelectStream(0);
    }

    // Set the file/stream duration as an attribute on the presentation descriptor.
    if (SUCCEEDED(hr))
    {
        hr = m_pPresentationDescriptor->SetUINT64(MF_PD_DURATION, (UINT64)ULLONG_MAX);
    }

    SafeRelease(&pStreamDescriptor);
    SafeRelease(&pHandler);
    return hr;
}
+
+
+
+//-------------------------------------------------------------------
+// Name: ValidatePresentationDescriptor
+// Description: Validates the caller's presentation descriptor.
+//
+// This method is called when Start() is called with a non-NULL
+// presentation descriptor. The caller is supposed to give us back
+// the same PD that we gave out in CreatePresentationDescriptor().
+// This method performs a sanity check on the caller's PD to make
+// sure it matches ours.
+//
+// Note: Because this media source has one stream with single, fixed
+// media type, there is not much for the caller to decide. In
+// a more complicated source, the caller might select different
+// streams, or select from a list of media types.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD)
+{
+ HRESULT hr;
+
+ assert(pPD != NULL);
+
+ IMFStreamDescriptor *pStreamDescriptor = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFMediaType *pMediaType = NULL;
+ GUID majorType;
+
+ DWORD cStreamDescriptors = 0;
+ BOOL fSelected = FALSE;
+
+ // Make sure there is only one stream.
+ hr = pPD->GetStreamDescriptorCount(&cStreamDescriptors);
+
+ if (SUCCEEDED(hr))
+ {
+ if (cStreamDescriptors != 1)
+ {
+ hr = MF_E_UNSUPPORTED_REPRESENTATION;
+ }
+ }
+
+ // Get the stream descriptor.
+ if (SUCCEEDED(hr))
+ {
+ hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pStreamDescriptor);
+ }
+
+ // Make sure it's selected. (This media source has only one stream, so it
+ // is not useful to deselect the only stream.)
+ if (SUCCEEDED(hr))
+ {
+ if (!fSelected)
+ {
+ hr = MF_E_UNSUPPORTED_REPRESENTATION;
+ }
+ }
+
+ // Get the media type handler, so that we can get the media type.
+ if (SUCCEEDED(hr))
+ {
+ hr = pStreamDescriptor->GetMediaTypeHandler(&pHandler);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pHandler->GetCurrentMediaType(&pMediaType);
+ }
+
+ hr = pMediaType->GetMajorType(&majorType);
+
+ if (SUCCEEDED(hr))
+ {
+ if(majorType == MFMediaType_Video)
+ {
+ if (SUCCEEDED(hr))
+ {
+ hr = MFUtils::ValidateVideoFormat(pMediaType);
+ }
+ }
+ else
+ {
+ WAVEFORMATEX *pFormat = NULL;
+ UINT32 cbWaveFormat = 0;
+
+ if (SUCCEEDED(hr))
+ {
+ hr = MFCreateWaveFormatExFromMFMediaType(
+ pMediaType,
+ &pFormat,
+ &cbWaveFormat);
+ }
+ if (SUCCEEDED(hr))
+ {
+ /*assert(this->WaveFormat() != NULL);
+
+ if (cbWaveFormat < this->WaveFormatSize())
+ {
+ hr = MF_E_INVALIDMEDIATYPE;
+ }*/
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ /*if (memcmp(pFormat, WaveFormat(), WaveFormatSize()) != 0)
+ {
+ hr = MF_E_INVALIDMEDIATYPE;
+ }*/
+ }
+
+ CoTaskMemFree(pFormat);
+ }
+ }
+
+ SafeRelease(&pStreamDescriptor);
+ SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+
+ return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: QueueNewStreamEvent
+// Description:
+// Queues an MENewStream or MEUpdatedStream event during Start.
+//
+// pPD: The presentation descriptor.
+//
+// Precondition: The presentation descriptor is assumed to be valid.
+// Call ValidatePresentationDescriptor before calling this method.
+//-------------------------------------------------------------------
+
// Queues MENewStream (first start, also creates m_pStream) or MEUpdatedStream
// (restart with an existing stream) during Start().
//
// pPD: the presentation descriptor — assumed already validated by
//      ValidatePresentationDescriptor() (precondition).
HRESULT CMFSource::QueueNewStreamEvent(IMFPresentationDescriptor *pPD)
{
    assert(pPD != NULL);

    HRESULT hr = S_OK;
    IMFStreamDescriptor *pSD = NULL;

    BOOL fSelected = FALSE;

    hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);

    if (SUCCEEDED(hr))
    {
        // The stream must be selected, because we don't allow the app
        // to de-select the stream. See ValidatePresentationDescriptor.
        assert(fSelected);

        if (m_pStream)
        {
            // The stream already exists, and is still selected.
            // Send the MEUpdatedStream event.
            hr = QueueEventWithIUnknown(this, MEUpdatedStream, S_OK, m_pStream);
        }
        else
        {
            // The stream does not exist, and is now selected.
            // Create a new stream.

            hr = CreateCMFStreamSource(pSD);

            if (SUCCEEDED(hr))
            {
                // CreateCMFStreamSource creates the stream, so m_pStream is no longer NULL.
                assert(m_pStream != NULL);

                // Send the MENewStream event.
                hr = QueueEventWithIUnknown(this, MENewStream, S_OK, m_pStream);
            }
        }
    }

    SafeRelease(&pSD);
    return hr;
}
+
+//-------------------------------------------------------------------
+// Name: CreateCMFStreamSource
+// Description: Creates the source's media stream object.
+//-------------------------------------------------------------------
+
+HRESULT CMFSource::CreateCMFStreamSource(IMFStreamDescriptor *pSD)
+{
+ HRESULT hr = S_OK;
+ m_pStream = new (std::nothrow) CMFStreamSource(this, pSD, hr);
+
+ if (m_pStream == NULL)
+ {
+ hr = E_OUTOFMEMORY;
+ }
+
+ if (FAILED(hr))
+ {
+ SafeRelease(&m_pStream);
+ }
+
+ return hr;
+}
+
+
+
+//-------------------------------------------------------------------
+// Name: GetCurrentPosition
+// Description: Returns the current playback position.
+//-------------------------------------------------------------------
+
+LONGLONG CMFSource::GetCurrentPosition() const
+{
+ if (m_pStream)
+ {
+ return m_pStream->GetCurrentPosition();
+ }
+ else
+ {
+ // If no stream is selected, we are at time 0 by definition.
+ return 0;
+ }
+}
+
+
+
+////////// AUDIO STREAM
+
+//-------------------------------------------------------------------
+// CMFStreamSource constructor.
+//
+// pSource: Parent media source.
+// pSD: Stream descriptor that describes this stream.
+// hr: If the constructor fails, this value is set to a failure code.
+//-------------------------------------------------------------------
+
+
+CMFStreamSource::CMFStreamSource(CMFSource *pSource, IMFStreamDescriptor *pSD, HRESULT& hr) :
+ m_nRefCount(1),
+ m_pEventQueue(NULL),
+ m_IsShutdown(FALSE),
+ m_rtCurrentPosition(0),
+ m_rtDuration(0),
+ m_discontinuity(FALSE),
+ m_EOS(FALSE),
+ m_pMediaBuffer(NULL),
+ m_nBufferSize(0)
+{
+ m_pSource = pSource;
+ m_pSource->AddRef();
+
+ m_pStreamDescriptor = pSD;
+ m_pStreamDescriptor->AddRef();
+
+ // Create the media event queue.
+ CHECK_HR(hr = MFCreateEventQueue(&m_pEventQueue));
+
+ //CHECK_HR(hr = InitializeParams());
+
+ InitializeCriticalSection(&m_critSec);
+
+bail:
+ return;
+}
+
+
+//-------------------------------------------------------------------
+// CMFStreamSource destructor.
+//-------------------------------------------------------------------
+
// Destructor. Shutdown() must have run first and all references must be
// released (both asserted); releases the frame buffer and the lock.
CMFStreamSource::~CMFStreamSource()
{
    assert(m_IsShutdown);
    assert(m_nRefCount == 0);

    SafeRelease(&m_pMediaBuffer);

    DeleteCriticalSection(&m_critSec);
}
+
+
+// IMFCustomSource methods
+
// Copies the caller's raw frame into m_pMediaBuffer, from which the next
// sample delivered to the pipeline is built (see CreateSample()).
// Dimensions and size must match exactly what InitializeParams() computed.
// NOTE(review): if SetCurrentLength() fails after Lock(), the buffer is left
// locked (bail skips Unlock()) — confirm whether that can happen in practice.
HRESULT CMFStreamSource::CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize)
{
    // Buffer pointer and size validity already checked by source (caller)
    if(m_guidMajorType != MFMediaType_Video)
    {
        TSK_DEBUG_ERROR("Calling CopyVideoBuffer on no-video stream");
#if defined(E_ILLEGAL_METHOD_CALL)
        return E_ILLEGAL_METHOD_CALL;
#else
        // Numeric fallback when the SDK doesn't define E_ILLEGAL_METHOD_CALL.
        return _HRESULT_TYPEDEF_(0x8000000EL);
#endif
    }
    if(nWidth != m_structVideoParams.nWidth || nHeight != m_structVideoParams.nHeigh || nBufferSize != m_nBufferSize)
    {
        TSK_DEBUG_ERROR("Invalid argument %u#%u or %u#%u or %u#%u. If the call is from a video consumer then, you can safely ignore this message.", nWidth, m_structVideoParams.nWidth, nHeight, m_structVideoParams.nHeigh, nBufferSize, m_nBufferSize);
#if defined(E_BOUNDS)
        return E_BOUNDS;
#else
        // Numeric fallback when the SDK doesn't define E_BOUNDS.
        return _HRESULT_TYPEDEF_(0x8000000BL);
#endif
    }

    HRESULT hr = S_OK;

    BYTE* pMediaBufferPtr = NULL;
    DWORD cbMaxLength = nBufferSize, cbCurrentLength = nBufferSize;
    CHECK_HR(hr = m_pMediaBuffer->Lock(&pMediaBufferPtr, &cbMaxLength, &cbCurrentLength));

    memcpy(pMediaBufferPtr, pBufferPtr, nBufferSize);
    CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
    CHECK_HR(hr = m_pMediaBuffer->Unlock());

bail:
    return hr;
}
+
+// IUnknown methods
+
+ULONG CMFStreamSource::AddRef()
+{
+ return InterlockedIncrement(&m_nRefCount);
+}
+
+ULONG CMFStreamSource::Release()
+{
+ ULONG uCount = InterlockedDecrement(&m_nRefCount);
+ if (uCount == 0)
+ {
+ delete this;
+ }
+ // For thread safety, return a temporary variable.
+ return uCount;
+}
+
+HRESULT CMFStreamSource::QueryInterface(REFIID iid, void** ppv)
+{
+ static const QITAB qit[] =
+ {
+ QITABENT(CMFStreamSource, IMFMediaEventGenerator),
+ QITABENT(CMFStreamSource, IMFMediaStream),
+ { 0 }
+ };
+ return QISearch(this, qit, iid, ppv);
+}
+
+
+// IMFMediaEventGenerator methods
+// [See note for CMFSource class]
+
+HRESULT CMFStreamSource::BeginGetEvent(IMFAsyncCallback* pCallback, IUnknown* punkState)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->BeginGetEvent(pCallback, punkState);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+ return hr;
+}
+
+HRESULT CMFStreamSource::EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->EndGetEvent(pResult, ppEvent);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+ return hr;
+}
+
+HRESULT CMFStreamSource::GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent)
+{
+ HRESULT hr = S_OK;
+
+ IMFMediaEventQueue *pQueue = NULL;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ pQueue = m_pEventQueue;
+ pQueue->AddRef();
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pQueue->GetEvent(dwFlags, ppEvent);
+ }
+
+ SafeRelease(&pQueue);
+ return hr;
+}
+
+HRESULT CMFStreamSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT* pvValue)
+{
+ HRESULT hr = S_OK;
+
+ EnterCriticalSection(&m_critSec);
+
+ hr = CheckShutdown();
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
+ }
+
+ LeaveCriticalSection(&m_critSec);
+ return hr;
+}
+
+
+// IMFMediaStream methods.
+
+
+//-------------------------------------------------------------------
+// Name: GetMediaSource
+// Description: Returns a pointer to the media source.
+//-------------------------------------------------------------------
+
+HRESULT CMFStreamSource::GetMediaSource(IMFMediaSource** ppMediaSource)
+{
+ if (ppMediaSource == NULL)
+ {
+ return E_POINTER;
+ }
+
+ EnterCriticalSection(&m_critSec);
+
+ HRESULT hr = S_OK;
+
+ // If called after shutdown, them m_pSource is NULL.
+ // Otherwise, m_pSource should not be NULL.
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ if (m_pSource == NULL)
+ {
+ hr = E_UNEXPECTED;
+ }
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = m_pSource->QueryInterface(IID_PPV_ARGS(ppMediaSource));
+ }
+
+ LeaveCriticalSection(&m_critSec);
+ return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: GetStreamDescriptor
+// Description: Returns the stream descriptor for this stream.
+//-------------------------------------------------------------------
+
+HRESULT CMFStreamSource::GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescriptor)
+{
+ if (ppStreamDescriptor == NULL)
+ {
+ return E_POINTER;
+ }
+
+ if (m_pStreamDescriptor == NULL)
+ {
+ return E_UNEXPECTED;
+ }
+
+ EnterCriticalSection(&m_critSec);
+
+ HRESULT hr = S_OK;
+
+ hr = CheckShutdown();
+
+ if (SUCCEEDED(hr))
+ {
+ *ppStreamDescriptor = m_pStreamDescriptor;
+ (*ppStreamDescriptor)->AddRef();
+ }
+
+ LeaveCriticalSection(&m_critSec);
+
+ return hr;
+}
+
+
+
+//-------------------------------------------------------------------
+// Name: RequestSample
+// Description: Requests a new sample.
+//
+// pToken: Token object. Can be NULL.
+//-------------------------------------------------------------------
+
// Handles a sample request from the pipeline: builds a sample around the
// current media buffer and either delivers it (started) or queues it
// (paused). The optional token is attached as MFSampleExtension_Token.
//
// pToken: Token object. Can be NULL.
//
// Locking note: the end-of-presentation event must be queued on the SOURCE,
// but we may not take the source lock while holding the stream lock (lock
// ordering: source first). Hence the cached pSource pointer used after
// leaving the stream's critical section.
HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
{
    if (m_pSource == NULL)
    {
        return E_UNEXPECTED;
    }

    HRESULT hr = S_OK;

    IMFMediaSource *pSource = NULL;
    IMFSample *pSample = NULL;  // Sample to deliver.

    EnterCriticalSection(&m_critSec);

    // Check if we are shut down.
    hr = CheckShutdown();

    // Check if we already reached the end of the stream.
    if (SUCCEEDED(hr))
    {
        if (m_EOS)
        {
            hr = MF_E_END_OF_STREAM;
        }
    }

    // Check the source is stopped.
    // GetState does not hold the source's critical section. Safe to call.
    if (SUCCEEDED(hr))
    {
        if (m_pSource->GetState() == CMFSource::STATE_STOPPED)
        {
            hr = MF_E_INVALIDREQUEST;
        }
    }

    if (SUCCEEDED(hr))
    {
        // Create a new audio sample.
        hr = CreateSample(&pSample);
    }

    if (SUCCEEDED(hr))
    {
        // If the caller provided a token, attach it to the sample as
        // an attribute.

        // NOTE: If we processed sample requests asynchronously, we would
        // need to call AddRef on the token and put the token onto a FIFO
        // queue. See documenation for IMFMediaStream::RequestSample.
        // (pSample can legitimately be NULL: CreateSample() returns no sample
        // when the media buffer is empty.)
        if (pToken && pSample)
        {
            hr = pSample->SetUnknown(MFSampleExtension_Token, pToken);
        }
    }

    // If paused, queue the sample for later delivery. Otherwise, deliver the sample now.
    if (SUCCEEDED(hr) && pSample)
    {
        if (m_pSource->GetState() == CMFSource::STATE_PAUSED)
        {
            hr = m_sampleQueue.Queue(pSample);
        }
        else
        {
            hr = DeliverSample(pSample);
        }
    }

    // Cache a pointer to the source, prior to leaving the critical section.
    if (SUCCEEDED(hr))
    {
        pSource = m_pSource;
        pSource->AddRef();
    }

    LeaveCriticalSection(&m_critSec);


    // We only have one stream, so the end of the stream is also the end of the
    // presentation. Therefore, when we reach the end of the stream, we need to
    // queue the end-of-presentation event from the source. Logically we would do
    // this inside the CheckEndOfStream method. However, we cannot hold the
    // source's critical section while holding the stream's critical section, at
    // risk of deadlock.

    if (SUCCEEDED(hr))
    {
        if (m_EOS)
        {
            hr = pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
        }
    }

    SafeRelease(&pSample);
    SafeRelease(&pSource);
    return hr;
}
+
+
+///// Private CMFStreamSource methods
+
+// Reads the negotiated media type from the stream descriptor and sizes the
+// shared media buffer accordingly. Video-only: any other major type fails
+// with E_NOTIMPL. On success m_pMediaBuffer holds one zeroed frame so that
+// DeliverSample() has data to push before the first CopyVideoBuffer() call.
+HRESULT CMFStreamSource::InitializeParams()
+{
+	HRESULT hr = S_OK;
+
+	IMFMediaTypeHandler *pMediaTypeHandler = NULL;
+	IMFMediaType* pMediaType = NULL;
+
+	CHECK_HR(hr = m_pStreamDescriptor->GetMediaTypeHandler(&pMediaTypeHandler));
+	CHECK_HR(hr = pMediaTypeHandler->GetCurrentMediaType(&pMediaType));
+
+	GUID majorType, subType;
+	pMediaType->GetMajorType(&majorType);
+	if(majorType == MFMediaType_Video)
+	{
+		memset(&m_structVideoParams, 0, sizeof(m_structVideoParams));
+		CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &m_structVideoParams.nWidth, &m_structVideoParams.nHeigh));
+		CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+
+		m_guidMajorType = MFMediaType_Video;
+		m_guidSubType = subType;
+
+		// Compute the per-frame buffer size for the negotiated subtype.
+		UINT32 nBufferSize;
+		if(subType == MFVideoFormat_RGB32)
+		{
+			nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2); // 4 bytes/pixel
+		}
+		else if(subType == MFVideoFormat_RGB24)
+		{
+			// FIX: RGB24 is 3 bytes per pixel; previous code allocated 4 bytes
+			// per pixel (copy/paste of the RGB32 branch).
+			nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh * 3);
+		}
+		else if(subType == MFVideoFormat_NV12 || subType == MFVideoFormat_I420)
+		{
+			nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh * 3) >> 1; // 12 bits/pixel
+		}
+		else
+		{
+			TSK_DEBUG_ERROR("Video subType not supported");
+			CHECK_HR(hr = E_NOTIMPL);
+		}
+
+		// Allocate media buffer
+		SafeRelease(&m_pMediaBuffer);
+		CHECK_HR(hr = MFCreateMemoryBuffer(nBufferSize, &m_pMediaBuffer));
+		m_nBufferSize = nBufferSize;
+		{
+			//FIXME: DeliverSample() stops if no data
+			BYTE* pBuffer = NULL;
+			CHECK_HR(hr = m_pMediaBuffer->Lock(&pBuffer, NULL, NULL));
+			memset(pBuffer, 0, nBufferSize);
+			CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
+			CHECK_HR(hr = m_pMediaBuffer->Unlock());
+		}
+
+		// Retrieve video Frame rate
+		UINT32 unNumerator, unDenominator;
+		CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &unNumerator, &unDenominator));
+		// FIX: guard against a zero denominator in the advertised frame rate
+		// (division by zero).
+		if(unDenominator == 0)
+		{
+			TSK_DEBUG_ERROR("Invalid frame rate denominator");
+			CHECK_HR(hr = E_INVALIDARG);
+		}
+		m_structVideoParams.nFps = (unNumerator / unDenominator);
+
+		// Retrieve sample duration based on framerate
+		m_rtCurrentPosition = 0;
+		CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(m_structVideoParams.nFps, 1, &m_rtDuration));
+	}
+	else
+	{
+		TSK_DEBUG_ERROR("Only video media type is supported");
+		CHECK_HR(hr = E_NOTIMPL);
+	}
+
+bail:
+	SafeRelease(&pMediaTypeHandler);
+	SafeRelease(&pMediaType);
+
+	return hr;
+}
+
+// NOTE: Some of these methods hold the stream's critical section
+// because they are called by the media source object.
+
+//-------------------------------------------------------------------
+// Name: CreateSample
+// Description: Creates a new audio/video sample.
+//-------------------------------------------------------------------
+
+HRESULT CMFStreamSource::CreateSample(IMFSample **ppSample)
+{
+	// Build a new IMFSample wrapping the shared media buffer, timestamped at
+	// the current stream position. *ppSample stays NULL (and S_OK is returned)
+	// when the buffer is empty, i.e. no frame has been copied in yet.
+	*ppSample = NULL;
+
+	HRESULT hr = S_OK;
+
+	IMFSample *pSample = NULL;
+	DWORD nCurrentLength = 0;
+
+	CHECK_HR(hr = m_pMediaBuffer->GetCurrentLength(&nCurrentLength));
+
+	if(nCurrentLength > 0)
+	{
+		CHECK_HR(hr = MFCreateSample(&pSample));
+		// Stamp the sample and advance the stream clock by one frame duration.
+		CHECK_HR(hr = pSample->SetSampleTime(m_rtCurrentPosition));
+		CHECK_HR(hr = pSample->SetSampleDuration(m_rtDuration));
+		m_rtCurrentPosition += m_rtDuration;
+		CHECK_HR(hr = pSample->AddBuffer(m_pMediaBuffer));
+
+		// Hand the caller its own reference; the local one is released below.
+		if((*ppSample = pSample))
+		{
+			(*ppSample)->AddRef();
+		}
+	}
+
+bail:
+	SafeRelease(&pSample);
+	return hr;
+}
+
+//-------------------------------------------------------------------
+// Name: DeliverSample
+// Description: Delivers a sample by sending an MEMediaSample event.
+//-------------------------------------------------------------------
+HRESULT CMFStreamSource::DeliverSample(IMFSample *pSample)
+{
+	HRESULT hr = S_OK;
+
+	// pSample may be NULL (CreateSample returns no sample when the media
+	// buffer is empty); in that case only the end-of-stream check runs.
+	if(pSample)
+	{
+		// Send the MEMediaSample event with the new sample.
+		hr = QueueEventWithIUnknown(this, MEMediaSample, hr, pSample);
+	}
+
+	// See if we reached the end of the stream.
+	if (SUCCEEDED(hr))
+	{
+		hr = CheckEndOfStream(); // This method sends MEEndOfStream if needed.
+	}
+
+	return hr;
+}
+
+//-------------------------------------------------------------------
+// Name: DeliverQueuedSamples
+// Description: Delivers any samples waiting in the queue.
+//
+// Note: If the client requests a sample while the source is paused,
+// the sample is queued and delivered on the next non-seeking call
+// to Start(). The queue is flushed if the source is seeked or
+// stopped.
+//-------------------------------------------------------------------
+
+// Delivers any samples waiting in the queue (queued while the source was
+// paused; flushed on seek/stop). If the stream already ended, re-sends
+// MEEndOfStream and queues MEEndOfPresentation on the parent source.
+HRESULT CMFStreamSource::DeliverQueuedSamples()
+{
+	HRESULT hr = S_OK;
+	IMFSample *pSample = NULL;
+	CMFSource *pSource = NULL;
+
+	EnterCriticalSection(&m_critSec);
+
+	// If we already reached the end of the stream, send the MEEndStream
+	// event again.
+	if (m_EOS)
+	{
+		hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
+	}
+
+	if (SUCCEEDED(hr))
+	{
+		// Deliver any queued samples.
+		while (!m_sampleQueue.IsEmpty())
+		{
+			hr = m_sampleQueue.Dequeue(&pSample);
+			if (FAILED(hr))
+			{
+				break;
+			}
+
+			hr = DeliverSample(pSample);
+			if (FAILED(hr))
+			{
+				break;
+			}
+
+			SafeRelease(&pSample);
+		}
+	}
+
+	// FIX: cache an AddRef'd pointer to the source before leaving the critical
+	// section (same pattern as RequestSample), instead of touching m_pSource
+	// unlocked afterwards — Shutdown() releases m_pSource under this lock.
+	if (SUCCEEDED(hr) && m_EOS && m_pSource)
+	{
+		pSource = m_pSource;
+		pSource->AddRef();
+	}
+
+	LeaveCriticalSection(&m_critSec);
+
+	// If we reached the end of the stream, send the end-of-presentation event from
+	// the media source.
+	if (SUCCEEDED(hr) && pSource)
+	{
+		hr = pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
+	}
+
+	SafeRelease(&pSample);
+	SafeRelease(&pSource);
+	return hr;
+}
+
+
+//-------------------------------------------------------------------
+// Name: Flush
+// Description: Flushes the sample queue.
+//-------------------------------------------------------------------
+
+HRESULT CMFStreamSource::Flush()
+{
+	// Discard (and release) every sample queued while the source was paused.
+	EnterCriticalSection(&m_critSec);
+
+	m_sampleQueue.Clear();
+
+	LeaveCriticalSection(&m_critSec);
+	return S_OK;
+}
+
+
+//-------------------------------------------------------------------
+// Name: Shutdown
+// Description: Notifies the stream that the source was shut down.
+//-------------------------------------------------------------------
+
+HRESULT CMFStreamSource::Shutdown()
+{
+	EnterCriticalSection(&m_critSec);
+
+	// Flush queued samples.
+	// NOTE: Flush() re-enters m_critSec; safe because Win32 CRITICAL_SECTIONs
+	// are recursive for the owning thread.
+	Flush();
+
+	// Shut down the event queue.
+	if (m_pEventQueue)
+	{
+		m_pEventQueue->Shutdown();
+	}
+
+	// Drop all back-references; after this every public method returns
+	// MF_E_SHUTDOWN via CheckShutdown().
+	SafeRelease(&m_pEventQueue);
+	SafeRelease(&m_pSource);
+	SafeRelease(&m_pStreamDescriptor);
+
+	m_IsShutdown = TRUE;
+
+	LeaveCriticalSection(&m_critSec);
+	return S_OK;
+}
+
+//-------------------------------------------------------------------
+// Name: SetPosition
+// Description: Updates the new stream position.
+//-------------------------------------------------------------------
+
+HRESULT CMFStreamSource::SetPosition(LONGLONG rtNewPosition)
+{
+	// Seeking is not implemented for this live source: the body below is the
+	// file-based (RIFF) logic inherited from the Microsoft sample, kept
+	// disabled for reference. The method currently always returns S_OK
+	// without changing the position.
+	EnterCriticalSection(&m_critSec);
+
+	HRESULT hr = S_OK;
+
+/*
+	// Check if the requested position is beyond the end of the stream.
+	LONGLONG duration = AudioDurationFromBufferSize(m_pRiff->Format(), m_pRiff->Chunk().DataSize());
+
+	if (rtNewPosition > duration)
+	{
+		LeaveCriticalSection(&m_critSec);
+
+		return MF_E_INVALIDREQUEST; // Start position is past the end of the presentation.
+	}
+
+	if (m_rtCurrentPosition != rtNewPosition)
+	{
+		LONGLONG offset = BufferSizeFromAudioDuration(m_pRiff->Format(), rtNewPosition);
+
+		// The chunk size is a DWORD. So if our calculations are correct, there is no
+		// way that the maximum valid seek position can be larger than a DWORD.
+		assert(offset <= MAXDWORD);
+
+		hr = m_pRiff->MoveToChunkOffset((DWORD)offset);
+
+		if (SUCCEEDED(hr))
+		{
+			m_rtCurrentPosition = rtNewPosition;
+			m_discontinuity = TRUE;
+			m_EOS = FALSE;
+		}
+	}
+*/
+	LeaveCriticalSection(&m_critSec);
+	return hr;
+}
+
+HRESULT CMFStreamSource::CheckEndOfStream()
+{
+	// A live capture stream never ends, so this is a no-op: m_EOS is never
+	// set here and no MEEndOfStream event is queued. The disabled body below
+	// is the file-based logic from the Microsoft sample, kept for reference.
+	HRESULT hr = S_OK;
+/*
+	if (m_pRiff->BytesRemainingInChunk() < m_pRiff->Format()->nBlockAlign)
+	{
+		// The remaining data is smaller than the audio block size. (In theory there shouldn't be
+		// partial bits of data at the end, so we should reach an even zero bytes, but the file
+		// might not be authored correctly.)
+		m_EOS = TRUE;
+
+		// Send the end-of-stream event,
+		hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
+	}
+	*/
+	return hr;
+}
+
+
+
+
+//-------------------------------------------------------------------
+// Name: QueueEventWithIUnknown
+// Description: Helper function to queue an event with an IUnknown
+// pointer value.
+//
+// pMEG: Media event generator that will queue the event.
+// meType: Media event type.
+// hrStatus: Status code for the event.
+// pUnk: IUnknown pointer value.
+//
+//-------------------------------------------------------------------
+
+
+HRESULT QueueEventWithIUnknown(
+	IMFMediaEventGenerator *pMEG,
+	MediaEventType meType,
+	HRESULT hrStatus,
+	IUnknown *pUnk)
+{
+	// NOTE: pUnk must be non-NULL; it is dereferenced unconditionally below.
+
+	// Create the PROPVARIANT to hold the IUnknown value.
+	PROPVARIANT var;
+	var.vt = VT_UNKNOWN;
+	var.punkVal = pUnk;
+	// Balanced by the Release inside PropVariantClear() below; the event queue
+	// takes its own reference when the event is queued.
+	pUnk->AddRef();
+
+	// Queue the event.
+	HRESULT hr = pMEG->QueueEvent(meType, GUID_NULL, hrStatus, &var);
+
+	// Clear the PROPVARIANT.
+	PropVariantClear(&var);
+
+	return hr;
+}
+
+// Converts an audio buffer size (bytes) to a duration in 100-ns units,
+// using the format's average byte rate. Returns 0 for a zero byte rate.
+LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioDataSize)
+{
+	assert(pWav != NULL);
+
+	if (pWav->nAvgBytesPerSec == 0)
+	{
+		return 0;
+	}
+	return (LONGLONG)cbAudioDataSize * 10000000 / pWav->nAvgBytesPerSec;
+}
+
+// Converts a duration (100-ns units) to an audio buffer size in bytes,
+// rounded up to a whole block (nBlockAlign). Inverse of
+// AudioDurationFromBufferSize.
+LONGLONG BufferSizeFromAudioDuration(const WAVEFORMATEX *pWav, LONGLONG duration)
+{
+	// FIX: assert non-NULL and guard nBlockAlign == 0 (division by zero),
+	// consistent with AudioDurationFromBufferSize's nAvgBytesPerSec guard.
+	assert(pWav != NULL);
+
+	LONGLONG cbSize = duration * pWav->nAvgBytesPerSec / 10000000;
+
+	if (pWav->nBlockAlign == 0)
+	{
+		return cbSize; // malformed format: skip block rounding
+	}
+
+	ULONG ulRemainder = (ULONG)(cbSize % pWav->nBlockAlign);
+
+	// Round up to the next block.
+	if(ulRemainder)
+	{
+		cbSize += pWav->nBlockAlign - ulRemainder;
+	}
+
+	return cbSize;
+}
+
+
diff --git a/plugins/pluginWinMF/internals/mf_custom_src.h b/plugins/pluginWinMF/internals/mf_custom_src.h
new file mode 100644
index 0000000..f9194c9
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_custom_src.h
@@ -0,0 +1,340 @@
+/*
+* Copyright (C) Microsoft Corporation. All rights reserved.
+* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_CUSTOM_SOURCE_H
+#define PLUGIN_WIN_MF_CUSTOM_SOURCE_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <windows.h>
+#include <assert.h>
+
+#include <mfapi.h>
+#include <mfobjects.h>
+#include <mfidl.h>
+#include <mferror.h>
+#include <shlwapi.h>
+
+class CMFStreamSource;
+class CMFSource;
+
+LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioDataSize);
+
+
+//////////////////////////////////////////////////////////////////////////
+// CMFSource
+// Description: Media source object.
+//////////////////////////////////////////////////////////////////////////
+
+class CMFSource : public IMFMediaSource
+{
+ friend class CMFStreamSource;
+
+public:
+ static HRESULT CreateInstance(REFIID iid, void **ppSource);
+ static HRESULT CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType);
+
+ // IMFCustomSource
+ HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
+
+ // IUnknown
+ STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
+ STDMETHODIMP_(ULONG) AddRef();
+ STDMETHODIMP_(ULONG) Release();
+
+ // IMFMediaEventGenerator
+ STDMETHODIMP BeginGetEvent(IMFAsyncCallback* pCallback,IUnknown* punkState);
+ STDMETHODIMP EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent);
+ STDMETHODIMP GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent);
+ STDMETHODIMP QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT* pvValue);
+
+ // IMFMediaSource
+ STDMETHODIMP CreatePresentationDescriptor(IMFPresentationDescriptor** ppPresentationDescriptor);
+ STDMETHODIMP GetCharacteristics(DWORD* pdwCharacteristics);
+ STDMETHODIMP Pause();
+ STDMETHODIMP Shutdown();
+ STDMETHODIMP Start(
+ IMFPresentationDescriptor* pPresentationDescriptor,
+ const GUID* pguidTimeFormat,
+ const PROPVARIANT* pvarStartPosition
+ );
+ STDMETHODIMP Stop();
+
+private:
+
+ enum State
+ {
+ STATE_STOPPED,
+ STATE_PAUSED,
+ STATE_STARTED
+ };
+
+
+ // Constructor is private - client should use static CreateInstance method.
+ CMFSource(HRESULT &hr, IMFMediaType *pMediaType);
+ virtual ~CMFSource();
+
+ HRESULT CheckShutdown() const
+ {
+ if (m_IsShutdown)
+ {
+ return MF_E_SHUTDOWN;
+ }
+ else
+ {
+ return S_OK;
+ }
+ }
+
+ HRESULT CreatePresentationDescriptor();
+ HRESULT QueueNewStreamEvent(IMFPresentationDescriptor *pPD);
+ HRESULT CreateCMFStreamSource(IMFStreamDescriptor *pSD);
+ HRESULT ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD);
+
+ LONGLONG GetCurrentPosition() const;
+ State GetState() const { return m_state; }
+
+ IMFMediaEventQueue *m_pEventQueue; // Event generator helper
+ IMFPresentationDescriptor *m_pPresentationDescriptor; // Default presentation
+
+ CMFStreamSource *m_pStream; // Media stream. Can be NULL is no stream is selected.
+
+ long m_nRefCount; // reference count
+ CRITICAL_SECTION m_critSec;
+ BOOL m_IsShutdown; // Flag to indicate if Shutdown() method was called.
+ State m_state; // Current state (running, stopped, paused)
+
+ IMFMediaType *m_pMediaType; // The supported mediaType
+};
+
+
+// FIFO of AddRef'd IMFSample pointers, implemented as a circular doubly
+// linked list with a sentinel anchor node (anchor.next == head,
+// anchor.prev == tail; the list is empty when anchor points at itself).
+// Used to hold samples requested while the source is paused.
+// NOTE(review): not thread-safe by itself — callers serialize access with
+// the stream's critical section.
+class SampleQueue
+{
+protected:
+
+	// Nodes in the linked list
+	struct Node
+	{
+		Node *prev;
+		Node *next;
+		IMFSample* item;
+
+		Node() : prev(NULL), next(NULL)
+		{
+		}
+
+		Node(IMFSample* item) : prev(NULL), next(NULL)
+		{
+			this->item = item;
+		}
+
+		IMFSample* Item() const { return item; }
+	};
+
+
+protected:
+	Node	m_anchor;	// Anchor node for the linked list.
+
+public:
+
+	SampleQueue()
+	{
+		m_anchor.next = &m_anchor;
+		m_anchor.prev = &m_anchor;
+	}
+
+	virtual ~SampleQueue()
+	{
+		Clear();
+	}
+
+	// Appends item at the tail. Takes its own reference (AddRef);
+	// released by Dequeue's caller or by Clear().
+	HRESULT Queue(IMFSample* item)
+	{
+		if (item == NULL)
+		{
+			return E_POINTER;
+		}
+
+		Node *pNode = new (std::nothrow) Node(item);
+		if (pNode == NULL)
+		{
+			return E_OUTOFMEMORY;
+		}
+
+		item->AddRef();
+
+		// Splice pNode between the current tail and the anchor.
+		Node *pBefore = m_anchor.prev;
+
+		Node *pAfter = pBefore->next;
+
+		pBefore->next = pNode;
+		pAfter->prev = pNode;
+
+		pNode->prev = pBefore;
+		pNode->next = pAfter;
+
+		return S_OK;
+
+	}
+
+	// Removes the head and returns it in *ppItem. Ownership of the
+	// reference transfers to the caller (caller must Release).
+	HRESULT Dequeue(IMFSample* *ppItem)
+	{
+		if (IsEmpty())
+		{
+			return E_FAIL;
+		}
+		if (ppItem == NULL)
+		{
+			return E_POINTER;
+		}
+
+		Node *pNode = m_anchor.next;
+
+		// The next node's previous is this node's previous.
+		// (m_anchor.next == pNode here, so this reads pNode->prev.)
+		pNode->next->prev = m_anchor.next->prev;
+
+		// The previous node's next is this node's next.
+		pNode->prev->next = pNode->next;
+
+		*ppItem = pNode->item;
+		delete pNode;
+
+		return S_OK;
+	}
+
+	BOOL IsEmpty() const { return m_anchor.next == &m_anchor; }
+
+	// Releases every queued sample and frees all nodes.
+	void Clear()
+	{
+		Node *n = m_anchor.next;
+
+		// Delete the nodes
+		while (n != &m_anchor)
+		{
+			if (n->item)
+			{
+				n->item->Release();
+			}
+
+			Node *tmp = n->next;
+			delete n;
+			n = tmp;
+		}
+
+		// Reset the anchor to point at itself
+		m_anchor.next = &m_anchor;
+		m_anchor.prev = &m_anchor;
+	}
+
+};
+
+
+
+//////////////////////////////////////////////////////////////////////////
+// CMFStreamSource
+// Description: Media stream object.
+//////////////////////////////////////////////////////////////////////////
+
+
+class CMFStreamSource : public IMFMediaStream
+{
+ friend class CMFSource;
+
+public:
+
+ // IMFCustomSource
+ HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
+
+ // IUnknown
+ STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
+ STDMETHODIMP_(ULONG) AddRef();
+ STDMETHODIMP_(ULONG) Release();
+
+ // IMFMediaEventGenerator
+ STDMETHODIMP BeginGetEvent(IMFAsyncCallback* pCallback,IUnknown* punkState);
+ STDMETHODIMP EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent);
+ STDMETHODIMP GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent);
+ STDMETHODIMP QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRESULT hrStatus, const PROPVARIANT* pvValue);
+
+ // IMFMediaStream
+ STDMETHODIMP GetMediaSource(IMFMediaSource** ppMediaSource);
+ STDMETHODIMP GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescriptor);
+ STDMETHODIMP RequestSample(IUnknown* pToken);
+
+private:
+
+ CMFStreamSource(CMFSource *pSource, IMFStreamDescriptor *pSD, HRESULT& hr);
+ ~CMFStreamSource();
+
+
+ HRESULT CheckShutdown() const
+ {
+ if (m_IsShutdown)
+ {
+ return MF_E_SHUTDOWN;
+ }
+ else
+ {
+ return S_OK;
+ }
+ }
+
+ HRESULT InitializeParams();
+ HRESULT Shutdown();
+ HRESULT CreateSample(IMFSample **pSample);
+ HRESULT DeliverSample(IMFSample *pSample);
+ HRESULT DeliverQueuedSamples();
+ HRESULT Flush();
+
+ LONGLONG GetCurrentPosition() const { return m_rtCurrentPosition; }
+ HRESULT SetPosition(LONGLONG rtNewPosition);
+ HRESULT CheckEndOfStream();
+
+
+ long m_nRefCount; // reference count
+ CRITICAL_SECTION m_critSec;
+ BOOL m_IsShutdown; // Flag to indicate if source's Shutdown() method was called.
+ LONGLONG m_rtCurrentPosition; // Current position in the stream, in 100-ns units
+ UINT64 m_rtDuration; // Sample duration, in 100-ns units
+ BOOL m_discontinuity; // Is the next sample a discontinuity?
+ BOOL m_EOS; // Did we reach the end of the stream?
+
+ IMFMediaEventQueue *m_pEventQueue; // Event generator helper.
+ CMFSource *m_pSource; // Parent media source
+ IMFStreamDescriptor *m_pStreamDescriptor; // Stream descriptor for this stream.
+
+ SampleQueue m_sampleQueue; // Queue for samples while paused.
+ GUID m_guidMajorType; // major media type (e.g. MFMediaType_Video or MFMediaType_Audio)
+ GUID m_guidSubType; // Media subtype (e.g. MFVideoFormat_RGB32 or MFVideoFormat_H264)
+ IMFMediaBuffer *m_pMediaBuffer; // Pointer to the data to deliver
+ UINT32 m_nBufferSize; // Size of the data to deliver
+
+ struct
+ {
+ UINT32 nWidth;
+ UINT32 nHeigh;
+ UINT32 nFps;
+ }
+ m_structVideoParams;
+};
+
+
+#endif /* PLUGIN_WIN_MF_CUSTOM_SOURCE_H */
diff --git a/plugins/pluginWinMF/internals/mf_devices.cxx b/plugins/pluginWinMF/internals/mf_devices.cxx
new file mode 100644
index 0000000..22b862e
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_devices.cxx
@@ -0,0 +1,151 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_devices.h"
+#include "mf_utils.h"
+
+DeviceList::DeviceList()
+: m_ppDevices(NULL)
+, m_cDevices(0)
+{
+
+}
+
+DeviceList::~DeviceList()
+{
+ Clear();
+}
+
+UINT32 DeviceList::Count()const
+{
+ return m_cDevices;
+}
+
+void DeviceList::Clear()
+{
+ for (UINT32 i = 0; i < m_cDevices; i++) {
+ SafeRelease(&m_ppDevices[i]);
+ }
+ CoTaskMemFree(m_ppDevices);
+ m_ppDevices = NULL;
+
+ m_cDevices = 0;
+}
+
+HRESULT DeviceList::EnumerateDevices(const GUID& sourceType)
+{
+ HRESULT hr = S_OK;
+ IMFAttributes *pAttributes = NULL;
+
+ Clear();
+
+ // Initialize an attribute store. We will use this to
+ // specify the enumeration parameters.
+
+ hr = MFCreateAttributes(&pAttributes, 1);
+
+ // Ask for source type = video capture devices
+ if (SUCCEEDED(hr))
+ {
+ hr = pAttributes->SetGUID(
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ sourceType
+ );
+ }
+
+ // Enumerate devices.
+ if (SUCCEEDED(hr))
+ {
+ hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
+ }
+
+ SafeRelease(&pAttributes);
+
+ return hr;
+}
+
+// Returns the device activation object at 'index' in *ppActivate.
+// The returned pointer is AddRef'd; the caller must Release it.
+HRESULT DeviceList::GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate)
+{
+	// FIX: validate the out-parameter before dereferencing it.
+	if (ppActivate == NULL)
+	{
+		return E_POINTER;
+	}
+	if (index >= Count())
+	{
+		return E_INVALIDARG;
+	}
+
+	*ppActivate = m_ppDevices[index];
+	(*ppActivate)->AddRef();
+
+	return S_OK;
+}
+
+// Returns the device whose friendly name matches pszName, or the first
+// enumerated device when pszName is NULL or no match is found.
+// The returned *ppActivate is AddRef'd (via GetDeviceAtIndex).
+HRESULT DeviceList::GetDeviceBest(IMFActivate **ppActivate, WCHAR *pszName /*= NULL*/)
+{
+	UINT32 index = 0;
+	if(pszName)
+	{
+		WCHAR *_pszName = NULL;
+		BOOL bFound = FALSE;
+		for(UINT32 i = 0; i < Count() && !bFound; ++i)
+		{
+			if((SUCCEEDED(GetDeviceName(i, &_pszName))))
+			{
+				if(wcscmp(_pszName, pszName) == 0)
+				{
+					index = i;
+					bFound = TRUE;
+					// do not break the loop because we need to free(_pszName)
+				}
+			}
+			// GetDeviceName allocates via GetAllocatedString; free every pass.
+			if(_pszName)
+			{
+				CoTaskMemFree(_pszName), _pszName = NULL;
+			}
+		}
+	}
+	return GetDeviceAtIndex(index, ppActivate);
+}
+
+// The caller must free the memory for the string by calling CoTaskMemFree
+HRESULT DeviceList::GetDeviceName(UINT32 index, WCHAR **ppszName)
+{
+ if (index >= Count())
+ {
+ return E_INVALIDARG;
+ }
+
+ HRESULT hr = S_OK;
+
+ hr = m_ppDevices[index]->GetAllocatedString(
+ MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
+ ppszName,
+ NULL
+ );
+
+ return hr;
+}
+
+HRESULT DeviceListAudio::EnumerateDevices()
+{
+ // call base class function
+ return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
+}
+
+HRESULT DeviceListVideo::EnumerateDevices()
+{
+ // call base class function
+ return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+}
diff --git a/plugins/pluginWinMF/internals/mf_devices.h b/plugins/pluginWinMF/internals/mf_devices.h
new file mode 100644
index 0000000..03d010f
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_devices.h
@@ -0,0 +1,64 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_DEVICES_H
+#define PLUGIN_WIN_MF_DEVICES_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+
+//
+// DeviceList [Declaration]
+//
+class DeviceList
+{
+ UINT32 m_cDevices;
+ IMFActivate **m_ppDevices;
+
+public:
+ DeviceList();
+ virtual ~DeviceList();
+
+ UINT32 Count()const;
+ void Clear();
+ HRESULT GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate);
+ HRESULT GetDeviceBest(IMFActivate **ppActivate, WCHAR *pszName = NULL);
+ HRESULT GetDeviceName(UINT32 index, WCHAR **ppszName);
+
+protected:
+ HRESULT EnumerateDevices(const GUID& sourceType);
+};
+
+class DeviceListAudio : public DeviceList
+{
+public:
+ HRESULT EnumerateDevices();
+};
+
+class DeviceListVideo : public DeviceList
+{
+public:
+ HRESULT EnumerateDevices();
+};
+
+#endif /* PLUGIN_WIN_MF_DEVICES_H */
diff --git a/plugins/pluginWinMF/internals/mf_display_watcher.cxx b/plugins/pluginWinMF/internals/mf_display_watcher.cxx
new file mode 100644
index 0000000..62dbc5f
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_display_watcher.cxx
@@ -0,0 +1,160 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_display_watcher.h"
+#include "mf_utils.h"
+
+#include "tsk_debug.h"
+
+#include <assert.h>
+
+DisplayWatcher::DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr)
+: m_pDisplayControl(NULL)
+, m_hWnd(hWnd)
+, m_pWndProc(NULL)
+, m_bStarted(FALSE)
+, m_bFullScreen(FALSE)
+{
+ IMFGetService *pService = NULL;
+
+ CHECK_HR(hr = pMediaSink->QueryInterface(__uuidof(IMFGetService), (void**)&pService));
+ CHECK_HR(hr = pService->GetService(MR_VIDEO_RENDER_SERVICE, __uuidof(IMFVideoDisplayControl), (void**)&m_pDisplayControl));
+ CHECK_HR(hr = m_pDisplayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture));
+bail:
+ SafeRelease(&pService);
+}
+
+DisplayWatcher::~DisplayWatcher()
+{
+ Stop();
+
+ SafeRelease(&m_pDisplayControl);
+}
+
+HRESULT DisplayWatcher::Start()
+{
+ HRESULT hr = S_OK;
+ HWND hWnd = m_hWnd; // save()
+ CHECK_HR(hr = Stop());
+
+ if((m_hWnd = hWnd) && m_pDisplayControl)
+ {
+ CHECK_HR(hr = m_pDisplayControl->SetVideoWindow(hWnd));
+
+ BOOL ret = SetPropA(m_hWnd, "This", this);
+ assert(ret);
+
+#if _M_X64
+ m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)DisplayWatcher::WndProc);
+#else
+ m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)DisplayWatcher::WndProc);
+#endif
+
+ UpdatePosition(); // black screen if attached later
+ }
+ m_bStarted = TRUE;
+bail:
+ return hr;
+}
+
+HRESULT DisplayWatcher::SetFullscreen(BOOL bEnabled)
+{
+ if(m_pDisplayControl)
+ {
+ HRESULT hr = m_pDisplayControl->SetFullscreen(bEnabled);
+ m_bFullScreen = SUCCEEDED(hr);
+ return hr;
+ }
+
+ return E_FAIL;
+}
+
+HRESULT DisplayWatcher::SetHwnd(HWND hWnd)
+{
+ BOOL bWasStarted = m_bStarted;
+ Stop();
+ m_hWnd = hWnd;
+ if(bWasStarted)
+ {
+ return Start();
+ }
+ return S_OK;
+}
+
+HRESULT DisplayWatcher::Stop()
+{
+ if(m_hWnd && m_pWndProc)
+ {
+ // Restore
+
+#if _M_X64
+ SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)m_pWndProc);
+#else
+ SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)m_pWndProc);
+#endif
+ }
+ m_hWnd = NULL;
+ m_pWndProc = NULL;
+ m_bStarted = FALSE;
+ return S_OK;
+}
+
+void DisplayWatcher::UpdatePosition()
+{
+ if(m_pDisplayControl && m_hWnd)
+ {
+ RECT rcDst = { 0, 0, 0, 0 };
+ GetClientRect(m_hWnd, &rcDst);
+ m_pDisplayControl->SetVideoPosition(NULL, &rcDst);
+ }
+}
+
+// Subclassed window procedure installed by Start() via SetWindowLongPtr.
+// Recovers the owning DisplayWatcher from the "This" window property (set
+// in Start()) to resize the video on move/size and to leave fullscreen on
+// Escape. Always chains to DefWindowProc (NOT the saved original proc —
+// NOTE(review): this bypasses the host window's own handler while started).
+LRESULT CALLBACK DisplayWatcher::WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
+{
+	switch(uMsg)
+	{
+		case WM_CREATE:
+		case WM_SIZE:
+		case WM_MOVE:
+			{
+				DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
+				if(This)
+				{
+					This->UpdatePosition();
+				}
+				break;
+			}
+
+		case WM_CHAR:
+		case WM_KEYUP:
+			{
+				DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
+				if(This)
+				{
+					// 0x1B == VK_ESCAPE (WM_CHAR carries the char code,
+					// WM_KEYUP the virtual key code).
+					if(This->m_bFullScreen && (wParam == 0x1B || wParam == VK_ESCAPE))
+					{
+						This->SetFullscreen(FALSE);
+					}
+				}
+
+				break;
+			}
+	}
+
+	return DefWindowProc(hWnd, uMsg, wParam, lParam);
+}
diff --git a/plugins/pluginWinMF/internals/mf_display_watcher.h b/plugins/pluginWinMF/internals/mf_display_watcher.h
new file mode 100644
index 0000000..d41d6a6
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_display_watcher.h
@@ -0,0 +1,55 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_DISPLAY_WATCHER_H
+#define PLUGIN_WIN_MF_DISPLAY_WATCHER_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+#include <Evr.h>
+
+class DisplayWatcher
+{
+public:
+ DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr);
+ virtual ~DisplayWatcher();
+
+public:
+ HRESULT Start();
+ HRESULT SetFullscreen(BOOL bEnabled);
+ HRESULT SetHwnd(HWND hWnd);
+ HRESULT Stop();
+
+private:
+ void UpdatePosition();
+ static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+
+private:
+ IMFVideoDisplayControl *m_pDisplayControl;
+ HWND m_hWnd;
+ WNDPROC m_pWndProc;
+ BOOL m_bStarted;
+ BOOL m_bFullScreen;
+};
+
+#endif /* PLUGIN_WIN_MF_DISPLAY_WATCHER_H */
diff --git a/plugins/pluginWinMF/internals/mf_sample_grabber.cxx b/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
new file mode 100644
index 0000000..87aa6af
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
@@ -0,0 +1,135 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_sample_grabber.h"
+
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+#include <assert.h>
+
+// Create a new instance of the object.
+// On success *ppCB holds a new SampleGrabberCB (refcount 1, from the
+// private constructor); the caller owns the reference.
+HRESULT SampleGrabberCB::CreateInstance(const struct tmedia_producer_s* pcWrappedProducer, SampleGrabberCB **ppCB)
+{
+	assert(pcWrappedProducer);
+	if (ppCB == NULL)
+	{
+		return E_POINTER;
+	}
+
+	*ppCB = new (std::nothrow) SampleGrabberCB(pcWrappedProducer);
+
+	// FIX: check the allocation result (*ppCB), not the out-parameter
+	// address — the original test could never detect an OOM failure.
+	if (*ppCB == NULL)
+	{
+		return E_OUTOFMEMORY;
+	}
+	return S_OK;
+}
+
+STDMETHODIMP SampleGrabberCB::QueryInterface(REFIID riid, void** ppv)
+{
+ static const QITAB qit[] =
+ {
+ QITABENT(SampleGrabberCB, IMFSampleGrabberSinkCallback),
+ QITABENT(SampleGrabberCB, IMFClockStateSink),
+ { 0 }
+ };
+ return QISearch(this, qit, riid, ppv);
+}
+
+STDMETHODIMP_(ULONG) SampleGrabberCB::AddRef()
+{
+ return InterlockedIncrement(&m_cRef);
+}
+
+STDMETHODIMP_(ULONG) SampleGrabberCB::Release()
+{
+ ULONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0)
+ {
+ delete this;
+ }
+ return cRef;
+
+}
+
+// IMFClockStateSink methods.
+
+// In these example, the IMFClockStateSink methods do not perform any actions.
+// You can use these methods to track the state of the sample grabber sink.
+
+STDMETHODIMP SampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
+{
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
+ return S_OK;
+}
+
+STDMETHODIMP SampleGrabberCB::OnClockStop(MFTIME hnsSystemTime)
+{
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
+ return S_OK;
+}
+
+STDMETHODIMP SampleGrabberCB::OnClockPause(MFTIME hnsSystemTime)
+{
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
+ return S_OK;
+}
+
+STDMETHODIMP SampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime)
+{
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
+ return S_OK;
+}
+
+STDMETHODIMP SampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
+{
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
+ return S_OK;
+}
+
+// IMFSampleGrabberSink methods.
+
+STDMETHODIMP SampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock)
+{
+ TSK_DEBUG_INFO("SampleGrabberCB::OnSetPresentationClock");
+ return S_OK;
+}
+
+// Sample-grabber sink callback: forwards each captured/encoded buffer to the
+// wrapped tinymedia producer's encode callback.
+STDMETHODIMP SampleGrabberCB::OnProcessSample(
+    REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+    LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+    DWORD dwSampleSize)
+{
+	if (m_pWrappedProducer && TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback) {
+#if 1
+		if (m_bMuted) {
+			// Send zeros. Do not skip sending data to avoid NAT issues and session deconnection.
+			// Some TelePresence systems disconnect the session when the remote peer stops sending video data.
+			// NOTE(review): deliberately casts away const and writes into the
+			// buffer Media Foundation handed us — confirm MF permits mutating
+			// this buffer in-place.
+			memset((void*)pSampleBuffer, 0, dwSampleSize);
+		}
+#endif
+		TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback_data, pSampleBuffer, dwSampleSize);
+	}
+
+	return S_OK;
+}
+
+STDMETHODIMP SampleGrabberCB::OnShutdown()
+{
+ TSK_DEBUG_INFO("SampleGrabberCB::OnShutdown");
+ return S_OK;
+} \ No newline at end of file
diff --git a/plugins/pluginWinMF/internals/mf_sample_grabber.h b/plugins/pluginWinMF/internals/mf_sample_grabber.h
new file mode 100644
index 0000000..858f3c1
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_sample_grabber.h
@@ -0,0 +1,68 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_SAMPLE_GRABBER_H
+#define PLUGIN_WIN_MF_SAMPLE_GRABBER_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+
+//
+// Sample Grabber callback [Declaration]
+// http://msdn.microsoft.com/en-us/library/windows/desktop/hh184779(v=vs.85).aspx
+//
+// Sample-grabber sink callback wrapping a Doubango producer: every captured
+// sample is forwarded to the producer's encode callback (zeroed when muted).
+// Ref-counted COM object; construct via CreateInstance() only.
+class SampleGrabberCB : public IMFSampleGrabberSinkCallback
+{
+    bool m_bMuted;  // when true, OnProcessSample zero-fills the payload instead of skipping it
+    long m_cRef;    // COM reference count (starts at 1)
+    const struct tmedia_producer_s* m_pWrappedProducer; // not owned; its enc_cb receives the samples
+
+    // Private: enforce creation through CreateInstance().
+    SampleGrabberCB(const struct tmedia_producer_s* pcWrappedProducer) : m_cRef(1), m_bMuted(false), m_pWrappedProducer(pcWrappedProducer) {}
+
+public:
+    static HRESULT CreateInstance(const struct tmedia_producer_s* pcWrappedProducer, SampleGrabberCB **ppCB);
+
+    void SetMute(bool bMuted) { m_bMuted = bMuted; }
+
+    // IUnknown methods
+    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
+    STDMETHODIMP_(ULONG) AddRef();
+    STDMETHODIMP_(ULONG) Release();
+
+    // IMFClockStateSink methods
+    STDMETHODIMP OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset);
+    STDMETHODIMP OnClockStop(MFTIME hnsSystemTime);
+    STDMETHODIMP OnClockPause(MFTIME hnsSystemTime);
+    STDMETHODIMP OnClockRestart(MFTIME hnsSystemTime);
+    STDMETHODIMP OnClockSetRate(MFTIME hnsSystemTime, float flRate);
+
+    // IMFSampleGrabberSinkCallback methods
+    STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock);
+    STDMETHODIMP OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+        LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+        DWORD dwSampleSize);
+    STDMETHODIMP OnShutdown();
+};
+
+
+#endif /* PLUGIN_WIN_MF_SAMPLE_GRABBER_H */
diff --git a/plugins/pluginWinMF/internals/mf_sample_queue.cxx b/plugins/pluginWinMF/internals/mf_sample_queue.cxx
new file mode 100644
index 0000000..05c2bc6
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_sample_queue.cxx
@@ -0,0 +1,158 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_sample_queue.h"
+
+#include <assert.h>
+
+// Constructs an empty queue. The circular list uses a sentinel anchor node:
+// empty <=> anchor.next == &anchor.
+MFSampleQueue::MFSampleQueue()
+: m_nRefCount(1)
+, m_nCount(0)
+{
+    InitializeCriticalSection(&m_critSec);
+
+    m_anchor.next = &m_anchor;
+    m_anchor.prev = &m_anchor;
+}
+
+// Destructor: only reachable via Release() once the ref-count hits zero.
+MFSampleQueue::~MFSampleQueue()
+{
+    assert(m_nRefCount == 0);
+
+    Clear(); // releases any samples still queued
+
+    DeleteCriticalSection(&m_critSec);
+}
+
+// IUnknown::QueryInterface.
+// FIX: the previous version returned E_NOTIMPL without touching *ppv, which
+// violates the COM contract (out-pointer must be NULLed on failure, E_POINTER
+// for a NULL ppv, and IID_IUnknown must be answered since we implement it).
+STDMETHODIMP MFSampleQueue::QueryInterface(REFIID iid, void** ppv)
+{
+    if (ppv == NULL)
+    {
+        return E_POINTER;
+    }
+    if (iid == IID_IUnknown)
+    {
+        *ppv = static_cast<IUnknown*>(this);
+        AddRef();
+        return S_OK;
+    }
+    *ppv = NULL;
+    return E_NOINTERFACE;
+}
+
+// IUnknown::AddRef — thread-safe via interlocked increment.
+STDMETHODIMP_(ULONG) MFSampleQueue::AddRef()
+{
+    return InterlockedIncrement(&m_nRefCount);
+}
+
+// IUnknown::Release — deletes the object when the count reaches zero.
+STDMETHODIMP_(ULONG) MFSampleQueue::Release()
+{
+    ULONG uCount = InterlockedDecrement(&m_nRefCount);
+    if (uCount == 0)
+    {
+        delete this;
+    }
+    // For thread safety, return a temporary variable.
+    return uCount;
+}
+
+// Appends a sample at the tail of the queue (FIFO order).
+// The queue takes its own reference on "item"; it is released either by the
+// caller of Dequeue() (ownership transfer) or by Clear().
+HRESULT MFSampleQueue::Queue(IMFSample* item)
+{
+    if (item == NULL)
+    {
+        return E_POINTER;
+    }
+
+    // Allocate outside the lock; only the link operation is serialized.
+    Node *pNode = new (std::nothrow) Node(item);
+    if (pNode == NULL)
+    {
+        return E_OUTOFMEMORY;
+    }
+
+    item->AddRef();
+
+    EnterCriticalSection(&m_critSec);
+
+    // Insert between the current tail (pBefore) and the anchor (pAfter).
+    Node *pBefore = m_anchor.prev;
+
+    Node *pAfter = pBefore->next;
+
+    pBefore->next = pNode;
+    pAfter->prev = pNode;
+
+    pNode->prev = pBefore;
+    pNode->next = pAfter;
+
+    m_nCount++;
+
+    LeaveCriticalSection(&m_critSec);
+
+    return S_OK;
+}
+
+// Pops the oldest sample (list head). The queue's reference on the sample is
+// transferred to the caller, which must Release() *ppItem.
+// Returns E_FAIL when the queue is empty.
+HRESULT MFSampleQueue::Dequeue(IMFSample* *ppItem)
+{
+    if (ppItem == NULL)
+    {
+        return E_POINTER;
+    }
+
+    EnterCriticalSection(&m_critSec);
+
+    if (IsEmpty())
+    {
+        LeaveCriticalSection(&m_critSec);
+        return E_FAIL;
+    }
+
+    Node *pNode = m_anchor.next;
+
+    // The next node's previous is this node's previous.
+    pNode->next->prev = m_anchor.next->prev;
+
+    // The previous node's next is this node's next.
+    pNode->prev->next = pNode->next;
+
+    *ppItem = pNode->item;
+    delete pNode;
+
+    m_nCount--;
+
+    LeaveCriticalSection(&m_critSec);
+
+    return S_OK;
+}
+
+// Releases every queued sample, frees all nodes and resets the queue to empty.
+HRESULT MFSampleQueue::Clear()
+{
+    EnterCriticalSection(&m_critSec);
+
+    Node *n = m_anchor.next;
+
+    // Delete the nodes
+    while (n != &m_anchor)
+    {
+        if (n->item)
+        {
+            n->item->Release(); // drop the reference taken in Queue()
+        }
+
+        Node *tmp = n->next;
+        delete n;
+        n = tmp;
+    }
+
+    // Reset the anchor to point at itself
+    m_anchor.next = &m_anchor;
+    m_anchor.prev = &m_anchor;
+
+    m_nCount = 0;
+
+    LeaveCriticalSection(&m_critSec);
+
+    return S_OK;
+}
diff --git a/plugins/pluginWinMF/internals/mf_sample_queue.h b/plugins/pluginWinMF/internals/mf_sample_queue.h
new file mode 100644
index 0000000..b42ecde
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_sample_queue.h
@@ -0,0 +1,81 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_SAMPLE_QUEUE_H
+#define PLUGIN_WIN_MF_SAMPLE_QUEUE_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+
+// Thread-safe FIFO of IMFSample pointers, implemented as a circular
+// doubly-linked list with a sentinel anchor node. COM-style ref-counted
+// (count starts at 1); destroy via Release().
+class MFSampleQueue : public IUnknown
+{
+protected:
+
+    // Nodes in the linked list
+    struct Node
+    {
+        Node *prev;
+        Node *next;
+        IMFSample* item; // referenced by the queue while the node is linked
+
+        Node() : prev(NULL), next(NULL)
+        {
+        }
+
+        Node(IMFSample* item) : prev(NULL), next(NULL)
+        {
+            this->item = item;
+        }
+
+        IMFSample* Item() const { return item; }
+    };
+
+
+protected:
+    Node m_anchor;              // sentinel; anchor.next is the head, anchor.prev the tail
+    long m_nCount;              // number of queued samples
+    CRITICAL_SECTION m_critSec; // guards all list mutations
+
+private:
+    long m_nRefCount; // COM reference count
+
+public:
+
+    MFSampleQueue();
+    virtual ~MFSampleQueue();
+
+    // IUnknown
+    STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
+    STDMETHODIMP_(ULONG) AddRef();
+    STDMETHODIMP_(ULONG) Release();
+
+    HRESULT Queue(IMFSample* item);      // AddRefs item; appends at the tail
+    HRESULT Dequeue(IMFSample* *ppItem); // pops the head; caller must Release *ppItem
+    HRESULT Clear();                     // releases and discards all queued samples
+
+    // NOTE(review): these read shared state without taking the lock — callers
+    // should treat the returned values as advisory snapshots.
+    inline BOOL IsEmpty() const { return m_anchor.next == &m_anchor; }
+    inline long Count() { return m_nCount; }
+};
+
+
+#endif /* PLUGIN_WIN_MF_SAMPLE_QUEUE_H */ \ No newline at end of file
diff --git a/plugins/pluginWinMF/internals/mf_utils.cxx b/plugins/pluginWinMF/internals/mf_utils.cxx
new file mode 100644
index 0000000..d1f326c
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_utils.cxx
@@ -0,0 +1,2104 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "mf_utils.h"
+#include "mf_codec.h"
+
+#include "tinymedia/tmedia_common.h"
+
+#include "tsk_debug.h"
+
+#include <KS.h>/* KS.H must be included before codecapi.H */
+#include <Codecapi.h>
+#include <initguid.h>
+#include <wmcodecdsp.h>
+#include <d3d9.h>
+#include <assert.h>
+
+
+#ifdef _MSC_VER
+#pragma comment(lib, "strmiids.lib")
+#pragma comment(lib, "wmcodecdspuuid.lib")
+#pragma comment(lib, "d3d9")
+#endif
+
+#if !defined(PLUGIN_MF_DISABLE_CODECS)
+// Must be "0" to use "Microsoft"/"Intel Quick Sync" MFT codecs. Testing: When set to "1", libx264 and FFmpeg will be used.
+// Metropolis code (G2J.COM TelePresence client) has "PLUGIN_MF_DISABLE_CODECS=1" because of interop issues against Tandberg and Intel QuickSync H.264 implementations.
+# define PLUGIN_MF_DISABLE_CODECS 1
+#endif
+#if !defined(PLUGIN_MF_DISABLE_MS_H264_ENCODER)
+// MS H.264 encoder produces artifacts when bundled with the producer. Disable until we found why this happens.
+// What is strange is that NVIDIA CUDA H.264 decoder doesn't produce artifacts when decoding MS frames while FFmpeg and MS decoder do.
+// To encode with MS and decode with CUDA:
+// - Force "bMFEncoderIsRegistered" value to "FALSE" in plugin_win_mf_producer_video.cxx
+// Metropolis code (G2J.COM TelePresence) has "PLUGIN_MF_DISABLE_MS_H264_ENCODER=1" beacause Microsoft H.264 not fully tested against Tandberg, Polycom, Hartallo...
+# define PLUGIN_MF_DISABLE_MS_H264_ENCODER 1
+#endif
+
+#if !defined(PLUGIN_MF_DISABLE_ASYNC_DECODERS)
+// Not fully tested
+# define PLUGIN_MF_DISABLE_ASYNC_DECODERS 1
+#endif
+
+BOOL MFUtils::g_bStarted = FALSE;
+
+DWORD MFUtils::g_dwMajorVersion = -1;
+DWORD MFUtils::g_dwMinorVersion = -1;
+
+BOOL MFUtils::g_bLowLatencyH264Checked = FALSE;
+BOOL MFUtils::g_bLowLatencyH264Supported = FALSE;
+BOOL MFUtils::g_bLowLatencyH264SupportsMaxSliceSize = FALSE;
+
+BOOL MFUtils::g_bD3D9Checked = FALSE;
+BOOL MFUtils::g_bD3D9Supported = FALSE;
+
+const TOPOID MFUtils::g_ullTopoIdSinkMain = 111;
+const TOPOID MFUtils::g_ullTopoIdSinkPreview = 222;
+const TOPOID MFUtils::g_ullTopoIdSource = 333;
+const TOPOID MFUtils::g_ullTopoIdVideoProcessor = 444;
+
+// Preferred VideoSubTypes
+static const VideoSubTypeGuidPair PreferredVideoSubTypeGuidPairs[] =
+{
+ { tmedia_chroma_yuv420p, MFVideoFormat_I420 },
+ { tmedia_chroma_nv12, MFVideoFormat_NV12 },
+ { tmedia_chroma_uyvy422, MFVideoFormat_UYVY },
+ { tmedia_chroma_yuyv422, MFVideoFormat_YUY2 },
+ /* TODO: Add more YUV formats */
+ { tmedia_chroma_rgb565le, MFVideoFormat_RGB565 },
+ { tmedia_chroma_bgr24, MFVideoFormat_RGB24 },
+ { tmedia_chroma_rgb32, MFVideoFormat_RGB32 },
+};
+static const tsk_size_t PreferredVideoSubTypeGuidPairsCount = sizeof(PreferredVideoSubTypeGuidPairs)/sizeof(PreferredVideoSubTypeGuidPairs[0]);
+
+// Video Processor
+DEFINE_GUID(CLSID_VideoProcessorMFT,
+ 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, 0xc9, 0x82);
+
+// {4BE8D3C0-0515-4A37-AD55-E4BAE19AF471}
+DEFINE_GUID(CLSID_MF_INTEL_H264EncFilter, // Intel Quick Sync Encoder
+0x4be8d3c0, 0x0515, 0x4a37, 0xad, 0x55, 0xe4, 0xba, 0xe1, 0x9a, 0xf4, 0x71);
+
+// {0855C9AC-BC6F-4371-8954-671CCD4EC16F}
+DEFINE_GUID(CLSID_MF_INTEL_H264DecFilter, // Intel Quick Sync Decoder
+0x0855c9ac, 0xbc6f, 0x4371, 0x89, 0x54, 0x67, 0x1c, 0xcd, 0x4e, 0xc1, 0x6f);
+
+#if WINVER < 0x0602/* From "sdkddkver.h" and defines the SDK version not the host */
+// 6ca50344-051a-4ded-9779-a43305165e35
+DEFINE_GUID(CLSID_CMSH264EncoderMFT, // MS H.264 encoder
+0x6ca50344, 0x051a, 0x4ded, 0x97, 0x79, 0xa4, 0x33, 0x05, 0x16, 0x5e, 0x35);
+#endif /* WINVER */
+
+#define IsWin7_OrLater(dwMajorVersion, dwMinorVersion) ( (dwMajorVersion > 6) || ( (dwMajorVersion == 6) && (dwMinorVersion >= 1) ) )
+#define IsWin8_OrLater(dwMajorVersion, dwMinorVersion) ( (dwMajorVersion > 6) || ( (dwMajorVersion == 6) && (dwMinorVersion >= 2) ) )
+
+
+// One-shot COM + Media Foundation startup. Also snapshots the Windows version
+// (used by the Win7/Win8 gates in GetBestCodec). Subsequent calls return S_OK.
+HRESULT MFUtils::Startup()
+{
+    if(!g_bStarted)
+    {
+        HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+        if(SUCCEEDED(hr) || hr == 0x80010106) // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
+        {
+            hr = MFStartup(MF_VERSION);
+        }
+        g_bStarted = SUCCEEDED(hr);
+
+        // NOTE(review): GetVersionEx is deprecated and reports 6.2 on Win8.1+
+        // without an app manifest — the IsWin8_OrLater gates may misfire; confirm.
+        OSVERSIONINFO osvi;
+        ZeroMemory(&osvi, sizeof(OSVERSIONINFO));
+        osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+        GetVersionEx(&osvi);
+        g_dwMajorVersion = osvi.dwMajorVersion;
+        g_dwMinorVersion = osvi.dwMinorVersion;
+
+        return hr;
+    }
+    return S_OK;
+}
+
+// Marks the helper as stopped. NOTE(review): intentionally does NOT call
+// MFShutdown()/CoUninitialize() — presumably because other plugin instances
+// may still be using Media Foundation; confirm before "fixing".
+HRESULT MFUtils::Shutdown()
+{
+    if(g_bStarted)
+    {
+        g_bStarted = false;
+        return S_OK;
+    }
+    return S_OK;
+}
+
+// Probes Direct3D 9 support by creating a HAL device on the default adapter.
+// The result is computed once and cached in g_bD3D9Supported/g_bD3D9Checked.
+BOOL MFUtils::IsD3D9Supported()
+{
+    if (MFUtils::g_bD3D9Checked)
+    {
+        return MFUtils::g_bD3D9Supported; // cached result from a previous probe
+    }
+    MFUtils::g_bD3D9Checked = TRUE;
+    HRESULT hr = S_OK;
+    IDirect3D9* pD3D = NULL;
+    D3DDISPLAYMODE mode = { 0 };
+    D3DPRESENT_PARAMETERS pp = {0};
+    IDirect3DDevice9* pDevice = NULL;
+
+    CHECK_HR(hr = MFUtils::Startup());
+
+    if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
+    {
+        CHECK_HR(hr = E_OUTOFMEMORY);
+    }
+
+    hr = pD3D->GetAdapterDisplayMode(
+        D3DADAPTER_DEFAULT,
+        &mode
+        );
+    if (FAILED(hr))
+    {
+        goto bail;
+    }
+
+    // Check the current display format can be rendered windowed in X8R8G8B8.
+    hr = pD3D->CheckDeviceType(
+        D3DADAPTER_DEFAULT,
+        D3DDEVTYPE_HAL,
+        mode.Format,
+        D3DFMT_X8R8G8B8,
+        TRUE // windowed
+        );
+    if (FAILED(hr))
+    {
+        goto bail;
+    }
+    pp.BackBufferFormat = D3DFMT_X8R8G8B8;
+    pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+    pp.Windowed = TRUE;
+    pp.hDeviceWindow = GetDesktopWindow();
+    hr = pD3D->CreateDevice(
+        D3DADAPTER_DEFAULT,
+        D3DDEVTYPE_HAL,
+        pp.hDeviceWindow,
+        D3DCREATE_HARDWARE_VERTEXPROCESSING,
+        &pp,
+        &pDevice
+        );
+    if (FAILED(hr))
+    {
+        goto bail;
+    }
+
+    // Everythings is OK
+    MFUtils::g_bD3D9Supported = TRUE;
+    TSK_DEBUG_INFO("D3D9 supported");
+
+bail:
+    if (!MFUtils::g_bD3D9Supported) {
+        TSK_DEBUG_WARN("D3D9 not supported");
+    }
+    SafeRelease(&pDevice);
+    SafeRelease(&pD3D);
+    return MFUtils::g_bD3D9Supported;
+}
+
+// Probes whether a usable low-latency H.264 MFT encoder AND decoder are both
+// available, by creating each and initializing them for 1080p@30. The result
+// (and max-slice-size support) is cached; always FALSE when codecs are disabled.
+BOOL MFUtils::IsLowLatencyH264Supported()
+{
+    if(MFUtils::g_bLowLatencyH264Checked)
+    {
+        return MFUtils::g_bLowLatencyH264Supported; // cached
+    }
+
+#if PLUGIN_MF_DISABLE_CODECS
+    MFUtils::g_bLowLatencyH264Checked = TRUE;
+    MFUtils::g_bLowLatencyH264Supported = FALSE;
+#else
+    Startup();
+
+    HRESULT hr = S_OK;
+    IMFTransform *pEncoderMFT = NULL;
+    IMFTransform *pDecoderMFT = NULL;
+    MFCodecVideoH264* pEncoderCodec = NULL;
+    MFCodecVideoH264* pDecoderCodec = NULL;
+
+    static const BOOL IsEncoderYes = TRUE;
+
+    // Encoder
+    hr = MFUtils::GetBestCodec(IsEncoderYes, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pEncoderMFT);
+    if(FAILED(hr))
+    {
+        TSK_DEBUG_INFO("No low latency H.264 encoder");
+        goto bail;
+    }
+
+    // Decoder
+    hr = MFUtils::GetBestCodec(!IsEncoderYes, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pDecoderMFT);
+    if(FAILED(hr))
+    {
+        TSK_DEBUG_INFO("No low latency H.264 decoder");
+        goto bail;
+    }
+
+    // Make sure both encoder and decoder are working well. Check encoding/decoding 1080p@30 would work.
+
+    TSK_DEBUG_INFO("Probing H.264 MFT encoder...");
+    pEncoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder, pEncoderMFT);
+    if(!pEncoderCodec)
+    {
+        CHECK_HR(hr = E_FAIL);
+    }
+    CHECK_HR(hr = pEncoderCodec->Initialize(
+        30, // FPS
+        1920, // WIDTH
+        1080, // HEIGHT
+        tmedia_get_video_bandwidth_kbps_2(1920, 1080, 30) * 1024) // BITRATE
+        );
+    CHECK_HR(pEncoderCodec->IsSetSliceMaxSizeInBytesSupported(MFUtils::g_bLowLatencyH264SupportsMaxSliceSize));
+
+    TSK_DEBUG_INFO("Probing H.264 MFT decoder...");
+    pDecoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder, pDecoderMFT);
+    if(!pDecoderCodec)
+    {
+        CHECK_HR(hr = E_FAIL);
+    }
+    CHECK_HR(hr = pDecoderCodec->Initialize(
+        30, // FPS
+        1920, // WIDTH
+        1080 // HEIGHT
+        ));
+
+bail:
+    MFUtils::g_bLowLatencyH264Checked = TRUE;
+    MFUtils::g_bLowLatencyH264Supported = SUCCEEDED(hr) ? TRUE : FALSE;
+    SafeRelease(&pEncoderMFT);
+    SafeRelease(&pEncoderCodec);
+    SafeRelease(&pDecoderMFT);
+    SafeRelease(&pDecoderCodec);
+#endif /* PLUGIN_MF_DISABLE_CODECS */
+
+    return MFUtils::g_bLowLatencyH264Supported;
+}
+
+// TRUE only when low-latency H.264 is supported AND the probed encoder accepts
+// CODECAPI max-slice-size (flag filled in by IsLowLatencyH264Supported()).
+BOOL MFUtils::IsLowLatencyH264SupportsMaxSliceSize()
+{
+    return MFUtils::IsLowLatencyH264Supported() && MFUtils::g_bLowLatencyH264SupportsMaxSliceSize;
+}
+
+// Reports whether an MFT is asynchronous (MF_TRANSFORM_ASYNC attribute).
+// Never fails once the pointer checks pass: if the attribute store or the
+// attribute itself is missing, the MFT is reported as synchronous.
+// FIX: the IMFAttributes returned by GetAttributes() was never released
+// (reference leak on every call; sibling UnlockAsyncMFT releases it).
+HRESULT MFUtils::IsAsyncMFT(
+    IMFTransform *pMFT, // The MFT to check
+    BOOL* pbIsAsync // Whether the MFT is Async
+    )
+{
+    if(!pbIsAsync || !pMFT)
+    {
+        return E_POINTER;
+    }
+
+    IMFAttributes *pAttributes = NULL;
+    UINT32 nIsAsync = 0;
+    HRESULT hr = S_OK;
+
+    hr = pMFT->GetAttributes(&pAttributes);
+    if(SUCCEEDED(hr))
+    {
+        hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nIsAsync);
+    }
+
+    // Never fails: just say not Async
+    CHECK_HR(hr = S_OK);
+
+    *pbIsAsync = !!nIsAsync;
+
+bail:
+    SafeRelease(&pAttributes); // FIX: was leaked
+    return hr;
+}
+
+// Unlocks an asynchronous MFT for use (sets MF_TRANSFORM_ASYNC_UNLOCK) as
+// required by the MF async-MFT protocol. Best-effort: missing attribute
+// store or attribute is treated as "nothing to unlock" and returns S_OK.
+HRESULT MFUtils::UnlockAsyncMFT(
+    IMFTransform *pMFT // The MFT to unlock
+    )
+{
+    IMFAttributes *pAttributes = NULL;
+    UINT32 nValue = 0;
+    HRESULT hr = S_OK;
+
+    hr = pMFT->GetAttributes(&pAttributes);
+    if(FAILED(hr))
+    {
+        hr = S_OK; // no attribute store: not async, nothing to do
+        goto bail;
+    }
+
+    hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nValue);
+    if(FAILED(hr))
+    {
+        hr = S_OK; // attribute absent: synchronous MFT
+        goto bail;
+    }
+
+    if(nValue == TRUE)
+    {
+        CHECK_HR(hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE));
+    }
+
+bail:
+    SafeRelease(&pAttributes);
+    return hr;
+}
+//-------------------------------------------------------------------
+// CreatePCMAudioType
+//
+// Creates a media type that describes an uncompressed PCM audio
+// format.
+//-------------------------------------------------------------------
+
+// Builds an uncompressed PCM audio IMFMediaType from rate/bits/channels.
+// On success *ppType receives a caller-owned reference.
+// NOTE(review): assumes bitsPerSample is a multiple of 8 (blockAlign math).
+HRESULT MFUtils::CreatePCMAudioType(
+    UINT32 sampleRate, // Samples per second
+    UINT32 bitsPerSample, // Bits per sample
+    UINT32 cChannels, // Number of channels
+    IMFMediaType **ppType // Receives a pointer to the media type.
+    )
+{
+    HRESULT hr = S_OK;
+
+    IMFMediaType *pType = NULL;
+
+    // Calculate derived values.
+    UINT32 blockAlign = cChannels * (bitsPerSample / 8);
+    UINT32 bytesPerSecond = blockAlign * sampleRate;
+
+    // Create the empty media type.
+    hr = MFCreateMediaType(&pType);
+
+    // Set attributes on the type.
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, cChannels);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sampleRate);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, blockAlign);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerSecond);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        // Return the type to the caller.
+        *ppType = pType;
+        (*ppType)->AddRef();
+    }
+
+    SafeRelease(&pType);
+    return hr;
+}
+
+
+//-------------------------------------------------------------------
+// CreateVideoType
+//
+// Creates a media type that describes a video subtype
+// format.
+//-------------------------------------------------------------------
+// Builds an uncompressed, progressive video IMFMediaType for the given subtype.
+// Width/height of 0 leaves MF_MT_FRAME_SIZE unset. On success *ppType receives
+// a caller-owned reference.
+HRESULT MFUtils::CreateVideoType(
+    const GUID* subType, // video subType
+    IMFMediaType **ppType, // Receives a pointer to the media type.
+    UINT32 unWidth, // Video width (0 to ignore)
+    UINT32 unHeight // Video height (0 to ignore)
+    )
+{
+    HRESULT hr = S_OK;
+
+    IMFMediaType *pType = NULL;
+
+    CHECK_HR(hr = MFCreateMediaType(&pType));
+
+    CHECK_HR(hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+
+    CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, *subType));
+
+    CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // UnCompressed
+
+    CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE)); // UnCompressed
+
+    CHECK_HR(hr = pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+    if(unWidth > 0 && unHeight > 0)
+    {
+        CHECK_HR(hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, unWidth, unHeight));
+    }
+
+    *ppType = pType;
+    (*ppType)->AddRef();
+
+bail:
+    SafeRelease(&pType);
+    return hr;
+}
+
+//-------------------------------------------------------------------
+// Name: ValidateVideoFormat
+// Description: Validates a media type for this sink.
+//-------------------------------------------------------------------
+//-------------------------------------------------------------------
+// Name: ValidateVideoFormat
+// Description: Validates a media type for this sink.
+// Accepts only major-type Video with progressive interlacing; the
+// subtype whitelist is currently compiled out (#if 0).
+// Returns MF_E_INVALIDMEDIATYPE on any violation.
+//-------------------------------------------------------------------
+HRESULT MFUtils::ValidateVideoFormat(IMFMediaType *pmt)
+{
+    GUID major_type = GUID_NULL;
+    GUID subtype = GUID_NULL;
+    MFVideoInterlaceMode interlace = MFVideoInterlace_Unknown;
+    UINT32 val = 0;
+    BOOL bFoundMatchingSubtype = FALSE;
+
+    HRESULT hr = S_OK;
+
+    // Major type must be video.
+    CHECK_HR(hr = pmt->GetGUID(MF_MT_MAJOR_TYPE, &major_type));
+
+    if (major_type != MFMediaType_Video)
+    {
+        CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
+    }
+
+    // Subtype must be one of the subtypes in our global list.
+
+    // Get the subtype GUID.
+    CHECK_HR(hr = pmt->GetGUID(MF_MT_SUBTYPE, &subtype));
+
+#if 0
+    // Look for the subtype in our list of accepted types.
+    for (DWORD i = 0; i < g_NumVideoSubtypes; i++)
+    {
+        if (subtype == *g_VideoSubtypes[i])
+        {
+            bFoundMatchingSubtype = TRUE;
+            break;
+        }
+    }
+    if (!bFoundMatchingSubtype)
+    {
+        CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
+    }
+#endif
+
+    // Video must be progressive frames.
+    CHECK_HR(hr = pmt->GetUINT32(MF_MT_INTERLACE_MODE, (UINT32*)&interlace));
+    if (interlace != MFVideoInterlace_Progressive)
+    {
+        CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
+    }
+
+bail:
+    return hr;
+}
+
+// Clones an encoded video media type into an uncompressed one: copies all
+// attributes (preserving extended color info), swaps the subtype, marks all
+// samples independent, and defaults PAR to 1:1 when missing.
+// On success *ppType receives a caller-owned reference.
+HRESULT MFUtils::ConvertVideoTypeToUncompressedType(
+    IMFMediaType *pType, // Pointer to an encoded video type.
+    const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
+    IMFMediaType **ppType // Receives a matching uncompressed video type.
+    )
+{
+    IMFMediaType *pTypeUncomp = NULL;
+
+    HRESULT hr = S_OK;
+    GUID majortype = { 0 };
+    MFRatio par = { 0 };
+
+    hr = pType->GetMajorType(&majortype);
+
+    if (majortype != MFMediaType_Video)
+    {
+        return MF_E_INVALIDMEDIATYPE;
+    }
+
+    // Create a new media type and copy over all of the items.
+    // This ensures that extended color information is retained.
+
+    if (SUCCEEDED(hr))
+    {
+        hr = MFCreateMediaType(&pTypeUncomp);
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        hr = pType->CopyAllItems(pTypeUncomp);
+    }
+
+    // Set the subtype.
+    if (SUCCEEDED(hr))
+    {
+        hr = pTypeUncomp->SetGUID(MF_MT_SUBTYPE, subtype);
+    }
+
+    // Uncompressed means all samples are independent.
+    if (SUCCEEDED(hr))
+    {
+        hr = pTypeUncomp->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+    }
+
+    // Fix up PAR if not set on the original type.
+    if (SUCCEEDED(hr))
+    {
+        hr = MFGetAttributeRatio(
+            pTypeUncomp,
+            MF_MT_PIXEL_ASPECT_RATIO,
+            (UINT32*)&par.Numerator,
+            (UINT32*)&par.Denominator
+            );
+
+        // Default to square pixels.
+        if (FAILED(hr))
+        {
+            hr = MFSetAttributeRatio(
+                pTypeUncomp,
+                MF_MT_PIXEL_ASPECT_RATIO,
+                1, 1
+                );
+        }
+    }
+
+    if (SUCCEEDED(hr))
+    {
+        *ppType = pTypeUncomp;
+        (*ppType)->AddRef();
+    }
+
+    SafeRelease(&pTypeUncomp);
+    return hr;
+}
+
+// Creates an IMFSample backed by a single memory buffer of cbData bytes.
+// On success *ppSample receives a caller-owned reference.
+HRESULT MFUtils::CreateMediaSample(
+    DWORD cbData, // Maximum buffer size
+    IMFSample **ppSample // Receives the sample
+)
+{
+    assert(ppSample);
+
+    HRESULT hr = S_OK;
+
+    IMFSample *pSample = NULL;
+    IMFMediaBuffer *pBuffer = NULL;
+
+    CHECK_HR(hr = MFCreateSample(&pSample));
+    CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
+    CHECK_HR(hr = pSample->AddBuffer(pBuffer)); // sample holds its own reference to the buffer
+
+    *ppSample = pSample;
+    (*ppSample)->AddRef();
+
+bail:
+    SafeRelease(&pSample);
+    SafeRelease(&pBuffer);
+    return hr;
+}
+
+// Gets the best encoder and decoder. Up to the caller to release the returned pointer
+HRESULT MFUtils::GetBestCodec(
+ BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
+ const GUID& mediaType, // The MediaType
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
+ IMFTransform **ppMFT // Receives the decoder/encoder transform
+ )
+{
+ assert(ppMFT);
+ assert(mediaType == MFMediaType_Video || mediaType == MFMediaType_Audio); // only audio and video codecs are support for now
+
+ *ppMFT = NULL;
+
+ HRESULT hr = S_OK;
+
+ if(outputFormat == MFVideoFormat_H264 || inputFormat == MFVideoFormat_H264)
+ {
+ if(bEncoder)
+ {
+ // Force using Intel Quick Sync Encoder
+ hr = CoCreateInstance(CLSID_MF_INTEL_H264EncFilter, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
+ if(SUCCEEDED(hr) && *ppMFT)
+ {
+ TSK_DEBUG_INFO("Using Intel Quick Sync encoder :)");
+ return hr;
+ }
+ TSK_DEBUG_INFO("Not using Intel Quick Sync encoder :(");
+ }
+ else
+ {
+#if !PLUGIN_MF_DISABLE_ASYNC_DECODERS // Intel Quick Sync decoder is asynchronous
+ // Force using Intel Quick Sync Decoder
+ hr = CoCreateInstance(CLSID_MF_INTEL_H264DecFilter, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
+#endif
+ if(SUCCEEDED(hr) && *ppMFT)
+ {
+ TSK_DEBUG_INFO("Using Intel Quick Sync decoder :)");
+ return hr;
+ }
+ TSK_DEBUG_INFO("Not using Intel Quick Sync decoder :(");
+ }
+ }
+
+ UINT32 count = 0;
+ BOOL bAsync = FALSE;
+ GUID guidActivateCLSID = GUID_NULL;
+
+ IMFActivate **ppActivate = NULL;
+
+ MFT_REGISTER_TYPE_INFO infoInput = { mediaType, inputFormat };
+ MFT_REGISTER_TYPE_INFO infoOutput = { mediaType, outputFormat };
+
+ UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
+ MFT_ENUM_FLAG_SYNCMFT |
+ MFT_ENUM_FLAG_ASYNCMFT |
+ MFT_ENUM_FLAG_LOCALMFT |
+ MFT_ENUM_FLAG_TRANSCODE_ONLY | // Otherwise Intel Quick Sync will not be listed
+ MFT_ENUM_FLAG_SORTANDFILTER;
+
+ hr = MFTEnumEx(
+ (mediaType == MFMediaType_Video) ? (bEncoder ? MFT_CATEGORY_VIDEO_ENCODER : MFT_CATEGORY_VIDEO_DECODER) : (bEncoder ? MFT_CATEGORY_AUDIO_ENCODER : MFT_CATEGORY_AUDIO_DECODER),
+ unFlags,
+ (inputFormat == GUID_NULL) ? NULL : &infoInput, // Input type
+ (outputFormat == GUID_NULL) ? NULL : &infoOutput, // Output type
+ &ppActivate,
+ &count
+ );
+
+ for(UINT32 i = 0; i < count; ++i)
+ {
+ SafeRelease(ppMFT);
+ hr = ppActivate[i]->GetGUID(MFT_TRANSFORM_CLSID_Attribute, &guidActivateCLSID);
+ if(FAILED(hr))
+ {
+ continue;
+ }
+
+ if(bEncoder)
+ {
+ // Encoder
+ if(guidActivateCLSID == CLSID_CMSH264EncoderMFT) // MS H.264 encoder ?
+ {
+ if(PLUGIN_MF_DISABLE_MS_H264_ENCODER)
+ {
+ // Microsoft H.264 encoder is disabled
+ TSK_DEBUG_INFO("MS H.264 encoder is disabled...skipping");
+ continue;
+ }
+ if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion))
+ {
+ // Microsoft H.264 encoder doesn't support low latency on Win7.
+ TSK_DEBUG_INFO("MS H.264 encoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
+ continue;
+ }
+ }
+ }
+ else
+ {
+ // Decoder
+ if(guidActivateCLSID == CLSID_CMSH264DecoderMFT) // MS H.264 decoder ?
+ {
+ if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion))
+ {
+ // Microsoft H.264 decoder doesn't support low latency on Win7.
+ TSK_DEBUG_INFO("MS H.264 decoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
+ continue;
+ }
+ }
+ }
+
+ hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppMFT));
+ if(SUCCEEDED(hr) && *ppMFT) // For now we just get the first one. FIXME: Give HW encoders/decoders higher priority.
+ {
+ if(bEncoder)
+ {
+ // Encoder
+
+ }
+ else
+ {
+ // Decoder
+#if PLUGIN_MF_DISABLE_ASYNC_DECODERS
+ hr = IsAsyncMFT(*ppMFT, &bAsync);
+ if(bAsync)
+ {
+ TSK_DEBUG_INFO("Skipping async decoder because not supported yet");
+ continue; // Async decoders not supported yet
+ }
+#endif
+ }
+ break;
+ }
+ }
+
+ for (UINT32 i = 0; i < count; i++)
+ {
+ ppActivate[i]->Release();
+ }
+ CoTaskMemFree(ppActivate);
+
+ return *ppMFT ? S_OK : MF_E_NOT_FOUND;
+}
+
+// Checks whether the Win8+ Video Processor MFT can be instantiated.
+// CoCreateInstance failure is reported as *pbSupported = FALSE, not as an error.
+HRESULT MFUtils::IsVideoProcessorSupported(BOOL *pbSupported)
+{
+    HRESULT hr = S_OK;
+    IMFTransform *pTransform = NULL;
+
+    if(!pbSupported)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+        CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pTransform));
+    *pbSupported = SUCCEEDED(hr);
+    if(FAILED(hr))
+    {
+        hr = S_OK; // not an error
+    }
+
+bail:
+    SafeRelease(&pTransform);
+    return hr;
+}
+
+// Enumerates video-processor MFTs matching the input/output formats and
+// activates the first one that works (hardware preferred via enum flags).
+// Caller must release *ppProcessor. Returns MF_E_NOT_FOUND when none activates.
+HRESULT MFUtils::GetBestVideoProcessor(
+    const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
+    const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
+    IMFTransform **ppProcessor // Receives the video processor
+    )
+{
+    assert(ppProcessor);
+
+    *ppProcessor = NULL;
+
+    HRESULT hr = S_OK;
+    UINT32 count = 0;
+
+    IMFActivate **ppActivate = NULL;
+
+    MFT_REGISTER_TYPE_INFO infoInput = { MFMediaType_Video, inputFormat };
+    MFT_REGISTER_TYPE_INFO infoOutput = { MFMediaType_Video, outputFormat };
+
+    UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
+        MFT_ENUM_FLAG_SYNCMFT  |
+        MFT_ENUM_FLAG_LOCALMFT |
+        MFT_ENUM_FLAG_SORTANDFILTER;
+
+    hr = MFTEnumEx(
+        MFT_CATEGORY_VIDEO_PROCESSOR,
+        unFlags,
+        &infoInput,      // Input type
+        &infoOutput,       // Output type
+        &ppActivate,
+        &count
+        );
+
+    for(UINT32 i = 0; i < count; ++i)
+    {
+        hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppProcessor));
+        if(SUCCEEDED(hr) && *ppProcessor)
+        {
+            break;
+        }
+        SafeRelease(ppProcessor);
+    }
+
+    for (UINT32 i = 0; i < count; i++)
+    {
+        ppActivate[i]->Release();
+    }
+    CoTaskMemFree(ppActivate);
+
+    return *ppProcessor ? S_OK : MF_E_NOT_FOUND;
+}
+
+// Add an transform node to a topology.
+HRESULT MFUtils::AddTransformNode(
+ IMFTopology *pTopology, // Topology.
+ IMFTransform *pMFT, // MFT.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+ )
+{
+ *ppNode = NULL;
+
+ IMFTopologyNode *pNode = NULL;
+ HRESULT hr = S_OK;
+
+ // Create the node.
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &pNode));
+ // Set the object pointer.
+ CHECK_HR(hr = pNode->SetObject(pMFT));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+ // Add the node to the topology.
+ CHECK_HR(hr = pTopology->AddNode(pNode));
+
+ // Return the pointer to the caller.
+ *ppNode = pNode;
+ (*ppNode)->AddRef();
+
+bail:
+ SafeRelease(&pNode);
+ return hr;
+}
+
+// Sets the IMFStreamSink pointer on an output node.
+HRESULT MFUtils::BindOutputNode(
+ IMFTopologyNode *pNode // The Node
+ )
+{
+ assert(pNode);
+
+ HRESULT hr = S_OK;
+ IUnknown *pNodeObject = NULL;
+ IMFActivate *pActivate = NULL;
+ IMFStreamSink *pStream = NULL;
+ IMFMediaSink *pSink = NULL;
+
+ // Get the node's object pointer.
+ CHECK_HR(hr = pNode->GetObject(&pNodeObject));
+
+ // The object pointer should be one of the following:
+ // 1. An activation object for the media sink.
+ // 2. The stream sink.
+
+ // If it's #2, then we're already done.
+
+ // First, check if it's an activation object.
+ CHECK_HR(hr = pNodeObject->QueryInterface(IID_PPV_ARGS(&pActivate)));
+
+ if (SUCCEEDED(hr))
+ {
+ DWORD dwStreamID = 0;
+
+ // The object pointer is an activation object.
+
+ // Try to create the media sink.
+ hr = pActivate->ActivateObject(IID_PPV_ARGS(&pSink));
+
+ // Look up the stream ID. (Default to zero.)
+
+ if (SUCCEEDED(hr))
+ {
+ dwStreamID = MFGetAttributeUINT32(pNode, MF_TOPONODE_STREAMID, 0);
+ }
+
+ // Now try to get or create the stream sink.
+
+ // Check if the media sink already has a stream sink with the requested ID.
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pSink->GetStreamSinkById(dwStreamID, &pStream);
+ if (FAILED(hr))
+ {
+ // Try to add a new stream sink.
+ hr = pSink->AddStreamSink(dwStreamID, NULL, &pStream);
+ }
+ }
+
+ // Replace the node's object pointer with the stream sink.
+ if (SUCCEEDED(hr))
+ {
+ hr = pNode->SetObject(pStream);
+ }
+ }
+ else
+ {
+ // Not an activation object. Is it a stream sink?
+ hr = pNodeObject->QueryInterface(IID_PPV_ARGS(&pStream));
+ }
+
+bail:
+ SafeRelease(&pNodeObject);
+ SafeRelease(&pActivate);
+ SafeRelease(&pStream);
+ SafeRelease(&pSink);
+ return hr;
+}
+
+// Add an output node to a topology.
+HRESULT MFUtils::AddOutputNode(
+    IMFTopology *pTopology,  // Topology.
+    IMFActivate *pActivate,  // Media sink activation object.
+    DWORD dwId,              // Identifier of the stream sink.
+    IMFTopologyNode **ppNode)// Receives the node pointer
+{
+    IMFTopologyNode *pNode = NULL;
+
+    HRESULT hr = S_OK;
+
+    // FIX: initialize the out parameter (as AddTransformNode does) so the
+    // caller never reads an indeterminate pointer when creation fails early.
+    *ppNode = NULL;
+
+    CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode));
+    CHECK_HR(hr = pNode->SetObject(pActivate));
+    CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+    CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+    CHECK_HR(hr = pTopology->AddNode(pNode));
+
+    // Return the pointer to the caller.
+    *ppNode = pNode;
+    (*ppNode)->AddRef();
+
+bail:
+    SafeRelease(&pNode);
+    return hr;
+}
+
+// Add a source node to a topology
+HRESULT MFUtils::AddSourceNode(
+    IMFTopology *pTopology,          // Topology.
+    IMFMediaSource *pSource,         // Media source.
+    IMFPresentationDescriptor *pPD,  // Presentation descriptor.
+    IMFStreamDescriptor *pSD,        // Stream descriptor.
+    IMFTopologyNode **ppNode         // Receives the node pointer.
+    )
+{
+    IMFTopologyNode *pNode = NULL;
+
+    HRESULT hr = S_OK;
+
+    // FIX: initialize the out parameter (as AddTransformNode does) so the
+    // caller never reads an indeterminate pointer when creation fails early.
+    *ppNode = NULL;
+
+    CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &pNode));
+    CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource));
+    CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD));
+    CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD));
+    CHECK_HR(hr = pTopology->AddNode(pNode));
+
+    // Return the pointer to the caller.
+    *ppNode = pNode;
+    (*ppNode)->AddRef();
+
+bail:
+    SafeRelease(&pNode);
+    return hr;
+}
+
+// Create the topology
+//
+// [source] -> (Transform) -> [SinkMain]
+// \-> (SinkPreview)
+//
+// Finds the first selected source stream whose major type matches the one of
+// 'pIputTypeMain', then builds: a source node, an optional tee + preview sink,
+// an optional video processor (or the discrete resizer/framerate/color DSPs
+// when the processor MFT is unavailable) and the main sink, and connects them.
+HRESULT MFUtils::CreateTopology(
+    IMFMediaSource *pSource,           // Media source
+    IMFTransform *pTransform,          // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
+    IMFActivate *pSinkActivateMain,    // Main sink (e.g. sample grabber or EVR).
+    IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
+    IMFMediaType *pIputTypeMain,       // Main sink input MediaType
+    IMFTopology **ppTopo               // Receives the newly created topology
+    )
+{
+    IMFTopology *pTopology = NULL;
+    IMFPresentationDescriptor *pPD = NULL;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFMediaTypeHandler *pHandler = NULL;
+    IMFTopologyNode *pNodeSource = NULL;
+    IMFTopologyNode *pNodeSinkMain = NULL;
+    IMFTopologyNode *pNodeSinkPreview = NULL;
+    IMFTopologyNode *pNodeTransform = NULL;
+    IMFTopologyNode *pNodeTee = NULL;
+    IMFMediaType *pMediaType = NULL;
+    IMFTransform *pVideoProcessor = NULL;
+    IMFTopologyNode *pNodeVideoProcessor = NULL;
+    IMFTransform *pConvFrameRate = NULL;
+    IMFTransform *pConvSize = NULL;
+    IMFTransform *pConvColor = NULL;
+    IMFTopologyNode *pNodeConvFrameRate = NULL;
+    IMFTopologyNode *pNodeConvSize = NULL;
+    IMFTopologyNode *pNodeConvColor = NULL;
+    IMFMediaType *pTransformInputType = NULL;
+    IMFMediaType *pSinkMainInputType = NULL;
+    const IMFTopologyNode *pcNodeBeforeSinkMain = NULL;
+
+    HRESULT hr = S_OK;
+    DWORD cStreams = 0;
+    BOOL bSourceFound = FALSE;
+    BOOL bSupportedSize = FALSE;
+    BOOL bSupportedFps = FALSE;
+    BOOL bSupportedFormat = FALSE;
+    BOOL bVideoProcessorSupported = FALSE;
+    GUID inputMajorType, inputSubType;
+
+    CHECK_HR(hr = IsVideoProcessorSupported(&bVideoProcessorSupported));
+    CHECK_HR(hr = pIputTypeMain->GetMajorType(&inputMajorType));
+
+    CHECK_HR(hr = MFCreateTopology(&pTopology));
+    CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+    CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+    for (DWORD i = 0; i < cStreams; i++)
+    {
+        BOOL fSelected = FALSE;
+        GUID majorType;
+
+        CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+        CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+        CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+        if (majorType == inputMajorType && fSelected)
+        {
+            CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pNodeSource));
+            CHECK_HR(hr = pNodeSource->SetTopoNodeID(MFUtils::g_ullTopoIdSource));
+            CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivateMain, 0, &pNodeSinkMain));
+            CHECK_HR(hr = pNodeSinkMain->SetTopoNodeID(MFUtils::g_ullTopoIdSinkMain));
+            CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkMain)); // To avoid MF_E_TOPO_SINK_ACTIVATES_UNSUPPORTED
+
+            //
+            // Create preview
+            //
+
+            if(pSinkActivatePreview)
+            {
+                CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivatePreview, 0, &pNodeSinkPreview));
+                CHECK_HR(hr = pNodeSinkPreview->SetTopoNodeID(MFUtils::g_ullTopoIdSinkPreview));
+                CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkPreview));
+
+                CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TEE_NODE, &pNodeTee));
+                CHECK_HR(hr = pTopology->AddNode(pNodeTee));
+            }
+
+            //
+            // Create converters
+            //
+
+            if(majorType == MFMediaType_Video)
+            {
+                // Even when size matches the topology could add a resizer which doesn't keep ratio when resizing while video processor does.
+                if(!bVideoProcessorSupported)
+                {
+                    hr = IsSupported(
+                        pPD,
+                        i,
+                        pIputTypeMain,
+                        &bSupportedSize,
+                        &bSupportedFps,
+                        &bSupportedFormat);
+                }
+
+                CHECK_HR(hr = pIputTypeMain->GetGUID(MF_MT_SUBTYPE, &inputSubType));
+
+                if(!bSupportedSize || !bSupportedFps || !bSupportedFormat)
+                {
+                    // Use video processor single MFT or 3 different MFTs
+                    if(!pVideoProcessor)
+                    {
+                        hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+                            CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pVideoProcessor));
+                    }
+                    if(!pVideoProcessor)
+                    {
+                        // Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) supports I420 only
+                        if(!bSupportedSize && !pConvSize && inputSubType == MFVideoFormat_I420)
+                        {
+                            hr = CoCreateInstance(CLSID_CResizerDMO, NULL,
+                                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvSize));
+                        }
+                        // Frame Rate Converter DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) supports neither NV12 nor I420
+                        /*if(!bSupportedFps && !pConvFrameRate)
+                        {
+                            hr = CoCreateInstance(CLSID_CFrameRateConvertDmo, NULL,
+                                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvFrameRate));
+                        }*/
+                        // Color Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819079(v=vs.85).aspx) supports both NV12 and I420
+                        if(!bSupportedFormat && !pConvColor)
+                        {
+                            hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
+                                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvColor));
+                        }
+                    }
+                }
+                else
+                {
+                    // MediaType supported
+                    CHECK_HR(hr = pHandler->SetCurrentMediaType(pIputTypeMain));
+                }
+
+                if(pVideoProcessor && !pNodeVideoProcessor)
+                {
+                    CHECK_HR(hr = AddTransformNode(pTopology, pVideoProcessor, 0, &pNodeVideoProcessor));
+                    CHECK_HR(hr = pNodeVideoProcessor->SetTopoNodeID(MFUtils::g_ullTopoIdVideoProcessor));
+                }
+                if(pConvColor && !pNodeConvColor)
+                {
+                    CHECK_HR(hr = AddTransformNode(pTopology, pConvColor, 0, &pNodeConvColor));
+                }
+                if(pConvFrameRate && !pNodeConvFrameRate)
+                {
+                    CHECK_HR(hr = AddTransformNode(pTopology, pConvFrameRate, 0, &pNodeConvFrameRate));
+                }
+                if(pConvSize && !pNodeConvSize)
+                {
+                    CHECK_HR(hr = AddTransformNode(pTopology, pConvSize, 0, &pNodeConvSize));
+                }
+            } // if(majorType == MFMediaType_Video)
+
+
+            //
+            // Set media type
+            //
+
+            if(pTransform)
+            {
+                CHECK_HR(hr = AddTransformNode(pTopology, pTransform, 0, &pNodeTransform));
+                hr = pTransform->GetInputCurrentType(0, &pTransformInputType);
+                if(FAILED(hr))
+                {
+                    pTransformInputType = pIputTypeMain;
+                    pTransformInputType->AddRef();
+                    hr = S_OK;
+                }
+                if(pVideoProcessor)
+                {
+                    CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pTransformInputType, 0));
+                }
+                else
+                {
+                    if(pConvColor)
+                    {
+                        /*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pTransformInputType, 0));
+                    }
+                    if(pConvFrameRate)
+                    {
+                        /*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pTransformInputType, 0));
+                    }
+                    if(pConvSize)
+                    {
+                        // Transform requires NV12
+                        //Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) doesn't support NV12
+                        //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pTransformInputType, 0));
+                    }
+                }
+            }
+            else
+            {
+                hr = pNodeSinkMain->GetInputPrefType(0, &pSinkMainInputType);
+                if(FAILED(hr))
+                {
+                    pSinkMainInputType = pIputTypeMain;
+                    pSinkMainInputType->AddRef();
+                    hr = S_OK;
+                }
+                if(SUCCEEDED(hr))
+                {
+                    if(pVideoProcessor)
+                    {
+                        CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pSinkMainInputType, 0));
+                    }
+                    else
+                    {
+                        //!\ MUST NOT SET OUTPUT TYPE
+                        if(pConvColor)
+                        {
+                            //*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pSinkMainInputType, 0));
+                        }
+                        if(pConvFrameRate)
+                        {
+                            //*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pSinkMainInputType, 0));
+                        }
+                        if(pConvSize)
+                        {
+                            //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pSinkMainInputType, 0));
+                        }
+                    }
+                }
+            }
+
+            //
+            // Connect
+            //
+
+            if(pNodeTee)
+            {
+                // Connect(Source -> Tee)
+                CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeTee, 0));
+
+                // Connect(Tee -> SinkPreview)
+                CHECK_HR(hr = pNodeTee->ConnectOutput(1, pNodeSinkPreview, 0));
+
+                // Connect(Tee ->(Processors)
+                if(pVideoProcessor)
+                {
+                    CHECK_HR(hr = pNodeTee->ConnectOutput(0, pNodeVideoProcessor, 0));
+                    pcNodeBeforeSinkMain = pNodeVideoProcessor;
+                }
+                else if(pNodeConvFrameRate || pNodeConvSize || pNodeConvColor)
+                {
+                    // FIX: arguments must be passed in the same order as the
+                    // source-branch call below, so that the last node in the
+                    // chain built by ConnectConverters is pNodeConvSize and
+                    // matches the pcNodeBeforeSinkMain selection that follows.
+                    CHECK_HR(hr = ConnectConverters(
+                        pNodeTee,
+                        0,
+                        pNodeConvFrameRate,
+                        pNodeConvSize,
+                        pNodeConvColor
+                        ));
+                    pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
+                }
+                else
+                {
+                    pcNodeBeforeSinkMain = pNodeTee;
+                }
+            }
+            else
+            {
+                // Connect(Source -> (Processors))
+                if(pVideoProcessor)
+                {
+                    CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeVideoProcessor, 0));
+                    pcNodeBeforeSinkMain = pNodeVideoProcessor;
+                }
+                // FIX: condition previously tested pNodeConvFrameRate twice
+                // and never pNodeConvSize.
+                else if(pNodeConvFrameRate || pNodeConvSize || pNodeConvColor)
+                {
+                    CHECK_HR(hr = ConnectConverters(
+                        pNodeSource,
+                        0,
+                        pNodeConvFrameRate,
+                        pNodeConvSize,
+                        pNodeConvColor
+                        ));
+                    pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
+                }
+                else
+                {
+                    pcNodeBeforeSinkMain = pNodeSource;
+                }
+            }
+
+
+            if(pNodeTransform)
+            {
+                // Connect(X->Transform)
+                CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeTransform, 0));
+                pcNodeBeforeSinkMain = pNodeTransform;
+            }
+
+            // Connect(X -> SinkMain)
+            CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeSinkMain, 0));
+
+            bSourceFound = TRUE;
+            break;
+        }
+        else
+        {
+            CHECK_HR(hr = pPD->DeselectStream(i));
+        }
+        SafeRelease(&pSD);
+        SafeRelease(&pHandler);
+    }
+
+    *ppTopo = pTopology;
+    (*ppTopo)->AddRef();
+
+bail:
+    SafeRelease(&pTopology);
+    SafeRelease(&pNodeSource);
+    SafeRelease(&pNodeSinkMain);
+    SafeRelease(&pNodeSinkPreview);
+    SafeRelease(&pNodeTransform);
+    SafeRelease(&pNodeTee);
+    SafeRelease(&pPD);
+    SafeRelease(&pSD);
+    SafeRelease(&pHandler);
+    SafeRelease(&pMediaType);
+    SafeRelease(&pTransformInputType);
+    SafeRelease(&pSinkMainInputType);
+
+    SafeRelease(&pVideoProcessor);
+    SafeRelease(&pNodeVideoProcessor);
+    SafeRelease(&pConvFrameRate);
+    SafeRelease(&pConvSize);
+    SafeRelease(&pConvColor);
+    SafeRelease(&pNodeConvFrameRate);
+    SafeRelease(&pNodeConvSize);
+    SafeRelease(&pNodeConvColor);
+
+    if(!bSourceFound)
+    {
+        TSK_DEBUG_ERROR("No source node found");
+        return E_NOT_SET;
+    }
+
+    return hr;
+}
+
+// Creates a fully loaded topology from the input partial topology.
+HRESULT MFUtils::ResolveTopology(
+    IMFTopology *pInputTopo,    // Partial topology to be resolved.
+    IMFTopology **ppOutputTopo, // Receives the completed topology. The caller must release the interface.
+    IMFTopology *pCurrentTopo /*= NULL*/ // Previous full topology; the loader can re-use its objects. May be NULL.
+    )
+{
+    assert(ppOutputTopo && pInputTopo);
+
+    IMFTopoLoader *pLoader = NULL;
+    HRESULT hr = S_OK;
+
+    *ppOutputTopo = NULL;
+
+    // Let the topology loader resolve the partial topology into a full one.
+    CHECK_HR(hr = MFCreateTopoLoader(&pLoader));
+    CHECK_HR(hr = pLoader->Load(pInputTopo, ppOutputTopo, pCurrentTopo));
+
+bail:
+    SafeRelease(&pLoader);
+    return hr;
+}
+
+// Retrieves the object attached to the topology node identified by 'qwTopoNodeID'.
+HRESULT MFUtils::FindNodeObject(
+    IMFTopology *pInputTopo, // The Topology containing the node to find
+    TOPOID qwTopoNodeID,     // The identifier for the node
+    void** ppObject          // Receives the Object
+    )
+{
+    assert(pInputTopo && ppObject);
+
+    HRESULT hr = S_OK;
+    IMFTopologyNode *pFoundNode = NULL;
+
+    *ppObject = NULL;
+
+    // Look the node up by its identifier, then hand back its object pointer.
+    CHECK_HR(hr = pInputTopo->GetNodeByID(qwTopoNodeID, &pFoundNode));
+    CHECK_HR(hr = pFoundNode->GetObject((IUnknown**)ppObject));
+
+bail:
+    SafeRelease(&pFoundNode);
+    return hr;
+}
+
+// Create an activation object for a renderer, based on the stream media type.
+HRESULT MFUtils::CreateMediaSinkActivate(
+    IMFStreamDescriptor *pSourceSD, // Pointer to the stream descriptor.
+    HWND hVideoWindow,              // Handle to the video clipping window.
+    IMFActivate **ppActivate
+)
+{
+    HRESULT hr = S_OK;
+    GUID guidMajorType;
+    IMFMediaTypeHandler *pHandler = NULL;
+    IMFActivate *pRendererActivate = NULL;
+
+    // Determine the stream's major type through its media type handler.
+    CHECK_HR(hr = pSourceSD->GetMediaTypeHandler(&pHandler));
+    CHECK_HR(hr = pHandler->GetMajorType(&guidMajorType));
+
+    // Create an IMFActivate object for the renderer, based on the media type.
+    if (guidMajorType == MFMediaType_Audio)
+    {
+        // Audio stream: create the audio renderer.
+        CHECK_HR(hr = MFCreateAudioRendererActivate(&pRendererActivate));
+    }
+    else if (guidMajorType == MFMediaType_Video)
+    {
+        // Video stream: create the video renderer bound to the given window.
+        CHECK_HR(hr = MFCreateVideoRendererActivate(hVideoWindow, &pRendererActivate));
+    }
+    else
+    {
+        // Unknown stream type.
+        // Optionally, you could deselect this stream instead of failing.
+        hr = E_FAIL;
+        goto bail;
+    }
+
+    // Return IMFActivate pointer to caller.
+    *ppActivate = pRendererActivate;
+    (*ppActivate)->AddRef();
+
+bail:
+    SafeRelease(&pHandler);
+    SafeRelease(&pRendererActivate);
+    return hr;
+}
+
+// Set source output media type
+// For every selected stream of 'pSource' whose major type matches the major
+// type of 'pMediaType', makes 'pMediaType' the current media type; all other
+// streams are deselected.
+HRESULT MFUtils::SetMediaType(
+    IMFMediaSource *pSource, // Media source.
+    IMFMediaType* pMediaType // Media Type.
+    )
+{
+    assert(pSource && pMediaType);
+
+    IMFPresentationDescriptor *pPD = NULL;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFMediaTypeHandler *pHandler = NULL;
+
+    HRESULT hr = S_OK;
+    DWORD cStreams = 0;
+    GUID inputMajorType;
+
+    CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+    CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+    CHECK_HR(hr = pMediaType->GetMajorType(&inputMajorType));
+
+    for (DWORD i = 0; i < cStreams; i++)
+    {
+        BOOL fSelected = FALSE;
+        GUID majorType;
+
+        CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+        CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+        CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+        if (majorType == inputMajorType && fSelected)
+        {
+            // Matching selected stream: apply the requested media type.
+            CHECK_HR(hr = pHandler->SetCurrentMediaType(pMediaType));
+        }
+        else
+        {
+            // Non-matching (or already unselected) stream: deselect it.
+            CHECK_HR(hr = pPD->DeselectStream(i));
+        }
+        // Release per-stream interfaces before the next iteration; the bail
+        // path below releases whatever iteration was in flight on failure.
+        SafeRelease(&pSD);
+        SafeRelease(&pHandler);
+    }
+
+
+bail:
+    SafeRelease(&pPD);
+    SafeRelease(&pSD);
+    SafeRelease(&pHandler);
+
+    return hr;
+}
+
+// Builds a playback branch (source node -> renderer output node) for every
+// selected stream of 'pSource' and adds it to 'pTopology'. Video streams are
+// rendered into 'hVideoWnd' (see CreateMediaSinkActivate).
+HRESULT MFUtils::SetVideoWindow(
+    IMFTopology *pTopology,  // Topology.
+    IMFMediaSource *pSource, // Media source.
+    HWND hVideoWnd           // Window for video playback.
+    )
+{
+    HRESULT hr = S_OK;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFPresentationDescriptor *pPD = NULL;
+    IMFActivate *pSinkActivate = NULL;
+    IMFTopologyNode *pSourceNode = NULL;
+    IMFTopologyNode *pOutputNode = NULL;
+    DWORD cStreams = 0, iStream;
+
+    CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+    CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+    for(iStream = 0; iStream < cStreams; ++iStream)
+    {
+        BOOL fSelected = FALSE;
+
+        CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(iStream, &fSelected, &pSD));
+
+        if (fSelected)
+        {
+            // Create the media sink activation object.
+            CHECK_HR(hr = CreateMediaSinkActivate(pSD, hVideoWnd, &pSinkActivate));
+            // Add a source node for this stream.
+            CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pSourceNode));
+            // Create the output node for the renderer.
+            CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, &pOutputNode));
+            // Connect the source node to the output node.
+            CHECK_HR(hr = pSourceNode->ConnectOutput(0, pOutputNode, 0));
+        }
+        // else: If not selected, don't add the branch.
+    }
+
+bail:
+    SafeRelease(&pPD);
+    SafeRelease(&pSD);
+    SafeRelease(&pSinkActivate);
+    SafeRelease(&pSourceNode);
+    SafeRelease(&pOutputNode);
+    return hr;
+}
+
+// Run the session
+// Sets the topology on the session, starts it, then peeks at the first queued
+// event (if any) to detect an immediate session error.
+HRESULT MFUtils::RunSession(
+    IMFMediaSession *pSession, // Session to run
+    IMFTopology *pTopology     // The toppology
+    )
+{
+    assert(pSession && pTopology);
+
+    IMFMediaEvent *pEvent = NULL;
+
+    PROPVARIANT var;
+    PropVariantInit(&var);
+
+    MediaEventType met;
+    HRESULT hrStatus = S_OK;
+    HRESULT hr = S_OK;
+    CHECK_HR(hr = pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pTopology)); // MFSESSION_SETTOPOLOGY_IMMEDIATE required to update (reload) topology when media type change
+    CHECK_HR(hr = pSession->Start(&GUID_NULL, &var));
+
+    // Check first event
+    hr = pSession->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pEvent);
+    if(hr == MF_E_NO_EVENTS_AVAILABLE || hr == MF_E_MULTIPLE_SUBSCRIBERS){ // MF_E_MULTIPLE_SUBSCRIBERS means already listening
+        hr = S_OK;
+        goto bail;
+    }
+    if(pEvent) {
+        CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+    }
+    else {
+        hrStatus = hr;
+    }
+    if (FAILED(hrStatus))
+    {
+        // FIX: 'pEvent' may be NULL here (GetEvent failed with a code other
+        // than the two handled above) — dereferencing it crashed. Only query
+        // the event type when an event was actually delivered.
+        if (pEvent) {
+            CHECK_HR(hr = pEvent->GetType(&met));
+            TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+        }
+        hr = hrStatus;
+        goto bail;
+    }
+
+bail:
+    SafeRelease(&pEvent);
+    return hr;
+}
+
+// Stop session
+HRESULT MFUtils::ShutdownSession(
+    IMFMediaSession *pSession, // The Session
+    IMFMediaSource *pSource    // Source to shutdown (optional)
+    )
+{
+    // Shutdown order matters: the source MUST be stopped and shut down
+    // before the session itself.
+    if (pSource)
+    {
+        pSource->Stop();
+        pSource->Shutdown();
+    }
+    if (pSession)
+    {
+        pSession->Shutdown();
+    }
+    return S_OK;
+}
+
+// Pause session
+HRESULT MFUtils::PauseSession(
+    IMFMediaSession *pSession, // The session
+    IMFMediaSource *pSource    // Source to pause (optional)
+    )
+{
+    if (!pSession)
+    {
+        return E_INVALIDARG;
+    }
+    // Pause the source first (result intentionally ignored), then the session.
+    if (pSource)
+    {
+        pSource->Pause();
+    }
+    return pSession->Pause();
+}
+
+// Returns -1 if none is supported
+// Scans every selected stream of 'pSource' with major type 'mediaType' and
+// returns the index (into 'subTypes') of the first preferred subtype that one
+// of the streams can deliver. The outer loop iterates preferences in order,
+// so a lower returned index means a more preferred subtype.
+INT MFUtils::GetSupportedSubTypeIndex(
+    IMFMediaSource *pSource, // The source
+    const GUID& mediaType,   // The MediaType
+    const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
+    )
+{
+    assert(pSource);
+
+    IMFPresentationDescriptor *pPD = NULL;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFMediaTypeHandler *pHandler = NULL;
+    IMFMediaType *pMediaType = NULL;
+
+    INT nIndex = -1;
+    HRESULT hr = S_OK;
+    DWORD cStreams = 0, cMediaTypesCount;
+    GUID majorType, subType;
+    BOOL fSelected;
+
+    CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+    CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+    // Preference order first: stop all three loops as soon as a match is found
+    // (every loop condition re-checks nIndex == -1).
+    for (UINT subTypesIndex = 0; subTypesIndex < subTypesCount && nIndex == -1; ++subTypesIndex)
+    {
+        for (DWORD cStreamIndex = 0; cStreamIndex < cStreams && nIndex == -1; ++cStreamIndex)
+        {
+            fSelected = FALSE;
+            CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
+            if(fSelected)
+            {
+                CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+                CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+                if(majorType == mediaType)
+                {
+                    CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+                    // Walk every media type the stream advertises, comparing
+                    // its subtype GUID against the current preference.
+                    for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount && nIndex == -1; ++cMediaTypesIndex)
+                    {
+                        CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+                        CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+                        if (subTypes[subTypesIndex].fourcc == subType)
+                        {
+                            nIndex = subTypesIndex;
+                            break;
+                        }
+                        SafeRelease(&pMediaType);
+                    }
+                }
+            }
+
+            // Release per-stream interfaces before the next iteration.
+            SafeRelease(&pSD);
+            SafeRelease(&pHandler);
+        }
+    }
+
+bail:
+    SafeRelease(&pMediaType);
+    SafeRelease(&pPD);
+    SafeRelease(&pSD);
+    SafeRelease(&pHandler);
+
+    return nIndex;
+}
+
+// Checks whether the stream at 'cStreamIndex' natively offers a media type
+// matching the requested size, frame rate and subtype. All three flags are
+// set TRUE only when a single advertised media type matches all criteria.
+HRESULT MFUtils::IsSupported(
+    IMFPresentationDescriptor *pPD,
+    DWORD cStreamIndex,
+    UINT32 nWidth,
+    UINT32 nHeight,
+    UINT32 nFps,
+    const GUID& guidFormat,
+    BOOL* pbSupportedSize,
+    BOOL* pbSupportedFps,
+    BOOL* pbSupportedFormat
+    )
+{
+    HRESULT hr = S_OK;
+
+    BOOL fSelected = FALSE;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFMediaTypeHandler *pHandler = NULL;
+    IMFMediaType *pMediaType = NULL;
+    UINT32 _nWidth = 0, _nHeight = 0, numeratorFps = 0, denominatorFps = 0;
+    GUID subType;
+    DWORD cMediaTypesCount;
+
+    if(!pPD || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    *pbSupportedSize = FALSE;
+    *pbSupportedFps = FALSE;
+    *pbSupportedFormat = FALSE;
+
+    CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
+    if(fSelected)
+    {
+        CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+        CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+        for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex)
+        {
+            CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+            CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
+            CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+            // FIX: also guard against a zero denominator (division below),
+            // and reset 'hr' after applying defaults — otherwise the handled
+            // frame-rate failure leaked into the function's return value.
+            if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps)) || denominatorFps == 0)
+            {
+                numeratorFps = 30;
+                denominatorFps = 1;
+                hr = S_OK;
+            }
+
+            // all must match for the same stream
+            if(_nWidth == nWidth && _nHeight == nHeight && subType == guidFormat && (numeratorFps/denominatorFps) == nFps)
+            {
+                *pbSupportedSize = TRUE;
+                *pbSupportedFormat = TRUE;
+                *pbSupportedFps = TRUE;
+                break;
+            }
+
+            SafeRelease(&pMediaType);
+        }
+        SafeRelease(&pHandler);
+    }
+
+bail:
+    SafeRelease(&pSD);
+    SafeRelease(&pHandler);
+    SafeRelease(&pMediaType);
+
+    return hr;
+}
+
+// Convenience overload: extracts size, frame rate and subtype from
+// 'pMediaType' and forwards to the explicit-parameter IsSupported().
+HRESULT MFUtils::IsSupported(
+    IMFPresentationDescriptor *pPD,
+    DWORD cStreamIndex,
+    IMFMediaType* pMediaType,
+    BOOL* pbSupportedSize,
+    BOOL* pbSupportedFps,
+    BOOL* pbSupportedFormat
+    )
+{
+    HRESULT hr = S_OK;
+
+    UINT32 nWidth = 0, nHeight = 0, nFps = 0, numeratorFps = 30, denominatorFps = 1;
+    GUID subType;
+
+    if(!pPD || !pMediaType || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &nWidth, &nHeight));
+    CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+    // FIX: also fall back to defaults on a zero denominator — the unguarded
+    // (numeratorFps / denominatorFps) below would otherwise divide by zero.
+    if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps)) || denominatorFps == 0)
+    {
+        numeratorFps = 30;
+        denominatorFps = 1;
+    }
+
+    CHECK_HR(hr = IsSupported(
+        pPD,
+        cStreamIndex,
+        nWidth,
+        nHeight,
+        (numeratorFps / denominatorFps),
+        subType,
+        pbSupportedSize,
+        pbSupportedFps,
+        pbSupportedFormat
+        ));
+bail:
+    return hr;
+}
+
+// Resolves the effective input media type of 'pNode' (activating the node
+// object if needed, then trying media sink / transform / preferred type in
+// that order) and checks it against the source stream via IsSupported().
+HRESULT MFUtils::IsSupportedByInput(
+    IMFPresentationDescriptor *pPD,
+    DWORD cStreamIndex,
+    IMFTopologyNode *pNode,
+    BOOL* pbSupportedSize,
+    BOOL* pbSupportedFps,
+    BOOL* pbSupportedFormat
+    )
+{
+    HRESULT hr = S_OK;
+
+    IMFMediaType *pMediaType = NULL;
+    IUnknown* pObject = NULL;
+    IMFActivate *pActivate = NULL;
+    IMFMediaSink *pMediaSink = NULL;
+    IMFTransform *pTransform = NULL;
+    IMFStreamSink *pStreamSink = NULL;
+    IMFMediaTypeHandler *pHandler = NULL;
+
+    if(!pPD || !pNode || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    CHECK_HR(hr = pNode->GetObject(&pObject));
+    // If the node holds an activation object, activate the real sink/MFT.
+    hr = pObject->QueryInterface(IID_PPV_ARGS(&pActivate));
+    if(SUCCEEDED(hr))
+    {
+        SafeRelease(&pObject);
+        hr = pActivate->ActivateObject(IID_IMFMediaSink, (void**)&pObject);
+        if(FAILED(hr))
+        {
+            hr = pActivate->ActivateObject(IID_IMFTransform, (void**)&pObject);
+        }
+    }
+
+    if(!pObject)
+    {
+        CHECK_HR(hr = E_NOINTERFACE);
+    }
+
+    hr = pObject->QueryInterface(IID_PPV_ARGS(&pMediaSink));
+    if(FAILED(hr))
+    {
+        hr = pObject->QueryInterface(IID_PPV_ARGS(&pTransform));
+    }
+
+    if(pMediaSink)
+    {
+        // Media sink: use the current type of its first stream sink.
+        CHECK_HR(hr = pMediaSink->GetStreamSinkByIndex(0, &pStreamSink));
+        CHECK_HR(hr = pStreamSink->GetMediaTypeHandler(&pHandler));
+        CHECK_HR(hr = pHandler->GetCurrentMediaType(&pMediaType));
+    }
+    else if(pTransform)
+    {
+        // Transform: use its current input type.
+        CHECK_HR(hr = pTransform->GetInputCurrentType(0, &pMediaType));
+    }
+    else
+    {
+        // Fallback: use the node's preferred input type.
+        CHECK_HR(hr = pNode->GetInputPrefType(0, &pMediaType));
+    }
+
+    CHECK_HR(hr = IsSupported(
+        pPD,
+        cStreamIndex,
+        pMediaType,
+        pbSupportedSize,
+        pbSupportedFps,
+        pbSupportedFormat
+        ));
+
+bail:
+    SafeRelease(&pObject);
+    SafeRelease(&pActivate);
+    // FIX: pMediaSink and pTransform were QI'd (AddRef'd) above but never
+    // released, leaking a reference on every call.
+    SafeRelease(&pMediaSink);
+    SafeRelease(&pTransform);
+    SafeRelease(&pMediaType);
+    SafeRelease(&pStreamSink);
+    SafeRelease(&pHandler);
+    return hr;
+}
+
+// Chains the non-NULL converter nodes off output 'dwOutputIndex' of 'pNode'.
+// Connection order (by parameter name, skipping NULLs, chain stays contiguous):
+//   pNode -> pNodeConvFrameRate -> pNodeConvSize -> pNodeConvColor
+// NOTE(review): the parameter declaration order (...Color before ...Size) does
+// NOT match the connection order above — callers passing arguments
+// positionally must take care which physical converter ends up last.
+HRESULT MFUtils::ConnectConverters(
+    IMFTopologyNode *pNode,
+    DWORD dwOutputIndex,
+    IMFTopologyNode *pNodeConvFrameRate,
+    IMFTopologyNode *pNodeConvColor,
+    IMFTopologyNode *pNodeConvSize
+    )
+{
+    HRESULT hr = S_OK;
+
+    if(!pNode)
+    {
+        CHECK_HR(hr = E_POINTER);
+    }
+
+    if(pNodeConvFrameRate)
+    {
+        // pNode -> FrameRate ...
+        CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvFrameRate, 0));
+        if(pNodeConvSize)
+        {
+            // ... -> Size (-> Color)
+            CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvSize, 0));
+            if(pNodeConvColor)
+            {
+                CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
+            }
+        }
+        else
+        {
+            // ... -> Color (no Size node)
+            if(pNodeConvColor)
+            {
+                CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvColor, 0));
+            }
+        }
+    }
+    else
+    {
+        if(pNodeConvSize)
+        {
+            // pNode -> Size (-> Color)
+            CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvSize, 0));
+            if(pNodeConvColor)
+            {
+                CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
+            }
+        }
+        else
+        {
+            // pNode -> Color only
+            if(pNodeConvColor)
+            {
+                CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvColor, 0));
+            }
+        }
+    }
+
+bail:
+    return hr;
+}
+
+// This function should be called only if VideoProcessor is not supported
+// Scores every video media type advertised by the source against the requested
+// subtype/size/fps (lower score = better; subtype and fps mismatches carry
+// large penalties) and returns the best match through the out parameters.
+HRESULT MFUtils::GetBestFormat(
+    IMFMediaSource *pSource,
+    const GUID *pSubType,
+    UINT32 nWidth,
+    UINT32 nHeight,
+    UINT32 nFps,
+    UINT32 *pnWidth,
+    UINT32 *pnHeight,
+    UINT32 *pnFps,
+    const VideoSubTypeGuidPair **ppSubTypeGuidPair
+    )
+{
+
+// Finds the index of '_guid' in PreferredVideoSubTypeGuidPairs (-1 if absent).
+#define _FindPairByGuid(_guid, _index) { \
+    int _i; _index = -1; \
+    for (_i = 0; _i < PreferredVideoSubTypeGuidPairsCount; ++_i) { \
+        if (PreferredVideoSubTypeGuidPairs[_i].fourcc == _guid) { \
+            _index = _i; break; \
+        } \
+    } \
+}
+#if 0
+    *pnWidth = 640;
+    *pnHeight = 480;
+    *pnFps = 30;
+    return S_OK;
+#else
+    HRESULT hr = S_OK;
+    IMFPresentationDescriptor *pPD = NULL;
+    IMFStreamDescriptor *pSD = NULL;
+    IMFMediaTypeHandler *pHandler = NULL;
+    IMFMediaType *pMediaType = NULL;
+    DWORD cStreams = 0, cMediaTypesCount;
+    // FIX: _BestSubType must be initialized — it is read unconditionally at
+    // the bail-time _FindPairByGuid below, which was undefined behavior when
+    // no media type had been examined.
+    GUID majorType, subType, _BestSubType = GUID_NULL;
+    BOOL bFound = FALSE, fSelected;
+    UINT32 _nWidth, _nHeight, numeratorFps, denominatorFps, _nFps, _nScore, _nBestScore;
+    int PreferredVideoSubTypeGuidPairIndex;
+    static const UINT32 kSubTypeMismatchPad = _UI32_MAX >> 4;
+    static const UINT32 kFpsMismatchPad = _UI32_MAX >> 2;
+
+    if (!ppSubTypeGuidPair || !pSubType) {
+        CHECK_HR(hr = E_INVALIDARG);
+    }
+    _FindPairByGuid(*pSubType, PreferredVideoSubTypeGuidPairIndex);
+    if (PreferredVideoSubTypeGuidPairIndex == -1) {
+        CHECK_HR(hr = E_INVALIDARG);
+    }
+    *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
+
+    _nBestScore = _UI32_MAX;
+    CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+    CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+
+    for (DWORD i = 0; i < cStreams; i++)
+    {
+        fSelected = FALSE;
+
+        CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+
+        if (fSelected)
+        {
+            CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+
+            CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+            if(majorType == MFMediaType_Video)
+            {
+                CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+
+                for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex)
+                {
+                    CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+
+                    CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+                    // if(subType == *pSubType)
+                    {
+                        CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
+                        CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps));
+                        _nFps = (numeratorFps / denominatorFps);
+
+                        if (subType == *pSubType) {
+                            _nScore = 0;
+                        }
+                        else {
+                            _FindPairByGuid(subType, PreferredVideoSubTypeGuidPairIndex);
+                            if (PreferredVideoSubTypeGuidPairIndex == -1) {
+                                _nScore = kSubTypeMismatchPad; // Not a must but important: If(!VideoProcess) then CLSID_CColorConvertDMO
+                            }
+                            else {
+                                _nScore = kSubTypeMismatchPad >> (PreferredVideoSubTypeGuidPairsCount - PreferredVideoSubTypeGuidPairIndex);
+                            }
+                        }
+                        _nScore += abs((int)(_nWidth - nWidth)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
+                        _nScore += abs((int)(_nHeight - nHeight)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
+                        _nScore += (_nFps == nFps) ? 0 : kFpsMismatchPad; // Fps is a must because without video processor no alternative exist (CLSID_CFrameRateConvertDmo doesn't support I420)
+
+                        if (_nScore <= _nBestScore || !bFound)
+                        {
+                            *pnWidth = _nWidth;
+                            *pnHeight = _nHeight;
+                            *pnFps = _nFps;
+                            bFound = TRUE;
+                            _BestSubType = subType;
+                            _nBestScore = _nScore;
+                        }
+                    }
+
+                    SafeRelease(&pMediaType);
+                }
+            }
+        }
+
+        SafeRelease(&pHandler);
+        SafeRelease(&pSD);
+    }
+
+bail:
+    SafeRelease(&pPD);
+    SafeRelease(&pSD);
+    SafeRelease(&pHandler);
+    SafeRelease(&pMediaType);
+
+    _FindPairByGuid(_BestSubType, PreferredVideoSubTypeGuidPairIndex);
+    if (PreferredVideoSubTypeGuidPairIndex != -1) {
+        *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
+    }
+    else /*if (_nBestScore > kSubTypeMismatchPad)*/ {
+        *pnWidth = 640;
+        *pnHeight = 480;
+        *pnFps = 30;
+        TSK_DEBUG_WARN("Failed to match subtype...using VGA@30fps"); // FIX: typo "math" -> "match"
+    }
+
+    return SUCCEEDED(hr) ? (bFound ? S_OK : E_NOT_SET): hr;
+#endif
+
+}
+
+// Returns the window handle of the current console by temporarily giving the
+// console a "unique" title and looking that title up with FindWindow().
+HWND MFUtils::GetConsoleHwnd(void)
+{
+    #define MY_BUFSIZE 1024 // Buffer size for console window titles.
+    HWND hwndFound;                      // This is what is returned to the caller.
+    TCHAR pszNewWindowTitle[MY_BUFSIZE]; // Fabricated "unique" title.
+    TCHAR pszOldWindowTitle[MY_BUFSIZE]; // Original title, restored on exit.
+
+    // Remember the current console title so it can be restored afterwards.
+    GetConsoleTitle(pszOldWindowTitle, MY_BUFSIZE);
+
+    // Build a title that is very unlikely to collide with any other window,
+    // then apply it to the console.
+    wsprintf(pszNewWindowTitle, TEXT("%d/%d"),
+        GetTickCount(),
+        GetCurrentProcessId());
+    SetConsoleTitle(pszNewWindowTitle);
+
+    // Give the window manager a moment to pick up the new title.
+    Sleep(40);
+
+    // Find the console window by its unique title.
+    hwndFound = FindWindow(NULL, pszNewWindowTitle);
+
+    // Restore original window title.
+    SetConsoleTitle(pszOldWindowTitle);
+
+    return hwndFound;
+}
diff --git a/plugins/pluginWinMF/internals/mf_utils.h b/plugins/pluginWinMF/internals/mf_utils.h
new file mode 100644
index 0000000..0819597
--- /dev/null
+++ b/plugins/pluginWinMF/internals/mf_utils.h
@@ -0,0 +1,260 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_UTILS_H
+#define PLUGIN_WIN_MF_UTILS_H
+
+#include "../plugin_win_mf_config.h"
+
+#include <new>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <Mferror.h>
+#include <shlwapi.h>
+
+#undef SafeRelease
+// Releases a COM interface pointer (if non-NULL) and resets it to NULL.
+// 'ppT' must be the ADDRESS of the interface pointer: SafeRelease(&pFoo).
+#define SafeRelease(ppT) \
+{ \
+	if (*ppT) \
+	{ \
+		(*ppT)->Release(); \
+		*ppT = NULL; \
+	} \
+} 
+
+#undef CHECK_HR
+// Evaluates (x) exactly once into a local HRESULT, logs and jumps to the
+// caller's "bail" label on failure.
+// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
+#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
+
+// Pairs a Doubango chroma enum value with the matching Media Foundation
+// video subtype GUID (FOURCC).
+typedef struct VideoSubTypeGuidPair
+{
+	enum tmedia_chroma_e chroma;
+	const GUID& fourcc;
+}
+VideoSubTypeGuidPair;
+
+// Static helper class grouping the Media Foundation boilerplate used by
+// this plugin: MF startup/shutdown, capability probing (D3D9, low-latency
+// H.264), media-type creation, codec/processor discovery, topology
+// construction and session control. All members are static; the private
+// g_b*Checked/g_b*Supported pairs cache one-time capability probes.
+class MFUtils
+{
+public:
+
+static HRESULT Startup();
+static HRESULT Shutdown();
+
+static BOOL IsD3D9Supported();
+static BOOL IsLowLatencyH264Supported();
+static BOOL IsLowLatencyH264SupportsMaxSliceSize();
+
+static HRESULT IsAsyncMFT(
+	IMFTransform *pMFT, // The MFT to check
+	BOOL* pbIsAsync // Whether the MFT is Async
+	);
+static HRESULT UnlockAsyncMFT(
+	IMFTransform *pMFT // The MFT to unlock
+	);
+
+static HRESULT CreatePCMAudioType(
+	UINT32 sampleRate,        // Samples per second
+	UINT32 bitsPerSample,     // Bits per sample
+	UINT32 cChannels,         // Number of channels
+	IMFMediaType **ppType     // Receives a pointer to the media type.
+	);
+static HRESULT CreateVideoType(
+	const GUID* subType, // video subType
+	IMFMediaType **ppType, // Receives a pointer to the media type.
+	UINT32 unWidth = 0, // Video width (0 to ignore)
+	UINT32 unHeight = 0 // Video height (0 to ignore)
+	);
+static HRESULT ConvertVideoTypeToUncompressedType(
+	IMFMediaType *pType,    // Pointer to an encoded video type.
+	const GUID& subtype,    // Uncompressed subtype (eg, RGB-32, AYUV)
+	IMFMediaType **ppType   // Receives a matching uncompressed video type.
+	);
+static HRESULT CreateMediaSample(
+	DWORD cbData, // Maximum buffer size
+	IMFSample **ppSample // Receives the sample
+	);
+static HRESULT ValidateVideoFormat(
+	IMFMediaType *pmt
+	);
+static HRESULT IsVideoProcessorSupported(BOOL *pbSupported);
+static HRESULT GetBestVideoProcessor(
+	const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
+	const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
+	IMFTransform **ppProcessor // Receives the video processor
+	);
+static HRESULT GetBestCodec(
+	BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
+	const GUID& mediaType, // The MediaType
+	const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
+	const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
+	IMFTransform **ppMFT // Receives the decoder/encoder transform
+	);
+static HRESULT BindOutputNode(
+	IMFTopologyNode *pNode // The Node
+	);
+static HRESULT AddOutputNode(
+	IMFTopology *pTopology,     // Topology.
+	IMFActivate *pActivate,     // Media sink activation object.
+	DWORD dwId,                 // Identifier of the stream sink.
+	IMFTopologyNode **ppNode    // Receives the node pointer.
+	);
+static HRESULT AddTransformNode(
+	IMFTopology *pTopology,     // Topology.
+	IMFTransform *pMFT,     // MFT.
+	DWORD dwId,                 // Identifier of the stream sink.
+	IMFTopologyNode **ppNode    // Receives the node pointer.
+	);
+static HRESULT AddSourceNode(
+	IMFTopology *pTopology,           // Topology.
+	IMFMediaSource *pSource,          // Media source.
+	IMFPresentationDescriptor *pPD,   // Presentation descriptor.
+	IMFStreamDescriptor *pSD,         // Stream descriptor.
+	IMFTopologyNode **ppNode          // Receives the node pointer.
+	);
+static HRESULT CreateTopology(
+	IMFMediaSource *pSource, // Media source
+	IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
+	IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
+	IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
+	IMFMediaType *pIputTypeMain, // Main sink input MediaType
+	IMFTopology **ppTopo // Receives the newly created topology
+	);
+static HRESULT ResolveTopology(
+	IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
+	IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
+	IMFTopology *pCurrentTopo = NULL // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
+	);
+static HRESULT FindNodeObject(
+	IMFTopology *pInputTopo, // The Topology containing the node to find
+	TOPOID qwTopoNodeID, //The identifier for the node
+	void** ppObject // Receives the Object
+	);
+static HRESULT CreateMediaSinkActivate(
+	IMFStreamDescriptor *pSourceSD,     // Pointer to the stream descriptor.
+	HWND hVideoWindow,                  // Handle to the video clipping window.
+	IMFActivate **ppActivate
+);
+static HRESULT SetMediaType(
+	IMFMediaSource *pSource,        // Media source.
+	IMFMediaType* pMediaType // Media Type.
+	);
+static HRESULT SetVideoWindow(
+	IMFTopology *pTopology,         // Topology.
+	IMFMediaSource *pSource,        // Media source.
+	HWND hVideoWnd                 // Window for video playback.
+	);
+static HRESULT RunSession(
+	IMFMediaSession *pSession, // Session to run
+	IMFTopology *pTopology // The toppology
+	);
+static HRESULT ShutdownSession(
+	IMFMediaSession *pSession, // The Session
+	IMFMediaSource *pSource = NULL // Source to shutdown (optional)
+	);
+static HRESULT PauseSession(
+	IMFMediaSession *pSession, // The session
+	IMFMediaSource *pSource = NULL// Source to pause (optional)
+	);
+static INT GetSupportedSubTypeIndex(
+	IMFMediaSource *pSource, // The source
+	const GUID& mediaType, // The MediaType
+	const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
+	);
+static HRESULT IsSupported(
+	IMFPresentationDescriptor *pPD, 
+	DWORD cStreamIndex, 
+	UINT32 nWidth, 
+	UINT32 nHeight, 
+	UINT32 nFps, 
+	const GUID& guidFormat, 
+	BOOL* pbSupportedSize, 
+	BOOL* pbSupportedFps, 
+	BOOL* pbSupportedFormat
+	);
+static HRESULT IsSupported(
+	IMFPresentationDescriptor *pPD, 
+	DWORD cStreamIndex, 
+	IMFMediaType* pMediaType, 
+	BOOL* pbSupportedSize, 
+	BOOL* pbSupportedFps, 
+	BOOL* pbSupportedFormat
+	);
+static HRESULT IsSupportedByInput(
+	IMFPresentationDescriptor *pPD, 
+	DWORD cStreamIndex, 
+	IMFTopologyNode *pNode, 
+	BOOL* pbSupportedSize, 
+	BOOL* pbSupportedFps, 
+	BOOL* pbSupportedFormat
+	);
+static HRESULT ConnectConverters(
+	IMFTopologyNode *pNode, 
+	DWORD dwOutputIndex, 
+	IMFTopologyNode *pNodeConvFrameRate, 
+	IMFTopologyNode *pNodeConvColor, 
+	IMFTopologyNode *pNodeConvSize
+	);
+static HRESULT GetBestFormat(
+	IMFMediaSource *pSource, 
+	const GUID *pSubType, 
+	UINT32 nWidth, 
+	UINT32 nHeight, 
+	UINT32 nFps, 
+	UINT32 *pnWidth, 
+	UINT32 *pnHeight, 
+	UINT32 *pnFps, 
+	const VideoSubTypeGuidPair **pSubTypeGuidPair
+	);
+
+static HWND GetConsoleHwnd(void);
+
+// Retrieves the underlying object of a topology node, QI'd to interface Q.
+template <class Q>
+static HRESULT GetTopoNodeObject(IMFTopologyNode *pNode, Q **ppObject)
+{
+    IUnknown *pUnk = NULL;   // zero output
+
+    HRESULT hr = pNode->GetObject(&pUnk);
+    if (SUCCEEDED(hr))
+    {
+        pUnk->QueryInterface(IID_PPV_ARGS(ppObject));
+        pUnk->Release();
+    }
+    return hr;
+}
+
+private:
+	static BOOL g_bStarted;
+
+	static DWORD g_dwMajorVersion;
+	static DWORD g_dwMinorVersion;
+
+	static BOOL g_bLowLatencyH264Checked;
+	static BOOL g_bLowLatencyH264Supported;
+	static BOOL g_bLowLatencyH264SupportsMaxSliceSize;
+
+	static BOOL g_bD3D9Checked;
+	static BOOL g_bD3D9Supported;
+
+public:
+	static const TOPOID g_ullTopoIdSinkMain;
+	static const TOPOID g_ullTopoIdSinkPreview;
+	static const TOPOID g_ullTopoIdSource;
+	static const TOPOID g_ullTopoIdVideoProcessor;
+};
+
+#endif /* PLUGIN_WIN_MF_UTILS_H */
diff --git a/plugins/pluginWinMF/pluginWinMF.vcproj b/plugins/pluginWinMF/pluginWinMF.vcproj
new file mode 100644
index 0000000..c9f620b
--- /dev/null
+++ b/plugins/pluginWinMF/pluginWinMF.vcproj
@@ -0,0 +1,319 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="9.00"
+ Name="pluginWinMF"
+ ProjectGUID="{E8596446-CB3A-4AD5-83C3-6562EE426494}"
+ RootNamespace="pluginWinMF"
+ Keyword="Win32Proj"
+ TargetFrameworkVersion="196613"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ AdditionalIncludeDirectories=".;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyDAV\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;PLUGIN_WIN_MFP_EXPORTS;DEBUG_LEVEL=DEBUG_LEVEL_INFO;TINYDAV_EXPORTS"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="2"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(SolutionDir)$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ CharacterSet="1"
+ WholeProgramOptimization="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="3"
+ EnableIntrinsicFunctions="true"
+ AdditionalIncludeDirectories=".;..\..\thirdparties\win32\include;..\..\tinySAK\src;..\..\tinyMEDIA\include;..\..\tinySDP\include;..\..\tinyDAV\include;..\..\tinyRTP\include"
+ PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;PLUGIN_WIN_MFP_EXPORTS;DEBUG_LEVEL=DEBUG_LEVEL_ERROR;TINYDAV_EXPORTS"
+ RuntimeLibrary="2"
+ EnableFunctionLevelLinking="true"
+ UsePrecompiledHeader="0"
+ WarningLevel="3"
+ WarnAsError="true"
+ DebugInformationFormat="0"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ AdditionalDependencies="$(OutDir)\tinySAK.lib $(OutDir)\tinyMEDIA.lib"
+ LinkIncremental="1"
+ GenerateDebugInformation="false"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\dllmain_mf.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_codec_h264.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_consumer_audio.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_consumer_video.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_converter_video.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_producer_audio.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\plugin_win_mf_producer_video.cxx"
+ >
+ </File>
+ <Filter
+ Name="internals"
+ >
+ <File
+ RelativePath=".\internals\mf_codec.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_custom_src.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_devices.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_display_watcher.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_sample_grabber.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_sample_queue.cxx"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_utils.cxx"
+ >
+ </File>
+ </Filter>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\plugin_win_mf_config.h"
+ >
+ </File>
+ <Filter
+ Name="internals"
+ >
+ <File
+ RelativePath=".\internals\mf_codec.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_custom_src.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_devices.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_display_watcher.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_sample_grabber.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_sample_queue.h"
+ >
+ </File>
+ <File
+ RelativePath=".\internals\mf_utils.h"
+ >
+ </File>
+ </Filter>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ <File
+ RelativePath=".\version.rc"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="tdav"
+ >
+ <File
+ RelativePath="..\..\tinyDAV\include\tinydav\codecs\h264\tdav_codec_h264_common.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\codecs\h264\tdav_codec_h264_rtp.c"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\include\tinydav\codecs\h264\tdav_codec_h264_rtp.h"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\audio\tdav_consumer_audio.c"
+ >
+ </File>
+ <File
+ RelativePath="..\..\tinyDAV\src\audio\tdav_producer_audio.c"
+ >
+ </File>
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx b/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
new file mode 100644
index 0000000..bee00f0
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
@@ -0,0 +1,750 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "internals/mf_codec.h"
+#include "internals/mf_utils.h"
+
+#include "tinydav/codecs/h264/tdav_codec_h264_common.h"
+
+#include "tinyrtp/rtp/trtp_rtp_packet.h"
+
+#include "tinymedia/tmedia_codec.h"
+#include "tinymedia/tmedia_params.h"
+#include "tinymedia/tmedia_defaults.h"
+
+#include "tsk_params.h"
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+// Private codec context: the common Doubango H.264 base plus one MF
+// encoder instance and one MF decoder instance with their per-direction
+// state (RTP reassembly accumulator on the decoder side).
+typedef struct mf_codec_h264_s
+{
+	TDAV_DECLARE_CODEC_H264_COMMON;
+
+	// Encoder
+	struct{
+		MFCodecVideoH264* pInst;
+		void* buffer;
+		int64_t frame_count;
+		tsk_bool_t force_idr;    // set via "action"/encode_idr; consumed (reset) after each encode
+		int32_t quality; // [1-31]
+		int rotation;
+		int neg_width;   // negotiated width (swapped with height when rotation is 90/270)
+		int neg_height;
+		int neg_fps;
+		int max_bitrate_bps;
+		int32_t max_bw_kpbs;
+		tsk_bool_t passthrough; // whether to bypass encoding
+	} encoder;
+
+	// decoder
+	struct{
+		MFCodecVideoH264* pInst;
+		void* accumulator;            // grows on demand; holds the access unit being reassembled
+		tsk_size_t accumulator_pos;
+		tsk_size_t accumulator_size;
+		uint16_t last_seq;            // last RTP seq_num, used for loss detection only
+		tsk_bool_t passthrough; // whether to bypass decoding
+	} decoder;
+}
+mf_codec_h264_t;
+
+#if !defined(PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS)
+# define PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS 25
+#endif
+
+static int mf_codec_h264_init(mf_codec_h264_t* self, profile_idc_t profile);
+static int mf_codec_h264_deinit(mf_codec_h264_t* self);
+static int mf_codec_h264_open_encoder(mf_codec_h264_t* self);
+static int mf_codec_h264_close_encoder(mf_codec_h264_t* self);
+static int mf_codec_h264_open_decoder(mf_codec_h264_t* self);
+static int mf_codec_h264_close_decoder(mf_codec_h264_t* self);
+
+/* ============ H.264 Base/Main Profile X.X Plugin interface functions ================= */
+
+// tmedia codec "set parameter" callback.
+// Supported int32 keys: "action" (force IDR, quality up/down),
+// "bypass-encoding", "bypass-decoding", "rotation" (re-opens the encoder
+// so negotiated width/height can be swapped).
+// Returns 0 on success, -1 when the codec is not open or the key is unknown.
+static int mf_codec_h264_set(tmedia_codec_t* self, const tmedia_param_t* param)
+{
+	mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+	if(!self->opened){
+		TSK_DEBUG_ERROR("Codec not opened");
+		return -1;
+	}
+	if(param->value_type == tmedia_pvt_int32){
+		if(tsk_striequals(param->key, "action")){
+			tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+			// NOTE(review): no 'default' case -- unknown actions are silently
+			// ignored (still returns 0); confirm this is intentional.
+			switch(action){
+				case tmedia_codec_action_encode_idr:
+					{
+						h264->encoder.force_idr = tsk_true;
+						break;
+					}
+				case tmedia_codec_action_bw_down:
+					{
+						// higher quality value => lower quality/bitrate
+						h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
+						break;
+					}
+				case tmedia_codec_action_bw_up:
+					{
+						h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
+						break;
+					}
+			}
+			return 0;
+		}
+		else if(tsk_striequals(param->key, "bypass-encoding")){
+			h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+			h264->encoder.pInst->setBundled(h264->encoder.passthrough);
+			TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
+			return 0;
+		}
+		else if(tsk_striequals(param->key, "bypass-decoding")){
+			h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+			h264->decoder.pInst->setBundled(h264->decoder.passthrough);
+			TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
+			return 0;
+		}
+		else if(tsk_striequals(param->key, "rotation")){
+			int rotation = *((int32_t*)param->value);
+			if(h264->encoder.rotation != rotation){
+				if(self->opened){
+					int ret;
+					h264->encoder.rotation = rotation;
+					// restart the encoder so neg_width/neg_height are recomputed
+					if((ret = mf_codec_h264_close_encoder(h264))){
+						return ret;
+					}
+					if((ret = mf_codec_h264_open_encoder(h264))){
+						return ret;
+					}
+				}
+			}
+			return 0;
+		}
+	}
+	return -1;
+}
+
+
+// tmedia codec "open" callback: opens the encoder then the decoder.
+// Returns 0 on success, otherwise the failing sub-open's error code.
+// NOTE(review): if the decoder open fails the already-opened encoder is
+// not closed here -- presumably the base class calls close() on failure;
+// confirm against the tmedia_codec layer.
+static int mf_codec_h264_open(tmedia_codec_t* self)
+{
+	int ret;
+	mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+	
+	if(!h264){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	
+	/* the caller (base class) already checked that the codec is not opened */
+
+	//	Encoder
+	if((ret = mf_codec_h264_open_encoder(h264))){
+		return ret;
+	}
+
+	//	Decoder
+	if((ret = mf_codec_h264_open_decoder(h264))){
+		return ret;
+	}
+
+	return 0;
+}
+
+// tmedia codec "close" callback: closes encoder and decoder
+// unconditionally (sub-close return values are intentionally ignored).
+static int mf_codec_h264_close(tmedia_codec_t* self)
+{
+	mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+
+	if(!h264){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	/* the caller (base class) already checked that the codec is opened */
+
+	//	Encoder
+	mf_codec_h264_close_encoder(h264);
+
+	//	Decoder
+	mf_codec_h264_close_decoder(h264);
+
+	return 0;
+}
+
+// tmedia codec "encode" callback.
+// Feeds one raw frame to the MF encoder and pushes the resulting NALUs to
+// the RTP layer via tdav_codec_h264_rtp_encap(). Always returns 0: output
+// goes through the RTP encapsulation callback, never through out_data.
+static tsk_size_t mf_codec_h264_encode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size)
+{
+	int ret = 0;
+	tsk_bool_t send_idr, send_hdr;
+
+	mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+	tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+	if(!self || !in_data || !in_size){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return 0;
+	}
+
+	if(!self->opened || !h264->encoder.pInst || !h264->encoder.pInst->IsReady()){
+		TSK_DEBUG_ERROR("Encoder not opened or not ready");
+		return 0;
+	}
+
+	
+	HRESULT hr = S_OK;
+	IMFSample *pSampleOut = NULL;
+	IMFMediaBuffer* pBufferOut = NULL;
+
+	// send IDR for:
+	//	- the first frame
+	//  - remote peer requested an IDR
+	//	- every second within the first 4seconds
+	send_idr = (
+		h264->encoder.frame_count++ == 0
+		|| h264 ->encoder.force_idr
+		|| ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
+	   );
+
+	if(send_idr) {
+		CHECK_HR(hr = h264->encoder.pInst->RequestKeyFrame());
+	}
+
+	// send SPS and PPS headers for:
+	//  - IDR frames (not required but it's the easiest way to deal with pkt loss)
+	//  - every 5 seconds after the first 4seconds
+	send_hdr = (
+		send_idr
+		|| ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
+		);
+	if(send_hdr){
+		//FIXME: MF_MT_MPEG_SEQUENCE_HEADER
+		// tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), h264->encoder.context->extradata, (tsk_size_t)h264->encoder.context->extradata_size);
+	}
+
+	// passthrough: the input is already an H.264 bitstream -- forward as-is
+	if (h264->encoder.passthrough) {
+		tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
+		return 0;
+	}
+ 
+	// Encode data
+	CHECK_HR(hr = h264->encoder.pInst->Process(in_data, (UINT32)in_size, &pSampleOut));
+	if(pSampleOut) {
+		CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+		BYTE* pBufferPtr = NULL;
+		DWORD dwDataLength = 0;
+		CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+		if(dwDataLength > 0) {
+			CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+			tdav_codec_h264_rtp_encap(common, (const uint8_t*)pBufferPtr, (tsk_size_t)dwDataLength);
+			CHECK_HR(hr = pBufferOut->Unlock());
+		}
+	}
+
+	// reset
+	h264->encoder.force_idr = tsk_false;
+
+bail:
+	SafeRelease(&pSampleOut);
+	SafeRelease(&pBufferOut);
+	return 0;
+}
+
+// tmedia codec "decode" callback.
+// Reassembles RTP H.264 payloads (RFC 6184) into an access unit inside
+// decoder.accumulator, then -- on the RTP marker bit -- either copies the
+// Annex-B bitstream out unchanged (passthrough) or runs it through the MF
+// decoder. Returns the number of bytes written to *out_data (0 when no
+// complete frame is ready or on error).
+static tsk_size_t mf_codec_h264_decode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size, const tsk_object_t* proto_hdr)
+{
+	mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+	const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
+
+	const uint8_t* pay_ptr = tsk_null;
+	tsk_size_t pay_size = 0;
+	int ret;
+	tsk_bool_t append_scp, end_of_unit;
+	tsk_bool_t sps_or_pps;
+	tsk_size_t retsize = 0, size_to_copy = 0;
+	static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
+	static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
+
+	if(!h264 || !in_data || !in_size || !out_data)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return 0;
+	}
+	
+	// FIX: test the DECODER instance before dereferencing it (the original
+	// code checked encoder.pInst, then called decoder.pInst->IsReady() --
+	// a NULL decoder instance would have been dereferenced).
+	if(!self->opened || !h264->decoder.pInst || !h264->decoder.pInst->IsReady()){
+		TSK_DEBUG_ERROR("Decoder not opened or not ready");
+		return 0;
+	}
+
+	HRESULT hr = S_OK;
+	IMFSample *pSampleOut = NULL;
+	IMFMediaBuffer* pBufferOut = NULL;
+
+	/* Packet lost? (detection/logging only; no concealment here) */
+	if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq){
+		TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
+	}
+	h264->decoder.last_seq = rtp_hdr->seq_num;
+
+
+	/* 5.3. NAL Unit Octet Usage
+	  +---------------+
+      |0|1|2|3|4|5|6|7|
+      +-+-+-+-+-+-+-+-+
+      |F|NRI|  Type   |
+      +---------------+
+	*/
+	if (*((uint8_t*)in_data) & 0x80) { // forbidden_zero_bit set => corrupted NAL, drop the access unit
+		TSK_DEBUG_WARN("F=1");
+		/* reset accumulator */
+		h264->decoder.accumulator_pos = 0;
+		return 0;
+	}
+
+	/* get payload */
+	if ((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp, &end_of_unit)) || !pay_ptr || !pay_size){
+		TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
+		return 0;
+	}
+	//append_scp = tsk_true;
+	size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
+	// whether it's SPS or PPS (append_scp is false for subsequent FUA chunks)
+	sps_or_pps = append_scp && pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
+	
+	// start-accumulator
+	if (!h264->decoder.accumulator) {
+		if (size_to_copy > xmax_size) {
+			TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", size_to_copy, xmax_size);
+			return 0;
+		}
+		if (!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t)))) {
+			TSK_DEBUG_ERROR("Failed to allocated new buffer");
+			return 0;
+		}
+		h264->decoder.accumulator_size = size_to_copy;
+	}
+	if ((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size) {
+		TSK_DEBUG_ERROR("BufferOverflow");
+		h264->decoder.accumulator_pos = 0;
+		return 0;
+	}
+	if ((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size) {
+		if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy)))){
+			TSK_DEBUG_ERROR("Failed to reallocated new buffer");
+			h264->decoder.accumulator_pos = 0;
+			h264->decoder.accumulator_size = 0;
+			return 0;
+		}
+		h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
+	}
+
+	// prepend the Annex-B start code for the first chunk of each NAL unit
+	if (append_scp) {
+		memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
+		h264->decoder.accumulator_pos += start_code_prefix_size;
+	}
+	memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
+	h264->decoder.accumulator_pos += pay_size;
+	// end-accumulator
+
+	/*if(sps_or_pps){
+		// http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
+		// SPS and PPS should be bundled with IDR
+		TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
+	}
+	else */if (rtp_hdr->marker) { // marker bit => last packet of the access unit
+		if (h264->decoder.passthrough) {
+			// hand the reassembled bitstream to the caller without decoding
+			if (*out_max_size < h264->decoder.accumulator_pos) {
+				if ((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos))) {
+					*out_max_size = h264->decoder.accumulator_pos;
+				}
+				else {
+					*out_max_size = 0;
+					return 0;
+				}
+			}
+			memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
+			retsize = h264->decoder.accumulator_pos;
+		}
+		else { // !h264->decoder.passthrough
+			/* decode the picture */
+			CHECK_HR(hr = h264->decoder.pInst->Process(h264->decoder.accumulator, (UINT32)h264->decoder.accumulator_pos, &pSampleOut));
+			if (pSampleOut) {
+				CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+				BYTE* pBufferPtr = NULL;
+				DWORD dwDataLength = 0;
+				CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+				if (dwDataLength > 0) {
+					CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+					{
+						/* IDR ? notify the upper layer so it can resync */
+						if(((pay_ptr[0] & 0x1F) == 0x05) && TMEDIA_CODEC_VIDEO(self)->in.callback){
+							TSK_DEBUG_INFO("Decoded H.264 IDR");
+							TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
+							TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+							TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+						}
+						/* fill out */
+						if(*out_max_size < dwDataLength){
+							if((*out_data = tsk_realloc(*out_data, dwDataLength))){
+								*out_max_size = dwDataLength;
+							}
+							else{
+								*out_max_size = 0;
+								return 0;
+							}
+						}
+						retsize = (tsk_size_t)dwDataLength;
+						TMEDIA_CODEC_VIDEO(h264)->in.width = h264->decoder.pInst->GetWidth();
+						TMEDIA_CODEC_VIDEO(h264)->in.height = h264->decoder.pInst->GetHeight();
+						memcpy(*out_data, pBufferPtr, retsize);
+					}
+					CHECK_HR(hr = pBufferOut->Unlock());
+				}
+			}
+		}// else(!h264->decoder.passthrough)
+	} // else if(rtp_hdr->marker)
+
+bail:
+	if (rtp_hdr->marker) {
+		h264->decoder.accumulator_pos = 0;
+	}
+	if (FAILED(hr) /*|| (!pSampleOut && rtp_hdr->marker)*/){
+		TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
+		if(TMEDIA_CODEC_VIDEO(self)->in.callback){
+			TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
+			TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+			TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+		}
+	}
+	SafeRelease(&pSampleOut);
+	SafeRelease(&pBufferOut);
+	return retsize;
+}
+
+// tmedia codec SDP attribute matcher: delegates to the shared H.264
+// implementation (profile-level-id, packetization-mode, etc.).
+static tsk_bool_t mf_codec_h264_sdp_att_match(const tmedia_codec_t* self, const char* att_name, const char* att_value)
+{
+	return tdav_codec_h264_common_sdp_att_match((tdav_codec_h264_common_t*)self, att_name, att_value);
+}
+
+// tmedia codec SDP attribute getter: delegates to the shared H.264
+// implementation, then tags the fmtp line with "impl=MF" so the peer/logs
+// can identify the Media Foundation implementation.
+// Caller owns the returned string.
+static char* mf_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char* att_name)
+{
+	char* att = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
+	if(att && tsk_striequals(att_name, "fmtp")) {
+		tsk_strcat(&att, "; impl=MF");
+	}
+	return att;
+}
+
+
+
+
+/* ============ H.264 Base Profile Plugin interface ================= */
+
+/* constructor */
+// Base Profile: tsk_object constructor -- initializes the codec with
+// profile_idc_baseline; returns tsk_null on init failure.
+static tsk_object_t* mf_codec_h264_base_ctor(tsk_object_t * self, va_list * app)
+{
+	mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+	if(h264){
+		/* init base: called by tmedia_codec_create() */
+		/* init self */
+		if(mf_codec_h264_init(h264, profile_idc_baseline) != 0){
+			return tsk_null;
+		}
+	}
+	return self;
+}
+/* destructor: deinit common base then this implementation */
+static tsk_object_t* mf_codec_h264_base_dtor(tsk_object_t * self)
+{ 
+	mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+	if(h264){
+		/* deinit base */
+		tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+		/* deinit self */
+		mf_codec_h264_deinit(h264);
+	}
+
+	return self;
+}
+/* object definition */
+static const tsk_object_def_t mf_codec_h264_base_def_s = 
+{
+	sizeof(mf_codec_h264_t),
+	mf_codec_h264_base_ctor, 
+	mf_codec_h264_base_dtor,
+	tmedia_codec_cmp, 
+};
+/* plugin definition: registers the Base-Profile codec with the tmedia layer */
+static const tmedia_codec_plugin_def_t mf_codec_h264_base_plugin_def_s = 
+{
+	&mf_codec_h264_base_def_s,
+
+	tmedia_video,
+	tmedia_codec_id_h264_bp,
+	"H264",
+	"H264 Base Profile (Media Foundation)",
+	TMEDIA_CODEC_FORMAT_H264_BP,
+	tsk_true,
+	90000, // rate
+	
+	/* audio */
+	{ 0 },
+
+	/* video (width, height, fps) */
+	{176, 144, 0}, // fps is @deprecated
+
+	mf_codec_h264_set,
+	mf_codec_h264_open,
+	mf_codec_h264_close,
+	mf_codec_h264_encode,
+	mf_codec_h264_decode,
+	mf_codec_h264_sdp_att_match,
+	mf_codec_h264_sdp_att_get
+};
+const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t = &mf_codec_h264_base_plugin_def_s;
+
+/* ============ H.264 Main Profile Plugin interface ================= */
+
+/* constructor */
+// Main Profile: tsk_object constructor -- identical to the Base-Profile
+// plugin except for profile_idc_main and the registered format/id.
+static tsk_object_t* mf_codec_h264_main_ctor(tsk_object_t * self, va_list * app)
+{
+	mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+	if(h264){
+		/* init base: called by tmedia_codec_create() */
+		/* init self */
+		if(mf_codec_h264_init(h264, profile_idc_main) != 0){
+			return tsk_null;
+		}
+	}
+	return self;
+}
+/* destructor: deinit common base then this implementation */
+static tsk_object_t* mf_codec_h264_main_dtor(tsk_object_t * self)
+{ 
+	mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+	if(h264){
+		/* deinit base */
+		tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+		/* deinit self */
+		mf_codec_h264_deinit(h264);
+		
+	}
+
+	return self;
+}
+/* object definition */
+static const tsk_object_def_t mf_codec_h264_main_def_s = 
+{
+	sizeof(mf_codec_h264_t),
+	mf_codec_h264_main_ctor, 
+	mf_codec_h264_main_dtor,
+	tmedia_codec_cmp, 
+};
+/* plugin definition: registers the Main-Profile codec with the tmedia layer */
+static const tmedia_codec_plugin_def_t mf_codec_h264_main_plugin_def_s = 
+{
+	&mf_codec_h264_main_def_s,
+
+	tmedia_video,
+	tmedia_codec_id_h264_mp,
+	"H264",
+	"H264 Main Profile (Media Foundation)",
+	TMEDIA_CODEC_FORMAT_H264_MP,
+	tsk_true,
+	90000, // rate
+	
+	/* audio */
+	{ 0 },
+
+	/* video (width, height, fps)*/
+	{176, 144, 0},// fps is @deprecated
+
+	mf_codec_h264_set,
+	mf_codec_h264_open,
+	mf_codec_h264_close,
+	mf_codec_h264_encode,
+	mf_codec_h264_decode,
+	mf_codec_h264_sdp_att_match,
+	mf_codec_h264_sdp_att_get
+};
+const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t = &mf_codec_h264_main_plugin_def_s;
+
+
+
+/* ============ Common To all H264 codecs ================= */
+
+// Creates and initializes the MF H.264 encoder for the negotiated size/fps/bitrate.
+// Returns 0 on success, -1 on failure. CHECK_HR jumps to "bail" on failure.
+int mf_codec_h264_open_encoder(mf_codec_h264_t* self)
+{
+	HRESULT hr = S_OK;
+	int32_t max_bw_kpbs;
+	tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+	if(self->encoder.pInst) {
+		TSK_DEBUG_ERROR("Encoder already initialized");
+#if defined(E_ILLEGAL_METHOD_CALL)
+		CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+#else
+		CHECK_HR(hr = 0x8000000EL); /* numeric fallback when the SDK lacks E_ILLEGAL_METHOD_CALL */
+#endif
+	}
+
+	// create encoder (Base or Main profile depending on the negotiated profile)
+	if(!(self->encoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder))){
+		TSK_DEBUG_ERROR("Failed to find H.264 encoder");
+		CHECK_HR(hr = E_OUTOFMEMORY);
+	}
+
+	//self->encoder.context->pix_fmt = PIX_FMT_YUV420P;
+	//self->encoder.context->time_base.num = 1;
+	//self->encoder.context->time_base.den = TMEDIA_CODEC_VIDEO(self)->out.fps;
+	// 90/270-degree rotation swaps width and height
+	self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
+	self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
+	self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
+	// clamp the size/fps-derived bandwidth to the configured upload maximum
+	max_bw_kpbs = TSK_CLAMP(
+		0,
+		tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
+		self->encoder.max_bw_kpbs
+	);
+	self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
+
+	TSK_DEBUG_INFO("[H.264 MF Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
+		self->encoder.neg_width,
+		self->encoder.neg_height,
+		self->encoder.neg_fps,
+		self->encoder.max_bitrate_bps
+		);
+
+	CHECK_HR(hr = self->encoder.pInst->Initialize(
+			self->encoder.neg_fps,
+			self->encoder.neg_width,
+			self->encoder.neg_height,
+			self->encoder.max_bitrate_bps));
+
+	// one key-frame every PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS; slices sized to fit an RTP payload
+	CHECK_HR(hr = self->encoder.pInst->SetGOPSize(self->encoder.neg_fps * PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS));
+	CHECK_HR(hr = self->encoder.pInst->SetSliceMaxSizeInBytes((H264_RTP_PAYLOAD_SIZE - 100)));
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Releases the MF encoder instance and its output buffer; safe to call repeatedly.
+int mf_codec_h264_close_encoder(mf_codec_h264_t* self)
+{
+	if(!self){
+		return 0;
+	}
+	SafeRelease(&self->encoder.pInst);
+	if(self->encoder.buffer){
+		TSK_FREE(self->encoder.buffer);
+	}
+	self->encoder.frame_count = 0;
+	return 0;
+}
+
+// Creates and initializes the MF H.264 decoder for the negotiated input size/fps.
+// Returns 0 on success, -1 on failure. CHECK_HR jumps to "bail" on failure.
+int mf_codec_h264_open_decoder(mf_codec_h264_t* self)
+{
+	HRESULT hr = S_OK;
+	tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+	if(self->decoder.pInst) {
+		TSK_DEBUG_ERROR("Decoder already initialized");
+#if defined(E_ILLEGAL_METHOD_CALL)
+		CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+#else
+		CHECK_HR(hr = 0x8000000EL); /* numeric fallback when the SDK lacks E_ILLEGAL_METHOD_CALL */
+#endif
+	}
+
+	// create decoder (Base or Main profile depending on the negotiated profile)
+	if(!(self->decoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder))){
+		TSK_DEBUG_ERROR("Failed to find H.264 decoder"); // fixed: message used to say "encoder"
+		CHECK_HR(hr = E_OUTOFMEMORY);
+	}
+
+	TSK_DEBUG_INFO("[H.264 MF Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
+		TMEDIA_CODEC_VIDEO(self)->in.width,
+		TMEDIA_CODEC_VIDEO(self)->in.height,
+		TMEDIA_CODEC_VIDEO(self)->in.fps
+		);
+
+	CHECK_HR(hr = self->decoder.pInst->Initialize(
+			TMEDIA_CODEC_VIDEO(self)->in.fps,
+			TMEDIA_CODEC_VIDEO(self)->in.width,
+			TMEDIA_CODEC_VIDEO(self)->in.height));
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Releases the MF decoder instance and the NAL accumulator; safe to call repeatedly.
+int mf_codec_h264_close_decoder(mf_codec_h264_t* self)
+{
+	if(!self){
+		return 0;
+	}
+	SafeRelease(&self->decoder.pInst);
+	TSK_FREE(self->decoder.accumulator);
+	self->decoder.accumulator_pos = 0;
+	return 0;
+}
+
+// Shared init for Base/Main plugins: sets profile/level, packetization mode and
+// bandwidth defaults. Returns 0 on success, non-zero error code otherwise.
+int mf_codec_h264_init(mf_codec_h264_t* self, profile_idc_t profile)
+{
+	int ret = 0;
+	level_idc_t level;
+	tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+	if(!self){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if((ret = tdav_codec_h264_common_init(common))){
+		/* fixed: message used to name the wrong function and contained a typo ("faile") */
+		TSK_DEBUG_ERROR("tdav_codec_h264_common_init() failed with error code=%d", ret);
+		return ret;
+	}
+
+	/* pick the H.264 level matching the negotiated output size */
+	if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level))){
+		TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
+		return ret;
+	}
+
+	self->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
+	/* prefer full packetization mode only when the MF encoder honors max slice size */
+	if (MFUtils::IsLowLatencyH264SupportsMaxSliceSize()) {
+		common->pack_mode_local = H264_PACKETIZATION_MODE;
+	}
+	else {
+		common->pack_mode_local = Non_Interleaved_Mode;
+	}
+	common->profile = profile;
+	common->level = level;
+	TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
+	TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
+
+	/* both directions use NV12 chroma */
+	TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12;
+	TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_nv12;
+
+	self->encoder.quality = 1;
+
+	return ret;
+}
+
+// Counterpart of mf_codec_h264_init(); presumably releases encoder/decoder via
+// mf_codec_h264_close() — close() body not in view, confirm it is idempotent.
+int mf_codec_h264_deinit(mf_codec_h264_t* self)
+{
+	if(!self){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	mf_codec_h264_close((tmedia_codec_t*)self);
+
+	return 0;
+}
\ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_config.h b/plugins/pluginWinMF/plugin_win_mf_config.h
new file mode 100644
index 0000000..f4f692a
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_config.h
@@ -0,0 +1,75 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#ifndef PLUGIN_WIN_MF_CONFIG_H
+#define PLUGIN_WIN_MF_CONFIG_H
+
+#ifdef __SYMBIAN32__
+#undef _WIN32 /* Because of WINSCW */
+#endif
+
+
+// Windows (XP/Vista/7/CE and Windows Mobile) macro definition
+#if defined(WIN32)|| defined(_WIN32) || defined(_WIN32_WCE)
+#	define PLUGIN_WIN_MF_UNDER_WINDOWS	1
+#	if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP || WINAPI_FAMILY == WINAPI_FAMILY_APP)
+#		define PLUGIN_WIN_MF_UNDER_WINDOWS_RT		1
+#	endif
+#endif
+
+// DLL export/import decorators: PLUGIN_WIN_MFP_EXPORTS is defined only when building the plugin itself
+#if (PLUGIN_WIN_MF_UNDER_WINDOWS || defined(__SYMBIAN32__)) && defined(PLUGIN_WIN_MFP_EXPORTS)
+# 	define PLUGIN_WIN_MFP_API		__declspec(dllexport)
+# 	define PLUGIN_WIN_MFP_GEXTERN extern __declspec(dllexport)
+#elif (PLUGIN_WIN_MF_UNDER_WINDOWS || defined(__SYMBIAN32__)) && !defined(PLUGIN_WIN_MFP_IMPORTS_IGNORE)
+# 	define PLUGIN_WIN_MFP_API __declspec(dllimport)
+# 	define PLUGIN_WIN_MFP_GEXTERN __declspec(dllimport)
+#else
+#	define PLUGIN_WIN_MFP_API
+#	define PLUGIN_WIN_MFP_GEXTERN	extern
+#endif
+
+// x86
+#if defined(__x86_64__) || defined(__x86__) || defined(__i386__)
+#	define PLUGIN_WIN_MF_UNDER_X86	1
+#endif
+
+// Guards against C++ name mangling
+#ifdef __cplusplus
+#	define PLUGIN_WIN_MF_BEGIN_DECLS extern "C" {
+#	define PLUGIN_WIN_MF_END_DECLS }
+#else
+#	define PLUGIN_WIN_MF_BEGIN_DECLS
+#	define PLUGIN_WIN_MF_END_DECLS
+#endif
+
+// MSVC has no C99 "inline"; also silence CRT deprecation warnings
+#ifdef _MSC_VER
+#	define inline __inline
+#	define _CRT_SECURE_NO_WARNINGS
+#	define _ALLOW_KEYWORD_MACROS
+#endif
+
+#include <stdint.h>
+#ifdef __SYMBIAN32__
+#include <stdlib.h>
+#endif
+
+#if HAVE_CONFIG_H
+	#include <config.h>
+#endif
+
+#endif // PLUGIN_WIN_MF_CONFIG_H
diff --git a/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx b/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
new file mode 100644
index 0000000..026f510
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
@@ -0,0 +1,163 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+
+#include "tinydav/audio/tdav_consumer_audio.h"
+
+#include "tsk_debug.h"
+
+// MF audio consumer context: base audio consumer plus a start/stop flag.
+typedef struct plugin_win_mf_consumer_audio_s
+{
+	TDAV_DECLARE_CONSUMER_AUDIO; // base audio consumer (declared by tinydav)
+
+	bool bStarted; // set by start(), cleared by stop()
+}
+plugin_win_mf_consumer_audio_t;
+
+
+/* ============ Consumer Interface ================= */
+// Forwards parameter updates to the base audio consumer.
+// The previous version kept an unused local ("pSelf") and an empty
+// "if(ret == 0){}" branch; both were dead code and have been removed.
+static int plugin_win_mf_consumer_audio_set(tmedia_consumer_t* self, const tmedia_param_t* param)
+{
+	return tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+}
+
+// Prepare callback: nothing to allocate for now, only validates the instance.
+static int plugin_win_mf_consumer_audio_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+	plugin_win_mf_consumer_audio_t* pConsumer = (plugin_win_mf_consumer_audio_t*)self;
+	if(!pConsumer){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	return 0;
+}
+
+// Marks the consumer as started.
+// Fixed: the previous version dereferenced "pSelf" without a NULL check,
+// unlike every sibling callback (prepare/stop) in this file.
+static int plugin_win_mf_consumer_audio_start(tmedia_consumer_t* self)
+{
+	plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	pSelf->bStarted = true;
+
+	return 0;
+}
+
+// Hands an already-decoded audio frame to the base audio consumer.
+static int plugin_win_mf_consumer_audio_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+	if(!self || !buffer || !size){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	/* buffer is already decoded */
+	return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
+}
+
+// Pause callback: intentionally a no-op for this consumer.
+static int plugin_win_mf_consumer_audio_pause(tmedia_consumer_t* self)
+{
+	return 0;
+}
+
+// Stops the consumer; succeeds silently when it was never started.
+static int plugin_win_mf_consumer_audio_stop(tmedia_consumer_t* self)
+{
+	plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if(!pSelf->bStarted){
+		TSK_DEBUG_INFO("WinMF audio consumer not started");
+		return 0;
+	}
+
+	/* should be done here */
+	pSelf->bStarted = false;
+
+	return 0;
+}
+
+
+//
+// Windows Media Foundation (MF) audio consumer object definition
+//
+/* constructor */
+static tsk_object_t* plugin_win_mf_consumer_audio_ctor(tsk_object_t * self, va_list * app)
+{
+	MFUtils::Startup(); // Media Foundation startup — presumably ref-counted; body not in view
+
+	plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
+	if(pSelf){
+		/* init base */
+		tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(pSelf));
+		/* init self */
+
+	}
+	return self;
+}
+/* destructor: stops the consumer if still running, then deinits the base */
+static tsk_object_t* plugin_win_mf_consumer_audio_dtor(tsk_object_t * self)
+{
+	plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
+	if(pSelf){
+		/* stop */
+		if(pSelf->bStarted){
+			plugin_win_mf_consumer_audio_stop(TMEDIA_CONSUMER(pSelf));
+		}
+
+		/* deinit base */
+		tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(pSelf));
+		/* deinit self */
+
+	}
+
+	return self;
+}
+/* object definition (tsk_object_t vtable) */
+static const tsk_object_def_t plugin_win_mf_consumer_audio_def_s =
+{
+	sizeof(plugin_win_mf_consumer_audio_t),
+	plugin_win_mf_consumer_audio_ctor,
+	plugin_win_mf_consumer_audio_dtor,
+	tdav_consumer_audio_cmp,
+};
+/* plugin definition: callbacks registered with the media layer */
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_audio_plugin_def_s =
+{
+	&plugin_win_mf_consumer_audio_def_s,
+
+	tmedia_audio,
+	"Windows Media Foundation audio consumer",
+
+	plugin_win_mf_consumer_audio_set,
+	plugin_win_mf_consumer_audio_prepare,
+	plugin_win_mf_consumer_audio_start,
+	plugin_win_mf_consumer_audio_consume,
+	plugin_win_mf_consumer_audio_pause,
+	plugin_win_mf_consumer_audio_stop
+};
+const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_audio_plugin_def_t = &plugin_win_mf_consumer_audio_plugin_def_s;
diff --git a/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx b/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
new file mode 100644
index 0000000..f6bef59
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
@@ -0,0 +1,1620 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+
+#include "tinymedia/tmedia_consumer.h"
+
+#include "tsk_safeobj.h"
+#include "tsk_string.h"
+#include "tsk_thread.h"
+#include "tsk_debug.h"
+
+#include <initguid.h>
+#include <assert.h>
+
+// Whether to use Direct3D device for direct rendering or Media Foundation topology and custom source
+// Using Media Foundation (MF) introduces delay when the input fps is different than the one in the custom src.
+// It's very hard to have something accurate when using MF because the input FPS changes depending on the congestion control. D3D is the best choice as frames are displayed as they arrive
+#if !defined(PLUGIN_MF_CV_USE_D3D9)
+# define PLUGIN_MF_CV_USE_D3D9 1
+#endif
+
+/******* ********/
+
+#if PLUGIN_MF_CV_USE_D3D9
+
+#include <d3d9.h>
+#include <dxva2api.h>
+
+#ifdef _MSC_VER
+#pragma comment(lib, "d3d9")
+#endif
+
+const DWORD NUM_BACK_BUFFERS = 2; // swap-chain back-buffer count (see CreateSwapChain)
+
+// Forward declarations for the D3D9 rendering helpers defined below.
+static HRESULT CreateDeviceD3D9(
+	HWND hWnd,
+	IDirect3DDevice9** ppDevice,
+	IDirect3D9 **ppD3D,
+	D3DPRESENT_PARAMETERS &d3dpp
+	);
+static HRESULT TestCooperativeLevel(
+	struct plugin_win_mf_consumer_video_s *pSelf
+	);
+static HRESULT CreateSwapChain(
+	HWND hWnd,
+	UINT32 nFrameWidth,
+	UINT32 nFrameHeight,
+	IDirect3DDevice9* pDevice,
+	IDirect3DSwapChain9 **ppSwapChain);
+
+static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+
+static inline HWND Window(struct plugin_win_mf_consumer_video_s *pSelf);
+static inline LONG Width(const RECT& r);
+static inline LONG Height(const RECT& r);
+static inline RECT CorrectAspectRatio(const RECT& src, const MFRatio& srcPAR);
+static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst);
+static inline HRESULT UpdateDestinationRect(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bForce = FALSE);
+static HRESULT ResetDevice(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bUpdateDestinationRect = FALSE);
+static HRESULT SetFullscreen(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bFullScreen);
+static HWND CreateFullScreenWindow(struct plugin_win_mf_consumer_video_s *pSelf);
+static HRESULT HookWindow(struct plugin_win_mf_consumer_video_s *pSelf, HWND hWnd);
+static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf);
+
+
+// D3D9 video consumer context.
+typedef struct plugin_win_mf_consumer_video_s
+{
+	TMEDIA_DECLARE_CONSUMER;
+
+	BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked; // lifecycle/display flags
+	BOOL bPluginFireFox, bPluginWebRTC4All; // set via "plugin-firefox"/"plugin-webrtc4all" params
+	HWND hWindow; // rendering window, supplied via the "remote-hwnd" param
+	WNDPROC wndProc; // presumably the original proc saved by HookWindow() — body not in view
+	HWND hWindowFullScreen; // full-screen window (hidden by stop())
+	RECT rcWindow;
+	RECT rcDest; // destination rect used by StretchRect() in consume()
+	MFRatio pixelAR; // pixel aspect ratio; 1:1 by default (see ctor)
+
+	UINT32 nNegWidth; // negotiated input frame width
+	UINT32 nNegHeight; // negotiated input frame height
+	UINT32 nNegFps;
+
+	D3DLOCKED_RECT rcLock;
+	IDirect3DDevice9* pDevice;
+	IDirect3D9 *pD3D;
+	IDirect3DSwapChain9 *pSwapChain;
+	D3DPRESENT_PARAMETERS d3dpp;
+
+	TSK_DECLARE_SAFEOBJ; // serializes set() and consume() (see tsk_safeobj_lock calls)
+}
+plugin_win_mf_consumer_video_t;
+
+static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf);
+
+/* ============ Media Consumer Interface ================= */
+// Runtime parameter handler: "remote-hwnd" (int64), "fullscreen",
+// "plugin-firefox", "plugin-webrtc4all" (int32). Returns 0 on success, -1 otherwise.
+static int plugin_win_mf_consumer_video_set(tmedia_consumer_t *self, const tmedia_param_t* param)
+{
+	int ret = 0;
+	HRESULT hr = S_OK;
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	if(!self || !param)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(param->value_type == tmedia_pvt_int64)
+	{
+		if(tsk_striequals(param->key, "remote-hwnd"))
+		{
+			HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+			if(hWnd != pSelf->hWindow)
+			{
+				tsk_safeobj_lock(pSelf); // block consumer thread
+				pSelf->hWindow = hWnd;
+				if(pSelf->bPrepared)
+				{
+					// device was created against the old HWND: recreate it
+					hr = ResetDevice(pSelf);
+				}
+				tsk_safeobj_unlock(pSelf); // unblock consumer thread
+			}
+		}
+	}
+	else if(param->value_type == tmedia_pvt_int32)
+	{
+		if(tsk_striequals(param->key, "fullscreen"))
+		{
+			BOOL bFullScreen = !!*((int32_t*)param->value);
+			TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
+			CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
+		}
+		else if(tsk_striequals(param->key, "create-on-current-thead"))
+		{
+			// NOTE(review): key spelling ("thead") presumably matches senders elsewhere — confirm before renaming
+			// DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+		}
+		else if(tsk_striequals(param->key, "plugin-firefox"))
+		{
+			pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
+		}
+		else if(tsk_striequals(param->key, "plugin-webrtc4all"))
+		{
+			pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
+		}
+	}
+
+	CHECK_HR(hr);
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+
+// Negotiates sizes with the codec and (when an HWND is already known) creates the
+// D3D9 device and swap chain. Returns 0 on success, -1 otherwise.
+static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	/* fixed: was "!pSelf || !codec && codec->plugin", which both dereferenced a NULL
+	   "codec" and never rejected a codec with a NULL plugin */
+	if(!pSelf || !codec || !codec->plugin){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if(pSelf->bPrepared){
+		TSK_DEBUG_WARN("D3D9 video consumer already prepared");
+		return -1;
+	}
+
+	// FIXME: DirectShow requires flipping but not D3D9
+	// The Core library always tries to flip when OSType==Win32. Must be changed
+	TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
+
+	HRESULT hr = S_OK;
+	HWND hWnd = Window(pSelf);
+
+	TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+	TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+	TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+	// default the display size to the input size when the app did not set one
+	if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
+		TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+	}
+	if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
+		TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+	}
+
+	pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
+	pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
+	pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+	TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
+		pSelf->nNegFps,
+		pSelf->nNegWidth,
+		pSelf->nNegHeight);
+
+	TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+	TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
+
+	// The window handle is not created until the call is connect (incoming only) - At least on Internet Explorer 10
+	if(hWnd && !pSelf->bPluginWebRTC4All)
+	{
+		CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+		CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+	}
+	else
+	{
+		if(hWnd && pSelf->bPluginWebRTC4All)
+		{
+			TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creating until session get connected");
+		}
+		else
+		{
+			TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
+		}
+	}
+
+bail:
+	pSelf->bPrepared = SUCCEEDED(hr);
+	return pSelf->bPrepared ? 0 : -1;
+}
+
+// Marks the consumer as started; requires prepare() to have succeeded first.
+static int plugin_win_mf_consumer_video_start(tmedia_consumer_t* self)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if(pSelf->bStarted){
+		TSK_DEBUG_INFO("D3D9 video consumer already started");
+		return 0;
+	}
+	if(!pSelf->bPrepared){
+		TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+
+	pSelf->bPaused = false;
+	pSelf->bStarted = true;
+
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Renders one RGB32 frame: copies it into the swap-chain back buffer, then
+// letter-boxes/stretches it onto the device back buffer and presents.
+// Lazily creates the D3D9 device/swap chain when the HWND appears late, and
+// recreates the swap chain when the input size changes. CHECK_HR jumps to "bail".
+static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	HRESULT hr = S_OK;
+	HWND hWnd = Window(pSelf);
+
+	IDirect3DSurface9 *pSurf = NULL;
+	IDirect3DSurface9 *pBB = NULL;
+
+	if(!pSelf)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1; // because of the mutex lock do it here
+	}
+
+	tsk_safeobj_lock(pSelf);
+
+	if(!buffer || !size)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+
+	if(!pSelf->bStarted)
+	{
+		TSK_DEBUG_INFO("D3D9 video consumer not started");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	if(!hWnd)
+	{
+		TSK_DEBUG_INFO("Do not draw frame because HWND not set");
+		goto bail; // not an error as the application can decide to set the HWND at any time
+	}
+
+	if (!pSelf->bWindowHooked)
+	{
+		// Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
+		CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
+	}
+
+	// lazy device creation: prepare() may have deferred it while HWND was unknown
+	if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain)
+	{
+		if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain)
+		{
+			CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
+		}
+
+		if(hWnd)
+		{
+			// means HWND was not set but defined now
+			pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+			pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+			CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+			CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+		}
+	}
+
+	// input size changed (e.g. remote side renegotiated): rebuild the swap chain
+	if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
+		TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+			pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+			pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+		// Update media type
+
+		SafeRelease(&pSelf->pSwapChain);
+		CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
+
+		pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+		pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+		// Update Destination will do noting if the window size haven't changed.
+		// Force updating the destination rect if negotiated size change
+		CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
+	}
+
+	CHECK_HR(hr = TestCooperativeLevel(pSelf));
+
+	CHECK_HR(hr = UpdateDestinationRect(pSelf));
+
+	CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
+	CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
+
+	// Fast copy() using MMX, SSE, or SSE2
+	// (nNegWidth << 2) == bytes per row of 32-bit RGB pixels
+	hr = MFCopyImage(
+		(BYTE*)pSelf->rcLock.pBits,
+		pSelf->rcLock.Pitch,
+		(BYTE*)buffer,
+		(pSelf->nNegWidth << 2),
+		(pSelf->nNegWidth << 2),
+		pSelf->nNegHeight
+	);
+	if(FAILED(hr))
+	{
+		// unlock() before leaving
+		pSurf->UnlockRect();
+		CHECK_HR(hr);
+	}
+
+	CHECK_HR(hr = pSurf->UnlockRect());
+
+	// Color fill the back buffer
+	CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
+#if METROPOLIS
+	CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0x00, 0x00, 0x00)));
+#else
+	CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
+#endif
+
+	// Resize keeping aspect ratio and Blit the frame (required)
+	hr = pSelf->pDevice->StretchRect(
+		pSurf,
+		NULL,
+		pBB,
+		&pSelf->rcDest/*NULL*/,
+		D3DTEXF_LINEAR
+	); // could fail when display is being resized
+	if(SUCCEEDED(hr))
+	{
+		// Present the frame
+		CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
+	}
+	else
+	{
+		TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
+	}
+
+bail:
+	SafeRelease(&pSurf);
+	SafeRelease(&pBB);
+
+	tsk_safeobj_unlock(pSelf);
+
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Marks the consumer as paused.
+// Fixed: the "not started" log said "producer" instead of "consumer"; also removed
+// the dead "hr" local that was never assigned after its initialization.
+// NOTE(review): consume() does not consult bPaused — confirm this flag is read elsewhere.
+static int plugin_win_mf_consumer_video_pause(tmedia_consumer_t* self)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if(!pSelf->bStarted)
+	{
+		TSK_DEBUG_INFO("D3D9 video consumer not started");
+		return 0;
+	}
+
+	pSelf->bPaused = true;
+
+	return 0;
+}
+
+// Stops rendering, hides the full-screen window and releases D3D9 resources
+// (via unprepare); start() requires a new prepare() afterwards.
+static int plugin_win_mf_consumer_video_stop(tmedia_consumer_t* self)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+
+	pSelf->bStarted = false;
+	pSelf->bPaused = false;
+
+	if(pSelf->hWindowFullScreen)
+	{
+		::InvalidateRect(pSelf->hWindowFullScreen, NULL, FALSE);
+		::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+	}
+
+	// next start() will be called after prepare()
+	return _plugin_win_mf_consumer_video_unprepare(pSelf);
+}
+
+// Releases the D3D9 device/swap chain and unhooks the window.
+// Must only be called once the consumer is stopped.
+static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf)
+{
+	if(!pSelf)
+	{
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	UnhookWindow(pSelf);
+
+	if(pSelf->bStarted)
+	{
+		// plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+		TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+		return -1;
+	}
+
+	SafeRelease(&pSelf->pDevice);
+	SafeRelease(&pSelf->pD3D);
+	SafeRelease(&pSelf->pSwapChain);
+
+	pSelf->bPrepared = false;
+
+	return 0;
+}
+
+
+//
+// D3D9 video consumer object definition
+//
+/* constructor: inits the base consumer and the defaults used before prepare() */
+static tsk_object_t* plugin_win_mf_consumer_video_ctor(tsk_object_t * self, va_list * app)
+{
+	MFUtils::Startup(); // Media Foundation startup — presumably ref-counted; body not in view
+
+	plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+	if(pSelf){
+		/* init base */
+		tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+		TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+		TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
+
+		/* init self */
+		tsk_safeobj_init(pSelf);
+		TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+		TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+		TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+		TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+		pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1; // 1:1 PAR by default
+	}
+	return self;
+}
+/* destructor: stops if needed, then releases D3D9 resources and the safeobj */
+static tsk_object_t* plugin_win_mf_consumer_video_dtor(tsk_object_t * self)
+{
+	plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+	if(pSelf){
+		/* stop */
+		if(pSelf->bStarted)
+		{
+			plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
+		}
+
+		/* deinit base */
+		tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+		/* deinit self */
+		_plugin_win_mf_consumer_video_unprepare(pSelf); // no-op if stop() already ran it
+		tsk_safeobj_deinit(pSelf);
+	}
+
+	return self;
+}
+/* object definition (tsk_object_t vtable; no comparator) */
+static const tsk_object_def_t plugin_win_mf_consumer_video_def_s =
+{
+	sizeof(plugin_win_mf_consumer_video_t),
+	plugin_win_mf_consumer_video_ctor,
+	plugin_win_mf_consumer_video_dtor,
+	tsk_null,
+};
+/* plugin definition: callbacks registered with the media layer */
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s =
+{
+	&plugin_win_mf_consumer_video_def_s,
+
+	tmedia_video,
+	"D3D9 video consumer",
+
+	plugin_win_mf_consumer_video_set,
+	plugin_win_mf_consumer_video_prepare,
+	plugin_win_mf_consumer_video_start,
+	plugin_win_mf_consumer_video_consume,
+	plugin_win_mf_consumer_video_pause,
+	plugin_win_mf_consumer_video_stop
+};
+const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t = &plugin_win_mf_consumer_video_plugin_def_s;
+
+// Helper functions
+
+// Creates an IDirect3D9 object and a windowed X8R8G8B8 HAL device for "hWnd".
+// On success fills d3dpp with the present parameters used; on failure releases
+// any partially created objects. Both out pointers must be NULL on entry.
+static HRESULT CreateDeviceD3D9(
+	HWND hWnd,
+	IDirect3DDevice9** ppDevice,
+	IDirect3D9 **ppD3D,
+	D3DPRESENT_PARAMETERS &d3dpp
+	)
+{
+	HRESULT hr = S_OK;
+
+	D3DDISPLAYMODE mode = { 0 };
+	D3DPRESENT_PARAMETERS pp = {0};
+
+	if(!ppDevice || *ppDevice || !ppD3D || *ppD3D)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION)))
+	{
+		CHECK_HR(hr = E_OUTOFMEMORY);
+	}
+
+	CHECK_HR(hr = (*ppD3D)->GetAdapterDisplayMode(
+		D3DADAPTER_DEFAULT,
+		&mode
+		));
+
+	// verify the adapter supports X8R8G8B8 windowed rendering before creating the device
+	CHECK_HR(hr = (*ppD3D)->CheckDeviceType(
+		D3DADAPTER_DEFAULT,
+		D3DDEVTYPE_HAL,
+		mode.Format,
+		D3DFMT_X8R8G8B8,
+		TRUE // windowed
+		));
+	pp.BackBufferFormat = D3DFMT_X8R8G8B8;
+	pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+	pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE; // present without waiting for vsync
+	pp.Windowed = TRUE;
+	pp.hDeviceWindow = hWnd;
+	CHECK_HR(hr = (*ppD3D)->CreateDevice(
+		D3DADAPTER_DEFAULT,
+		D3DDEVTYPE_HAL,
+		hWnd,
+		D3DCREATE_HARDWARE_VERTEXPROCESSING,
+		&pp,
+		ppDevice
+		));
+
+	d3dpp = pp;
+
+bail:
+	if(FAILED(hr))
+	{
+		SafeRelease(ppD3D);
+		SafeRelease(ppDevice);
+	}
+	return hr;
+}
+
+// Checks the D3D9 device state before rendering:
+// - D3DERR_DEVICELOST: returns S_OK (device cannot be reset yet; caller skips gracefully)
+// - D3DERR_DEVICENOTRESET: resets the device (and destination rect)
+static HRESULT TestCooperativeLevel(
+	struct plugin_win_mf_consumer_video_s *pSelf
+	)
+{
+	HRESULT hr = S_OK;
+
+	if (!pSelf || !pSelf->pDevice)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	switch((hr = pSelf->pDevice->TestCooperativeLevel()))
+	{
+		case D3D_OK:
+		{
+			break;
+		}
+
+		case D3DERR_DEVICELOST:
+		{
+			hr = S_OK; // not recoverable yet; treat as success so the caller retries later
+			break;
+		}
+
+		case D3DERR_DEVICENOTRESET:
+		{
+			hr = ResetDevice(pSelf, TRUE);
+			break;
+		}
+
+		default:
+		{
+			break;
+		}
+	}
+
+	CHECK_HR(hr);
+
+bail:
+	return hr;
+}
+
+// Creates an additional swap chain sized to the video frame (nFrameWidth x nFrameHeight)
+// with a lockable X8R8G8B8 back buffer (locked in consume() for the pixel copy).
+// "*ppSwapChain" must be NULL on entry.
+static HRESULT CreateSwapChain(
+	HWND hWnd,
+	UINT32 nFrameWidth,
+	UINT32 nFrameHeight,
+	IDirect3DDevice9* pDevice,
+	IDirect3DSwapChain9 **ppSwapChain
+	)
+{
+	HRESULT hr = S_OK;
+
+	D3DPRESENT_PARAMETERS pp = { 0 };
+
+	if(!pDevice || !ppSwapChain || *ppSwapChain)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	pp.BackBufferWidth  = nFrameWidth;
+	pp.BackBufferHeight = nFrameHeight;
+	pp.Windowed = TRUE;
+	pp.SwapEffect = D3DSWAPEFFECT_FLIP;
+	pp.hDeviceWindow = hWnd;
+	pp.BackBufferFormat = D3DFMT_X8R8G8B8;
+	pp.Flags =
+		D3DPRESENTFLAG_VIDEO | D3DPRESENTFLAG_DEVICECLIP |
+		D3DPRESENTFLAG_LOCKABLE_BACKBUFFER; // back buffer must be lockable for MFCopyImage
+	pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+	pp.BackBufferCount = NUM_BACK_BUFFERS;
+
+	CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
+
+bail:
+	return hr;
+}
+
+// Returns the active rendering window: the full-screen one when full-screen is on,
+// otherwise the application-supplied window. NULL when pSelf is NULL.
+static inline HWND Window(struct plugin_win_mf_consumer_video_s *pSelf)
+{
+	if (!pSelf) {
+		return NULL;
+	}
+	return pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow;
+}
+
+// Horizontal extent of a RECT.
+static inline LONG Width(const RECT& r)
+{
+	return r.right - r.left;
+}
+
+// Vertical extent of a RECT.
+static inline LONG Height(const RECT& r)
+{
+	return r.bottom - r.top;
+}
+
+//-----------------------------------------------------------------------------
+// CorrectAspectRatio
+//
+// Converts a rectangle from the source's pixel aspect ratio (PAR) to 1:1 PAR.
+// Returns the corrected rectangle.
+//
+// For example, a 720 x 486 rect with a PAR of 9:10, when converted to 1x1 PAR,
+// is stretched to 720 x 540.
+// Copyright (C) Microsoft
+//-----------------------------------------------------------------------------
+
+static inline RECT CorrectAspectRatio(const RECT& src, const MFRatio& srcPAR)
+{
+	// Same size as src, translated to the origin (0,0).
+	RECT rc = {0, 0, src.right - src.left, src.bottom - src.top};
+
+	if (srcPAR.Numerator == srcPAR.Denominator)
+	{
+		// 1:1 PAR (or equal ratio terms): nothing to correct.
+		return rc;
+	}
+
+	if (srcPAR.Numerator > srcPAR.Denominator)
+	{
+		// "Wide" pixels: stretch the width.
+		rc.right = MulDiv(rc.right, srcPAR.Numerator, srcPAR.Denominator);
+	}
+	else
+	{
+		// "Tall" pixels: stretch the height.
+		rc.bottom = MulDiv(rc.bottom, srcPAR.Denominator, srcPAR.Numerator);
+	}
+	return rc;
+}
+
+//-------------------------------------------------------------------
+// LetterBoxDstRect
+//
+// Takes a src rectangle and constructs the largest possible
+// destination rectangle within the specified destination rectangle
+// such that the video maintains its current shape.
+//
+// This function assumes that pels are the same shape within both the
+// source and destination rectangles.
+// Copyright (C) Microsoft
+//-------------------------------------------------------------------
+
+static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst)
+{
+	// Source and destination extents.
+	const int srcW = Width(rcSrc);
+	const int srcH = Height(rcSrc);
+	const int dstW = Width(rcDst);
+	const int dstH = Height(rcDst);
+
+	int boxW;
+	int boxH;
+
+	if (MulDiv(srcW, dstH, srcH) <= dstW) {
+		// Destination is wider than the scaled source:
+		// column letter boxing ("pillar box" - bars left/right).
+		boxW = MulDiv(dstH, srcW, srcH);
+		boxH = dstH;
+	}
+	else {
+		// Destination is taller: row letter boxing (bars top/bottom).
+		boxW = dstW;
+		boxH = MulDiv(dstW, srcH, srcW);
+	}
+
+	// Center the boxed rectangle inside the destination rectangle.
+	const LONG left = rcDst.left + ((dstW - boxW) >> 1);
+	const LONG top = rcDst.top + ((dstH - boxH) >> 1);
+
+	RECT rc;
+	SetRect(&rc, left, top, left + boxW, top + boxH);
+	return rc;
+}
+
+// Re-computes the letter-boxed destination rectangle (pSelf->rcDest) for the
+// current render window, resetting the D3D9 device first. Skipped unless the
+// window's client area changed since the last call, or bForce is set.
+static inline HRESULT UpdateDestinationRect(plugin_win_mf_consumer_video_t *pSelf, BOOL bForce /*= FALSE*/)
+{
+	HRESULT hr = S_OK;
+	HWND hwnd = Window(pSelf); // Window() is NULL-safe even when pSelf is NULL
+
+	if(!pSelf)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(!hwnd)
+	{
+		CHECK_HR(hr = E_HANDLE);
+	}
+	RECT rcClient;
+	GetClientRect(hwnd, &rcClient);
+
+	// only update destination if window size changed
+	if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top))
+	{
+		// ResetDevice() is called with bUpdateDestinationRect=FALSE (default),
+		// which prevents infinite mutual recursion with this function.
+		CHECK_HR(hr = ResetDevice(pSelf));
+
+		pSelf->rcWindow = rcClient;
+#if 1
+		// Convert the negotiated frame size to square (1:1) pixels, then
+		// letter-box it into the window's client area.
+		RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
+		rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
+		pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
+#else
+		// Older hand-rolled aspect-ratio math kept for reference.
+		long w = rcClient.right - rcClient.left;
+		long h = rcClient.bottom - rcClient.top;
+		float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
+		// (w/h)=ratio =>
+		// 1) h=w/ratio
+		// and
+		// 2) w=h*ratio
+		pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
+		pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
+		pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
+		pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
+#endif
+
+		//::InvalidateRect(hwnd, NULL, FALSE);
+	}
+
+bail:
+	return hr;
+}
+
+// Resets (or re-creates) the D3D9 device and its swap chain for the current
+// render window. When bUpdateDestinationRect is TRUE the destination rect is
+// re-computed afterwards (the flag is the guard against mutual recursion with
+// UpdateDestinationRect()).
+static HRESULT ResetDevice(plugin_win_mf_consumer_video_t *pSelf, BOOL bUpdateDestinationRect /*= FALSE*/)
+{
+	HRESULT hr = S_OK;
+
+	// FIX: validate before locking/dereferencing, consistent with the other
+	// helpers in this file which CHECK_HR(E_POINTER) on a NULL pSelf.
+	if (!pSelf)
+	{
+		return E_POINTER;
+	}
+
+	tsk_safeobj_lock(pSelf);
+
+	HWND hWnd = Window(pSelf);
+
+	if (pSelf->pDevice)
+	{
+		// Try a plain Reset() first; on failure drop everything so the
+		// device/swap chain get re-created from scratch below.
+		D3DPRESENT_PARAMETERS d3dpp = pSelf->d3dpp;
+
+		hr = pSelf->pDevice->Reset(&d3dpp);
+
+		if (FAILED(hr))
+		{
+			SafeRelease(&pSelf->pDevice);
+			SafeRelease(&pSelf->pD3D);
+			SafeRelease(&pSelf->pSwapChain);
+		}
+	}
+
+	if (pSelf->pDevice == NULL && hWnd)
+	{
+		CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+		CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+	}
+
+	if(bUpdateDestinationRect) // endless loop guard
+	{
+		CHECK_HR(hr = UpdateDestinationRect(pSelf));
+	}
+
+bail:
+	tsk_safeobj_unlock(pSelf);
+
+	return hr;
+}
+
+// Switches between windowed and fullscreen rendering. Lazily creates the
+// fullscreen window on first use, then resets the device (if prepared) so
+// drawing retargets the newly selected window (see Window()).
+static HRESULT SetFullscreen(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bFullScreen)
+{
+	HRESULT hr = S_OK;
+	if(!pSelf)
+	{
+		CHECK_HR(hr = E_POINTER);
+	}
+
+	if(pSelf->bFullScreen != bFullScreen) // no-op when the mode is unchanged
+	{
+		tsk_safeobj_lock(pSelf);
+		if(bFullScreen)
+		{
+			// Created once and cached in pSelf->hWindowFullScreen.
+			HWND hWnd = CreateFullScreenWindow(pSelf);
+			if(hWnd)
+			{
+				::ShowWindow(hWnd, SW_SHOWDEFAULT);
+				::UpdateWindow(hWnd);
+			}
+		}
+		else if(pSelf->hWindowFullScreen)
+		{
+			// Leaving fullscreen: hide (do not destroy) the cached window.
+			::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+		}
+		pSelf->bFullScreen = bFullScreen;
+		if(pSelf->bPrepared)
+		{
+			hr = ResetDevice(pSelf);
+		}
+		tsk_safeobj_unlock(pSelf);
+
+		// CHECK_HR is deliberately placed after the unlock so that the
+		// goto-on-failure cannot skip releasing the lock.
+		CHECK_HR(hr);
+	}
+
+bail:
+	return hr;
+}
+
+// Window procedure shared by the fullscreen window (via RegisterClass) and
+// the hooked application window (via HookWindow). Any key press leaves
+// fullscreen mode; background erasing is suppressed to avoid flicker.
+static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
+{
+	switch(uMsg)
+	{
+		case WM_CREATE:
+		case WM_SIZE:
+		case WM_MOVE:
+		{
+			// Nothing to do here: the destination rectangle is re-computed
+			// per frame (see UpdateDestinationRect()).
+			break;
+		}
+
+		case WM_ERASEBKGND:
+		{
+			return TRUE; // avoid background erasing.
+		}
+
+		case WM_CHAR:
+		case WM_KEYUP:
+		{
+			// "Self" is attached by CreateFullScreenWindow() via SetPropA().
+			// FIX: dropped the meaningless dynamic_cast that followed a
+			// C-style cast on this plain (non-polymorphic) C struct.
+			struct plugin_win_mf_consumer_video_s* pSelf = (struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self");
+			if (pSelf)
+			{
+				SetFullscreen(pSelf, FALSE);
+			}
+
+			break;
+		}
+	}
+
+	return DefWindowProc(hWnd, uMsg, wParam, lParam);
+}
+
+// Lazily creates (and caches in pSelf->hWindowFullScreen) a borderless,
+// always-on-top window covering the whole primary screen, with the consumer
+// attached as the "Self" property so WndProc can find it back.
+// Returns the cached handle, or NULL on failure / NULL pSelf.
+static HWND CreateFullScreenWindow(struct plugin_win_mf_consumer_video_s *pSelf)
+{
+	if(!pSelf)
+	{
+		return NULL;
+	}
+
+	if(!pSelf->hWindowFullScreen)
+	{
+		WNDCLASS wc = {0};
+
+		wc.lpfnWndProc = WndProc;
+		wc.hInstance = GetModuleHandle(NULL);
+		wc.hCursor = LoadCursor(NULL, IDC_ARROW);
+		wc.lpszClassName = L"WindowClass";
+		RegisterClass(&wc);
+		// FIX: WS_EX_TOPMOST is an *extended* style and belongs in the first
+		// (dwExStyle) argument of CreateWindowEx; it was previously OR'ed
+		// into dwStyle while dwExStyle was NULL, so the window was not topmost.
+		pSelf->hWindowFullScreen = ::CreateWindowEx(
+			WS_EX_TOPMOST,
+			wc.lpszClassName,
+			L"Doubango's Video Consumer Fullscreen",
+			WS_POPUP,
+			0, 0,
+			GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
+			NULL,
+			NULL,
+			GetModuleHandle(NULL),
+			NULL);
+
+		if (pSelf->hWindowFullScreen)
+		{
+			// Attach the consumer so WndProc can retrieve it (GetPropA).
+			SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
+		}
+	}
+	return pSelf->hWindowFullScreen;
+}
+
+// Subclasses hWnd with our WndProc (saving the previous procedure so
+// UnhookWindow() can restore it) and records it as the render window.
+// Passing a NULL hWnd simply unhooks the previous window.
+static HRESULT HookWindow(plugin_win_mf_consumer_video_s *pSelf, HWND hWnd)
+{
+	HRESULT hr = S_OK;
+
+	tsk_safeobj_lock(pSelf);
+
+	// Always detach from the previously hooked window first.
+	CHECK_HR(hr = UnhookWindow(pSelf));
+
+	if ((pSelf->hWindow = hWnd)) {
+		pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
+		if (!pSelf->wndProc) {
+			TSK_DEBUG_ERROR("SetWindowLongPtr() failed with errcode=%d", GetLastError());
+			CHECK_HR(hr = E_FAIL);
+		}
+		pSelf->bWindowHooked = TRUE;
+	}
+bail:
+	tsk_safeobj_unlock(pSelf);
+	// FIX: propagate the real status; the function previously returned a
+	// hard-coded S_OK, silently swallowing hook failures.
+	return hr;
+}
+
+// Restores the original window procedure on the hooked window (if any),
+// forces a repaint of it, and clears the hooked flag.
+static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf)
+{
+	tsk_safeobj_lock(pSelf);
+
+	if (pSelf->hWindow) {
+		if (pSelf->wndProc) {
+			// Put the application's own procedure back in place.
+			SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
+			pSelf->wndProc = NULL;
+		}
+		// Repaint now that we no longer draw into this window.
+		::InvalidateRect(pSelf->hWindow, NULL, FALSE);
+	}
+	pSelf->bWindowHooked = FALSE;
+
+	tsk_safeobj_unlock(pSelf);
+	return S_OK;
+}
+
+
+#else /* !PLUGIN_MF_CV_USE_D3D9 */
+
+#include "internals/mf_custom_src.h"
+#include "internals/mf_display_watcher.h"
+#include "internals/mf_codec.h"
+
+#include <KS.h>
+#include <Codecapi.h>
+
+// 0: {{[Source] -> (VideoProcessor) -> SampleGrabber}} , {{[Decoder]}} -> RTP
+// 1: {{[Source] -> (VideoProcessor) -> [Decoder] -> SampleGrabber}} -> RTP
+// (VideoProcessor) is optional
+// "{{" and "}}" defines where the graph starts and ends respectively. For "0", [Decoder] is a stand-alone IMFTransform.
+#if !defined(PLUGIN_MF_CV_BUNDLE_CODEC)
+# define PLUGIN_MF_CV_BUNDLE_CODEC 0
+#endif
+
+// Uncompressed video frame will come from Doubango core and it's up to the converter to match the requested chroma.
+// Supported values: NV12, I420, RGB32 and RGB24. (RGB formats are not recommended because of performance issues)
+// To avoid chroma conversion (performance issues) we use NV12 when the codec is bundled as MediaFoundation codecs most likely only support this format.
+// NV12 is the native format for media foundation codecs (e.g. Intel Quick Sync) and the GPU.
+// I420 is the native format for FFmpeg, libvpx and libtheora.
+const GUID kDefaultUncompressedType
+#if PLUGIN_MF_CV_BUNDLE_CODEC
+= MFVideoFormat_NV12;
+#else
+= MFVideoFormat_I420;
+#endif
+
+DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
+0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+
+static void* TSK_STDCALL RunSessionThread(void *pArg);
+static int _plugin_win_mf_consumer_video_unprepare(struct plugin_win_mf_consumer_video_s* pSelf);
+
+// Context for the Media Foundation video consumer (non-D3D9 rendering path:
+// custom media source -> optional decoder -> EVR, driven by an IMFMediaSession).
+typedef struct plugin_win_mf_consumer_video_s
+{
+	TMEDIA_DECLARE_CONSUMER;
+
+	bool bStarted, bPrepared; // lifecycle flags set by start()/prepare()
+	HWND hWindow; // render window provided via the "remote-hwnd" parameter
+	tsk_thread_handle_t* ppTread[1]; // session event-pump thread (RunSessionThread)
+
+	UINT32 nNegWidth; // negotiated frame width, pixels
+	UINT32 nNegHeight; // negotiated frame height, pixels
+	UINT32 nNegFps; // negotiated frame rate, fps
+
+	MFCodecVideo *pDecoder; // optional bundled H.264 decoder (NULL => RAW frames)
+	IMFMediaSession *pSession; // media session running the topology
+	CMFSource *pSource; // custom source fed by consume()
+	IMFActivate *pSinkActivate; // EVR activation object
+	DisplayWatcher* pDisplayWatcher; // tracks window changes/fullscreen for the EVR
+	IMFTopology *pTopologyFull; // resolved (full) topology
+	IMFTopology *pTopologyPartial; // partial topology (source/decoder/sink)
+	IMFMediaType *pOutType; // media type delivered to the custom source
+}
+plugin_win_mf_consumer_video_t;
+
+
+
+/* ============ Media Consumer Interface ================= */
+/* ============ Media Consumer Interface ================= */
+// Runtime parameter setter. Supported keys:
+//   "remote-hwnd" (int64): render window handle, forwarded to the EVR watcher
+//   "fullscreen" (int32): toggle fullscreen via the EVR watcher
+//   "create-on-current-thead" / "plugin-firefox": kept for API compatibility,
+//   currently no-ops (DirectShow-era code left commented out).
+// Returns 0 on success, -1 on failure.
+static int plugin_win_mf_consumer_video_set(tmedia_consumer_t *self, const tmedia_param_t* param)
+{
+	int ret = 0;
+	HRESULT hr = S_OK;
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	if(!self || !param){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if(param->value_type == tmedia_pvt_int64){
+		if(tsk_striequals(param->key, "remote-hwnd")){
+			HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+			if(hWnd != pSelf->hWindow) // ignore redundant updates
+			{
+				pSelf->hWindow = hWnd;
+				if(pSelf->pDisplayWatcher) // NULL until prepare()
+				{
+					CHECK_HR(hr = pSelf->pDisplayWatcher->SetHwnd(hWnd));
+				}
+			}
+		}
+	}
+	else if(param->value_type == tmedia_pvt_int32){
+		if(tsk_striequals(param->key, "fullscreen")){
+			if(pSelf->pDisplayWatcher)
+			{
+				CHECK_HR(hr = pSelf->pDisplayWatcher->SetFullscreen(!!*((int32_t*)param->value)));
+			}
+		}
+		else if(tsk_striequals(param->key, "create-on-current-thead")){
+			// DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+		}
+		else if(tsk_striequals(param->key, "plugin-firefox")){
+			/*DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
+			if(DSCONSUMER(self)->display){
+				DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
+			}*/
+		}
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+
+// Prepares the consumer for the negotiated codec: records the negotiated
+// size/fps, picks the display chroma, optionally bundles a hardware H.264
+// decoder, then builds the custom source, media session, EVR sink, topology
+// and the display watcher. Returns 0 on success, -1 on failure.
+static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	// FIX: the original test was "!pSelf || !codec && codec->plugin", which
+	// (by precedence) dereferences codec->plugin exactly when codec is NULL.
+	if(!pSelf || !codec || !codec->plugin){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if(pSelf->bPrepared){
+		TSK_DEBUG_WARN("MF video consumer already prepared");
+		return -1;
+	}
+
+	// FIXME: DirectShow requires flipping but not MF
+	// The Core library always tries to flip when OSType==Win32. Must be changed
+	TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
+
+	HRESULT hr = S_OK;
+
+	TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+	TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+	TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+	// Display size defaults to the codec's input size when the application
+	// did not force one.
+	if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
+		TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+	}
+	if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
+		TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+	}
+
+	pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
+	pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.display.width;
+	pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+	TSK_DEBUG_INFO("MF video consumer: fps=%d, width=%d, height=%d",
+		pSelf->nNegFps,
+		pSelf->nNegWidth,
+		pSelf->nNegHeight);
+
+	// Map the compile-time uncompressed type to the doubango chroma.
+	if(kDefaultUncompressedType == MFVideoFormat_NV12) {
+		TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_nv12;
+	}
+	else if(kDefaultUncompressedType == MFVideoFormat_I420) {
+		TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
+	}
+	else if(kDefaultUncompressedType == MFVideoFormat_RGB32) {
+		TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+	}
+	else if(kDefaultUncompressedType == MFVideoFormat_RGB24) {
+		TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb24;
+	}
+	else {
+		CHECK_HR(hr = E_NOTIMPL);
+	}
+	TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+	IMFMediaSink* pMediaSink = NULL;
+	IMFAttributes* pSessionAttributes = NULL;
+
+	// Set session attributes
+	CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+	CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+	CHECK_HR(hr = MFCreateMediaType(&pSelf->pOutType));
+	CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+
+#if PLUGIN_MF_CV_BUNDLE_CODEC
+	// Try to bundle a low-latency hardware H.264 decoder in the topology;
+	// falls back to the software path (RAW frames) on any failure.
+	if((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
+		// both Microsoft and Intel encoders support NV12 only as input
+		// static const BOOL kIsEncoder = FALSE;
+		// hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pSelf->pDecoder);
+		pSelf->pDecoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder);
+		if(pSelf->pDecoder)
+		{
+			hr = pSelf->pDecoder->Initialize(
+				pSelf->nNegFps,
+				pSelf->nNegWidth,
+				pSelf->nNegHeight);
+
+			if(FAILED(hr))
+			{
+				SafeRelease(&pSelf->pDecoder);
+				hr = S_OK;
+			}
+		}
+		if(SUCCEEDED(hr) && pSelf->pDecoder) {
+			TMEDIA_CONSUMER(pSelf)->decoder.codec_id = codec->id; // means accept ENCODED frames
+			CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
+		}
+		else {
+			SafeRelease(&pSelf->pDecoder);
+			TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
+		}
+	}
+#endif
+
+	if(!pSelf->pDecoder){
+		CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, kDefaultUncompressedType));
+		TMEDIA_CONSUMER(pSelf)->video.display.chroma = kDefaultUncompressedType == MFVideoFormat_NV12 ? tmedia_chroma_nv12 : tmedia_chroma_yuv420p;
+	}
+	CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+	CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
+	CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, pSelf->nNegWidth, pSelf->nNegHeight));
+	CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, pSelf->nNegFps, 1));
+	CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+
+	CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&pSelf->pSource, pSelf->pOutType));
+
+	// Apply Encoder output type (must be called before SetInputType)
+	//if(pSelf->pDecoder) {
+	//	CHECK_HR(hr = pSelf->pDecoder->SetOutputType(0, pSelf->pOutType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+	//}
+
+	// Create the Media Session.
+	CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
+
+	// Create the EVR activation object.
+	CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSelf->pSinkActivate));
+
+	// Create the topology.
+	CHECK_HR(hr = MFUtils::CreateTopology(
+		pSelf->pSource,
+		pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
+		pSelf->pSinkActivate,
+		NULL/*Preview*/,
+		pSelf->pOutType,
+		&pSelf->pTopologyPartial));
+	// Resolve topology (adds video processors if needed).
+	CHECK_HR(hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pSelf->pTopologyFull));
+
+	// Find EVR
+	CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink));
+
+	// Create EVR watcher
+	pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
+	CHECK_HR(hr);
+
+bail:
+	SafeRelease(&pMediaSink);
+	SafeRelease(&pSessionAttributes);
+
+	pSelf->bPrepared = SUCCEEDED(hr);
+	return pSelf->bPrepared ? 0 : -1;
+}
+
+// Starts the prepared consumer: launches the EVR display watcher, runs the
+// media session, then spawns the asynchronous session event-pump thread.
+// Idempotent when already started. Returns 0 on success, -1 on failure.
+static int plugin_win_mf_consumer_video_start(tmedia_consumer_t* self)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if(pSelf->bStarted){
+		TSK_DEBUG_INFO("MF video consumer already started");
+		return 0;
+	}
+	if(!pSelf->bPrepared){
+		TSK_DEBUG_ERROR("MF video consumer not prepared");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+
+	// Run EVR watcher
+	if(pSelf->pDisplayWatcher) {
+		CHECK_HR(hr = pSelf->pDisplayWatcher->Start());
+	}
+
+	// Run the media session.
+	CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopologyFull));
+
+	// Start asynchronous watcher thread.
+	// bStarted must be set BEFORE creating the thread: RunSessionThread's
+	// loop condition reads it.
+	pSelf->bStarted = true;
+	int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+	if(ret != 0) {
+		TSK_DEBUG_ERROR("Failed to create thread");
+		hr = E_FAIL;
+		// Roll back: clear the flag, reap any partially created thread and
+		// shut the session down again.
+		pSelf->bStarted = false;
+		if(pSelf->ppTread[0]){
+			tsk_thread_join(&pSelf->ppTread[0]);
+		}
+		MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
+		CHECK_HR(hr = E_FAIL);
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Delivers one video frame to the custom media source. When the incoming
+// frame size differs from the negotiated one, the media type and topology
+// are rebuilt on the fly before the copy. Returns 0 on success, -1 on failure.
+static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	HRESULT hr = S_OK;
+
+	if(!pSelf || !buffer || !size) {
+		TSK_DEBUG_ERROR("Invalid parameter");
+		CHECK_HR(hr = E_INVALIDARG);
+	}
+
+	if(!pSelf->bStarted) {
+		TSK_DEBUG_INFO("MF video consumer not started");
+		CHECK_HR(hr = E_FAIL);
+	}
+	if(!pSelf->pSource) {
+		TSK_DEBUG_ERROR("No video custom source");
+		CHECK_HR(hr = E_FAIL);
+	}
+
+	// Frame size changed since negotiation: update the media type and
+	// rebuild the rendering topology before delivering the frame.
+	if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
+		TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+			pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+			pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+		// Update media type
+		CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, TMEDIA_CONSUMER(pSelf)->video.in.width, TMEDIA_CONSUMER(pSelf)->video.in.height));
+		CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, TMEDIA_CONSUMER(pSelf)->video.fps, 1));
+
+		CHECK_HR(hr = pSelf->pSession->ClearTopologies());
+
+		//
+		// FIXME: Using same EVR when the size is just swapped (e.g. [640, 480] -> [480, 640]) doesn't work while other changes does (e.g. [352, 288] -> [640, 480])
+		// /!\This look like a bug in Media Foundation
+		//
+		if(pSelf->nNegWidth == TMEDIA_CONSUMER(pSelf)->video.in.height && pSelf->nNegHeight == TMEDIA_CONSUMER(pSelf)->video.in.width) // swapped?
+		{
+			TSK_DEBUG_INFO("/!\\ Size swapped");
+
+			// Work around the EVR bug above by creating a brand-new EVR
+			// activation object and partial topology; the old ones are only
+			// swapped out once both creations succeeded.
+			IMFActivate* pSinkActivate = NULL;
+			IMFTopology* pTopologyPartial = NULL;
+			hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSinkActivate);
+			if(FAILED(hr)) goto end_of_swapping;
+			hr = MFUtils::CreateTopology(
+				pSelf->pSource,
+				pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
+				pSinkActivate,
+				NULL/*Preview*/,
+				pSelf->pOutType,
+				&pTopologyPartial);
+			if(FAILED(hr)) goto end_of_swapping;
+
+			if(SUCCEEDED(hr)) {
+				SafeRelease(&pSelf->pSinkActivate);
+				SafeRelease(&pSelf->pTopologyPartial);
+				pSelf->pSinkActivate = pSinkActivate; pSinkActivate = NULL;
+				pSelf->pTopologyPartial = pTopologyPartial; pTopologyPartial = NULL;
+
+			}
+
+end_of_swapping:
+			SafeRelease(&pSinkActivate);
+			SafeRelease(&pTopologyPartial);
+			CHECK_HR(hr);
+		}
+
+		// Set media type again (not required but who know)
+		CHECK_HR(hr = MFUtils::SetMediaType(pSelf->pSource, pSelf->pOutType));
+
+		// Rebuild topology using the partial one
+		IMFTopology* pTopologyFull = NULL;
+		hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pTopologyFull);
+		if(SUCCEEDED(hr)){
+			SafeRelease(&pSelf->pTopologyFull);
+			pSelf->pTopologyFull = pTopologyFull; pTopologyFull = NULL;
+		}
+		SafeRelease(&pTopologyFull);
+		CHECK_HR(hr);
+
+		// Find Main Sink
+		IMFMediaSink* pMediaSink = NULL;
+		hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink);
+		if(SUCCEEDED(hr)) {
+			// Recreate the display watcher against the new sink and restart
+			// it if the consumer is already running.
+			if(pSelf->pDisplayWatcher){
+				delete pSelf->pDisplayWatcher, pSelf->pDisplayWatcher = NULL;
+			}
+			pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
+			if(SUCCEEDED(hr) && pSelf->bStarted) {
+				hr = pSelf->pDisplayWatcher->Start();
+			}
+		}
+		SafeRelease(&pMediaSink);
+		CHECK_HR(hr);
+
+		// Update the topology associated to the media session
+		CHECK_HR(hr = pSelf->pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pSelf->pTopologyFull));
+
+		// Update negotiated width and height
+		pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.in.width;
+		pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.in.height;
+	}
+
+	// Deliver buffer
+	CHECK_HR(hr = pSelf->pSource->CopyVideoBuffer(pSelf->nNegWidth, pSelf->nNegHeight, buffer, size));
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Pauses the media session driving the consumer. Returns 0 on success (or
+// when not started), -1 on failure.
+static int plugin_win_mf_consumer_video_pause(tmedia_consumer_t* self)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if(!pSelf->bStarted)
+	{
+		// FIX: log message previously said "producer" (copy/paste).
+		TSK_DEBUG_INFO("MF video consumer not started");
+		return 0;
+	}
+
+	HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Stops the consumer: halts the EVR watcher, shuts the session down (which
+// wakes the event-pump thread), joins that thread, releases the source, then
+// unprepares. A new start() requires prepare() to be called again.
+static int plugin_win_mf_consumer_video_stop(tmedia_consumer_t* self)
+{
+	plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+
+	// stop EVR watcher
+	if(pSelf->pDisplayWatcher) {
+		hr = pSelf->pDisplayWatcher->Stop();
+	}
+
+	// for the thread
+	pSelf->bStarted = false;
+	hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
+	if(pSelf->ppTread[0]){
+		tsk_thread_join(&pSelf->ppTread[0]);
+	}
+	hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
+
+	// next start() will be called after prepare()
+	return _plugin_win_mf_consumer_video_unprepare(pSelf);
+}
+
+// Releases everything allocated by prepare(): stops the display watcher,
+// shuts down the source and session, releases all MF/COM objects and clears
+// the prepared flag. Returns 0 on success, -1 on NULL input.
+static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf)
+{
+	if(!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if(pSelf->bStarted) {
+		// plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+		TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+	}
+
+	if(pSelf->pDisplayWatcher) {
+		pSelf->pDisplayWatcher->Stop();
+	}
+	// Shut down the source and session first, then release them below.
+	// FIX: the pointers were previously set to NULL right after Shutdown(),
+	// turning the SafeRelease() calls below into no-ops and leaking both
+	// COM references.
+	if(pSelf->pSource){
+		pSelf->pSource->Shutdown();
+	}
+	if(pSelf->pSession){
+		pSelf->pSession->Shutdown();
+	}
+
+	SafeRelease(&pSelf->pDecoder);
+	SafeRelease(&pSelf->pSession);
+	SafeRelease(&pSelf->pSource);
+	SafeRelease(&pSelf->pSinkActivate);
+	SafeRelease(&pSelf->pTopologyFull);
+	SafeRelease(&pSelf->pTopologyPartial);
+	SafeRelease(&pSelf->pOutType);
+
+	if(pSelf->pDisplayWatcher) {
+		delete pSelf->pDisplayWatcher;
+		pSelf->pDisplayWatcher = NULL;
+	}
+
+	pSelf->bPrepared = false;
+
+	return 0;
+}
+
+
+//
+// Media Foundation video consumer object definition
+//
+/* constructor */
+/* constructor */
+// tsk object constructor: ensures Media Foundation is started, then
+// initializes the base consumer with sane video defaults (15 fps, sizes
+// taken from the codec, auto-resize enabled).
+static tsk_object_t* plugin_win_mf_consumer_video_ctor(tsk_object_t * self, va_list * app)
+{
+	// Safe to call multiple times (presumably ref-counted inside MFUtils).
+	MFUtils::Startup();
+
+	plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+	if(pSelf){
+		/* init base */
+		tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+		TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
+		TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+		/* init self */
+		// consumer->create_on_ui_thread = tsk_true;
+		TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+		TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+		TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+		TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+		TSK_DEBUG_INFO("Create WinMF video consumer");
+	}
+	return self;
+}
+/* destructor */
+/* destructor */
+// tsk object destructor: stops the consumer if still running, then releases
+// base-class and self-owned resources (unprepare is idempotent).
+static tsk_object_t* plugin_win_mf_consumer_video_dtor(tsk_object_t * self)
+{
+	plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+	if(pSelf){
+		/* stop */
+		if(pSelf->bStarted){
+			plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
+		}
+
+		/* deinit base */
+		tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+		/* deinit self */
+		// stop() already unprepares, but calling again is harmless.
+		_plugin_win_mf_consumer_video_unprepare(pSelf);
+	}
+
+	return self;
+}
+/* object definition */
+static const tsk_object_def_t plugin_win_mf_consumer_video_def_s =
+{
+ sizeof(plugin_win_mf_consumer_video_t),
+ plugin_win_mf_consumer_video_ctor,
+ plugin_win_mf_consumer_video_dtor,
+ tsk_null,
+};
+/* plugin definition*/
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s =
+{
+ &plugin_win_mf_consumer_video_def_s,
+
+ tmedia_video,
+ "Media Foundation video consumer",
+
+ plugin_win_mf_consumer_video_set,
+ plugin_win_mf_consumer_video_prepare,
+ plugin_win_mf_consumer_video_start,
+ plugin_win_mf_consumer_video_consume,
+ plugin_win_mf_consumer_video_pause,
+ plugin_win_mf_consumer_video_stop
+};
+const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t = &plugin_win_mf_consumer_video_plugin_def_s;
+
+// Run session async thread
+static void* TSK_STDCALL RunSessionThread(void *pArg)
+{
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - ENTER");
+
+ while(pSelf->bStarted){
+ CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
+ {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded)
+ {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
+
+bail:
+ TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - EXIT");
+
+ return NULL;
+}
+
+#endif /* PLUGIN_MF_CV_USE_D3D9 */ \ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx b/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
new file mode 100644
index 0000000..0e6abcb
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
@@ -0,0 +1,600 @@
+/* Copyright (C) 2013-2015 Mamadou DIOP
+* Copyright (C) 2013-2015 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+/*
+Video Processor MFT (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx)
+* The video processor supports GPU-accelerated video processing.
+* The video processor MFT is a Microsoft Media Foundation transform (MFT) that performs :
+ - colorspace conversion
+ - video resizing
+ - deinterlacing
+ - frame rate conversion
+ - rotation
+ - cropping
+ - spatial left and right view unpacking
+ - and mirroring
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+
+#include "tinymedia/tmedia_converter_video.h"
+
+#include "tsk_memory.h"
+#include "tsk_debug.h"
+
+#include <assert.h>
+#include <initguid.h>
+#include <dmo.h>
+#include <wmcodecdsp.h>
+
+#ifdef _MSC_VER
+#pragma comment(lib, "strmiids.lib")
+#pragma comment(lib, "wmcodecdspuuid.lib")
+#endif
+
+EXTERN_C const GUID CLSID_VideoProcessorMFT; // defined in mf_utils.cxx
+
+#if !defined(_WIN32_WINNT_WIN8)
+#define _WIN32_WINNT_WIN8 0x0602
+#endif /* _WIN32_WINNT_WIN8 */
+
+#if (WINVER < _WIN32_WINNT_WIN8)
+DEFINE_GUID(MF_SA_D3D11_AWARE,
+ 0x206b4fc8, 0xfcf9, 0x4c51, 0xaf, 0xe3, 0x97, 0x64, 0x36, 0x9e, 0x33, 0xa0);
+#endif /* MF_SA_D3D11_AWARE */
+
+#if !defined(HAVE_IMFVideoProcessorControl)
+# if defined(__IMFVideoProcessorControl_INTERFACE_DEFINED__)
+# define HAVE_IMFVideoProcessorControl 1
+# else
+# define HAVE_IMFVideoProcessorControl 0
+# endif
+#endif /* HAVE_IMFVideoProcessorControl */
+#if !defined(E_BOUNDS)
+# define E_BOUNDS _HRESULT_TYPEDEF_(0x8000000BL)
+#endif /* E_BOUNDS */
+#if !defined(PLUGIN_MF_VC_FPS)
+#define PLUGIN_MF_VC_FPS 120 // Samples requires timestamp
+#endif /* PLUGIN_MF_VC_FPS */
+
+typedef struct plugin_win_mf_converter_video_ms_s
+{
+ TMEDIA_DECLARE_CONVERTER_VIDEO;
+
+ GUID fmtSrc;
+ tsk_size_t widthSrc;
+ tsk_size_t heightSrc;
+
+ GUID fmtDst;
+ tsk_size_t widthDst;
+ tsk_size_t heightDst;
+
+ UINT32 rotation;
+ UINT32 xOutputSize;
+ UINT32 xInputSize;
+ BOOL flip;
+
+ IMFSample* pSampleOut;
+ IMFSample* pSampleIn;
+
+ LONGLONG rtStart;
+ UINT64 rtDuration;
+
+ IMFTransform* pMFT; // "CLSID_VideoProcessorMFT" or "CLSID_CColorConvertDMO"
+#if HAVE_IMFVideoProcessorControl
+ IMFVideoProcessorControl* pVPC;
+#endif
+ BOOL isVideoProcessor;
+}
+plugin_win_mf_converter_video_ms_t;
+
+static inline const GUID& _plugin_win_mf_converter_video_ms_get_pixfmt(tmedia_chroma_t chroma);
+static inline tsk_size_t _plugin_win_mf_converter_video_ms_get_size(tmedia_chroma_t chroma, tsk_size_t w, tsk_size_t h);
+static inline HRESULT _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
+ BYTE* pDst,
+ const BYTE* pSrc,
+ INT dwWidthInPixels,
+ INT dwHeightInPixels
+ );
+static HRESULT _plugin_win_mf_converter_video_ms_process_input(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample* pSample);
+static HRESULT _plugin_win_mf_converter_video_ms_process_output(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample **ppSample);
+static HRESULT _plugin_win_mf_converter_video_ms_process(plugin_win_mf_converter_video_ms_t* pSelf, const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
+
/* Initializes (or re-initializes) the converter for a new src/dst format pair.
 * Creates the underlying transform: prefers the "Video Processor MFT"
 * (CLSID_VideoProcessorMFT, supports scaling/rotation/mirroring) and falls
 * back to the Color Converter DSP (CLSID_CColorConvertDMO), which can only do
 * colorspace conversion, when no resize is required.
 * @param self       converter base object (actually plugin_win_mf_converter_video_ms_t)
 * @param srcWidth   source frame width in pixels
 * @param srcHeight  source frame height in pixels
 * @param srcChroma  source pixel format (tinyMEDIA chroma)
 * @param dstWidth   destination frame width in pixels
 * @param dstHeight  destination frame height in pixels
 * @param dstChroma  destination pixel format (tinyMEDIA chroma)
 * @return 0 on success; -2/-3 for unsupported chromas, -4 on MF failure */
static int plugin_win_mf_converter_video_ms_init(tmedia_converter_video_t* self, tsk_size_t srcWidth, tsk_size_t srcHeight, tmedia_chroma_t srcChroma, tsk_size_t dstWidth, tsk_size_t dstHeight, tmedia_chroma_t dstChroma)
{
    plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)self;
    TSK_DEBUG_INFO("Initializing new MF Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", srcWidth, srcHeight, srcChroma, dstWidth, dstHeight, dstChroma);

    // map tinyMEDIA chromas to MF pixel-format GUIDs (GUID_NULL => unsupported)
    if((pSelf->fmtSrc = _plugin_win_mf_converter_video_ms_get_pixfmt(srcChroma)) == GUID_NULL)
    {
        TSK_DEBUG_ERROR("Invalid source chroma");
        return -2;
    }
    if((pSelf->fmtDst = _plugin_win_mf_converter_video_ms_get_pixfmt(dstChroma)) == GUID_NULL)
    {
        TSK_DEBUG_ERROR("Invalid destination chroma");
        return -3;
    }

    pSelf->rtStart = 0; // sample timestamps restart from zero

    pSelf->widthSrc = srcWidth;
    pSelf->heightSrc = srcHeight;
    pSelf->widthDst = dstWidth;
    pSelf->heightDst = dstHeight;
    pSelf->rotation = 0;
    // cache frame sizes in bytes so process() doesn't recompute them per frame
    pSelf->xOutputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(dstChroma, dstWidth, dstHeight);
    pSelf->xInputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(srcChroma, srcWidth, srcHeight);

    // release any state from a previous init (re-initialization is supported)
    SafeRelease(&pSelf->pSampleOut);
    SafeRelease(&pSelf->pSampleIn);
    SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
    SafeRelease(&pSelf->pVPC);
#endif

    HRESULT hr = S_OK;

    IMFMediaType* pTypeSrc = NULL;
    IMFMediaType* pTypeDst = NULL;

    // Get video processor or Color convertor
    hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
            CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT));
    pSelf->isVideoProcessor = SUCCEEDED(hr);
    if(FAILED(hr))
    {
        TSK_DEBUG_INFO("CoCreateInstance(CLSID_VideoProcessorMFT) failed");
        if(pSelf->widthSrc == pSelf->widthDst && pSelf->heightSrc == pSelf->heightDst)
        {
            // the color converter DSP cannot scale, so it is only a valid fallback when src == dst dimensions
            TSK_DEBUG_INFO("No video scaling is required...perform CoCreateInstance(CLSID_CColorConvertDMO)");
            CHECK_HR(hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT)));
        }
        else
        {
            CHECK_HR(hr); // scaling required but no video processor available -> bail out
        }
    }

    if(pSelf->isVideoProcessor)
    {
        IMFAttributes* pAttributes = NULL;
        UINT32 GPU = 0;
        // informational only: log whether the MFT is D3D11 (GPU) aware
        hr = pSelf->pMFT->GetAttributes(&pAttributes);
        if (SUCCEEDED(hr)) {
            hr = pAttributes->GetUINT32(MF_SA_D3D11_AWARE, &GPU);
        }
        SafeRelease(&pAttributes);
        TSK_DEBUG_INFO("MF_SA_D3D11_AWARE = %d", GPU);
#if HAVE_IMFVideoProcessorControl
        // control interface used later by process() for SetMirror()/SetRotation()
        CHECK_HR(hr = pSelf->pMFT->QueryInterface(IID_PPV_ARGS(&pSelf->pVPC)));
#endif
    }

    // build the uncompressed video media types and apply them to the transform
    CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtSrc, &pTypeSrc, (UINT32)pSelf->widthSrc, (UINT32)pSelf->heightSrc));
    CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtDst, &pTypeDst, (UINT32)pSelf->widthDst, (UINT32)pSelf->heightDst));

    CHECK_HR(hr = pSelf->pMFT->SetInputType(0, pTypeSrc, 0));
    CHECK_HR(hr = pSelf->pMFT->SetOutputType(0, pTypeDst, 0));

bail:
    SafeRelease(&pTypeSrc);
    SafeRelease(&pTypeDst);

    if(FAILED(hr))
    {
        SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
        SafeRelease(&pSelf->pVPC);
#endif
        return -4;
    }

    return 0;
}
+
+static tsk_size_t plugin_win_mf_converter_video_ms_process(tmedia_converter_video_t* _self, const void* buffer, tsk_size_t buffer_size, void** output, tsk_size_t* output_max_size)
+{
+ plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)_self;
+
+ HRESULT hr = S_OK;
+
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+
+ if(!pSelf || !buffer || !output || !output_max_size)
+ {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!pSelf->pMFT)
+ {
+ TSK_DEBUG_ERROR("Not initialized");
+ CHECK_HR(hr = E_FAIL);
+ }
+#if HAVE_IMFVideoProcessorControl
+ if(!pSelf->pVPC && pSelf->isVideoProcessor)
+ {
+ TSK_DEBUG_ERROR("Not initialized");
+ CHECK_HR(hr = E_FAIL);
+ }
+#endif
+
+ if(*output_max_size < pSelf->xOutputSize)
+ {
+ if(!(*output = tsk_realloc(*output, pSelf->xOutputSize)))
+ {
+ *output_max_size = 0;
+ TSK_DEBUG_ERROR("Failed to allocate buffer with size = %u", pSelf->xOutputSize);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ *output_max_size = pSelf->xOutputSize;
+ }
+#if HAVE_IMFVideoProcessorControl
+ if(pSelf->pVPC && !!_self->flip != !!pSelf->flip)
+ {
+ pSelf->flip = !!_self->flip;
+ CHECK_HR(hr = pSelf->pVPC->SetMirror(pSelf->flip ? MIRROR_NONE : MIRROR_VERTICAL));
+ }
+ if(pSelf->pVPC && _self->rotation != pSelf->rotation)
+ {
+ _self->rotation = pSelf->rotation;
+ CHECK_HR(hr = pSelf->pVPC->SetRotation(pSelf->rotation == 0 ? ROTATION_NONE : ROTATION_NORMAL));
+
+ }
+#endif
+
+ CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process(
+ pSelf, buffer, pSelf->xInputSize, &pSampleOut));
+
+ if(pSampleOut)
+ {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if(dwDataLength > 0)
+ {
+ if(dwDataLength != pSelf->xOutputSize)
+ {
+ TSK_DEBUG_ERROR("Output size mismatch");
+ CHECK_HR(hr = E_BOUNDS);
+ }
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+
+ // MFCopyImage() is optimized: MMX, SSE, or SSE2
+ switch(_self->dstChroma)
+ {
+ // Don't waste your time guessing which parameter to use: The consumer will always request RGB32. If not used for consumer then, just memcpy()
+ case tmedia_chroma_rgb32:
+ {
+ if(pSelf->isVideoProcessor)
+ {
+ hr = _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
+ (BYTE*)*output,
+ (const BYTE*)pBufferPtr,
+ (INT)pSelf->widthDst,
+ (INT)pSelf->heightDst
+ );
+ }
+ else
+ {
+ hr = MFCopyImage(
+ (BYTE*)*output,
+ (LONG)(pSelf->widthDst << 2),
+ (BYTE*)pBufferPtr,
+ (LONG)(pSelf->widthDst << 2),
+ (DWORD)(pSelf->widthDst << 2),
+ (DWORD)pSelf->heightDst
+ );
+ }
+
+
+ if(FAILED(hr))
+ {
+ // unlock() before leaving
+ pBufferOut->Unlock();
+ CHECK_HR(hr);
+ }
+ break;
+ }
+ default:
+ {
+ memcpy(*output, pBufferPtr, dwDataLength);
+ }
+ }
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+
+ pSelf->rtStart += pSelf->rtDuration;
+
+bail:
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
+
+ return SUCCEEDED(hr) ? pSelf->xOutputSize : 0;
+}
+
+static tsk_object_t* plugin_win_mf_converter_video_ms_ctor(tsk_object_t * self, va_list * app)
+{
+ plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
+ if(pSelf){
+ HRESULT hr = MFFrameRateToAverageTimePerFrame(PLUGIN_MF_VC_FPS, 1, &pSelf->rtDuration);
+ if(FAILED(hr)){
+ pSelf->rtDuration = 83333; // 120 FPS
+ }
+ }
+ return self;
+}
/* dtor: releases all COM objects held by the converter (samples, transform
 * and, when available, the video processor control interface). */
static tsk_object_t* plugin_win_mf_converter_video_ms_dtor(tsk_object_t * self)
{
    plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
    if(pSelf){
        SafeRelease(&pSelf->pSampleOut);
        SafeRelease(&pSelf->pSampleIn);
        SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
        SafeRelease(&pSelf->pVPC);
#endif
    }

    return self;
}
/* object definition (tsk_object): ref-counted object descriptor */
static const tsk_object_def_t plugin_win_mf_converter_video_ms_def_s =
{
    sizeof(plugin_win_mf_converter_video_ms_t),
    plugin_win_mf_converter_video_ms_ctor,
    plugin_win_mf_converter_video_ms_dtor,
    tsk_null, /* no comparator */
};
const tsk_object_def_t *plugin_win_mf_converter_video_ms_def_t = &plugin_win_mf_converter_video_ms_def_s;
/* plugin definition: vtable registered with tinyMEDIA for the MS video converter */
static const tmedia_converter_video_plugin_def_t plugin_win_mf_converter_video_ms_plugin_def_s =
{
    &plugin_win_mf_converter_video_ms_def_s,

    plugin_win_mf_converter_video_ms_init,
    plugin_win_mf_converter_video_ms_process
};
const tmedia_converter_video_plugin_def_t *plugin_win_mf_converter_video_ms_plugin_def_t = &plugin_win_mf_converter_video_ms_plugin_def_s;
+
+
+static inline tsk_size_t _plugin_win_mf_converter_video_ms_get_size(tmedia_chroma_t chroma, tsk_size_t w, tsk_size_t h)
+{
+ switch(chroma){
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return (w * h * 3);
+ case tmedia_chroma_rgb565le:
+ return ((w * h) << 1);
+ case tmedia_chroma_rgb32:
+ return ((w * h) << 2);
+ case tmedia_chroma_nv21:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_nv12:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_yuv422p:
+ return ((w * h) << 1);
+ case tmedia_chroma_uyvy422:
+ case tmedia_chroma_yuyv422:
+ return ((w * h) << 1);
+ case tmedia_chroma_yuv420p:
+ return ((w * h * 3) >> 1);
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return 0;
+ }
+}
+
// Maps a tinyMEDIA chroma to the matching Media Foundation pixel-format GUID.
// Returns GUID_NULL (by const reference to the global) when there is no mapping.
// NOTE(review): tmedia_chroma_nv21 and tmedia_chroma_yuv422p fall through to
// the default case and yield GUID_NULL even though _get_size() handles them —
// confirm whether that is intentional (no MF equivalent) or an omission.
static inline const GUID& _plugin_win_mf_converter_video_ms_get_pixfmt(tmedia_chroma_t chroma)
{
    switch(chroma){
        case tmedia_chroma_rgb24:
        case tmedia_chroma_bgr24:
            return MFVideoFormat_RGB24;
        case tmedia_chroma_rgb565le:
            return MFVideoFormat_RGB565;
        case tmedia_chroma_rgb32:
            return MFVideoFormat_RGB32;
        case tmedia_chroma_nv12:
            return MFVideoFormat_NV12;
        case tmedia_chroma_yuv420p:
            return MFVideoFormat_I420;
        case tmedia_chroma_yuyv422:
            return MFVideoFormat_YUY2;
        case tmedia_chroma_uyvy422:
            return MFVideoFormat_UYVY;
        default:
            TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
            return GUID_NULL;
    }
}
+
+// For RGB32:
+// Direct3D -> Top-Down
+// Video Processor -> Down-Top
+static inline HRESULT _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
+ BYTE* pDst,
+ const BYTE* pSrc,
+ INT dwWidthInPixels,
+ INT dwHeightInPixels
+ )
+{
+ RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthInPixels * dwHeightInPixels) - dwWidthInPixels];
+ RGBQUAD *pDestPixel = &((RGBQUAD*)pDst)[0];
+
+ register INT x;
+ register INT y;
+
+ for (y = dwHeightInPixels; y > 0 ; --y)
+ {
+ for (x = 0; x < dwWidthInPixels; ++x)
+ {
+ pDestPixel[x] = pSrcPixel[x];
+ }
+ pDestPixel += dwWidthInPixels;
+ pSrcPixel -= dwWidthInPixels;
+ }
+ return S_OK;
+}
+
/* Feeds one input sample into the transform (stream 0).
 * May return MF_E_NOTACCEPTING, in which case the caller must drain pending
 * output first (see _plugin_win_mf_converter_video_ms_process). */
static HRESULT _plugin_win_mf_converter_video_ms_process_input(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample* pSample)
{
    return pSelf->pMFT->ProcessInput(0, pSample, 0);
}
+
/* Drains one converted sample from the transform.
 * The output sample (pSelf->pSampleOut) is created lazily and reused across
 * calls; its media buffer is replaced with a bigger one whenever the MFT
 * reports a larger required cbSize. On success *ppSample receives an
 * AddRef'd pointer to that reused sample (caller must Release it). */
static HRESULT _plugin_win_mf_converter_video_ms_process_output(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample **ppSample)
{
    *ppSample = NULL;

    IMFMediaBuffer* pBufferOut = NULL;

    DWORD dwStatus;

    HRESULT hr = S_OK;

    MFT_OUTPUT_STREAM_INFO mftStreamInfo = { 0 };
    MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };

    // ask the MFT how big the output buffer must be
    CHECK_HR(hr = pSelf->pMFT->GetOutputStreamInfo(0, &mftStreamInfo));

    if(!pSelf->pSampleOut)
    {
        // first call: create the reusable output sample + buffer
        CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &pSelf->pSampleOut));
        hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut);
        if(FAILED(hr))
        {
            SafeRelease(&pSelf->pSampleOut);
            CHECK_HR(hr);
        }
    }
    else
    {
        // subsequent calls: grow the existing buffer if it became too small
        DWORD dwMaxLength = 0;
        CHECK_HR(hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut));
        CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
        if(dwMaxLength < mftStreamInfo.cbSize)
        {
            CHECK_HR(hr = pSelf->pSampleOut->RemoveAllBuffers());
            SafeRelease(&pBufferOut);
            CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
            CHECK_HR(hr = pSelf->pSampleOut->AddBuffer(pBufferOut));
        }
    }

    CHECK_HR(hr = pBufferOut->SetCurrentLength(0));

    //Set the output sample
    mftOutputData.pSample = pSelf->pSampleOut;
    //Set the output id
    mftOutputData.dwStreamID = 0;

    //Generate the output sample
    // NOTE: a failing ProcessOutput (e.g. MF_E_TRANSFORM_NEED_MORE_INPUT) jumps
    // to bail and is propagated to the caller, which treats it as "no output yet"
    CHECK_HR(hr = pSelf->pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus));
    /*if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
    {
        hr = S_OK;
        goto bail;
    }*/

    // TODO: Handle MF_E_TRANSFORM_STREAM_CHANGE

    *ppSample = pSelf->pSampleOut;
    (*ppSample)->AddRef();

bail:
    SafeRelease(&pBufferOut);
    return hr;
}
+
/* Runs one full push/drain cycle through the transform.
 * Copies the raw frame into a reusable input sample (pSelf->pSampleIn, grown
 * on demand), stamps it with the running time/duration, then:
 *  - if ProcessInput() reports MF_E_NOTACCEPTING, drains pending output and
 *    retries the input (marking it as a discontinuity);
 *  - otherwise drains one output sample directly.
 * On success *ppSampleOut holds an AddRef'd sample the caller must Release. */
static HRESULT _plugin_win_mf_converter_video_ms_process(plugin_win_mf_converter_video_ms_t* pSelf, const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut)
{
    if(!pcInputPtr || !nInputSize || !ppSampleOut)
    {
        TSK_DEBUG_ERROR("Invalid parameter");
        return E_INVALIDARG;
    }

    *ppSampleOut = NULL;

    HRESULT hr = S_OK;

    IMFMediaBuffer* pBufferIn = NULL;
    BYTE* pBufferPtr = NULL;

    if(!pSelf->pSampleIn)
    {
        // first call: create the reusable input sample + buffer
        CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &pSelf->pSampleIn));
        hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn);
        if(FAILED(hr))
        {
            SafeRelease(&pSelf->pSampleIn);
            CHECK_HR(hr);
        }
    }
    else
    {
        // subsequent calls: grow the existing buffer if it became too small
        DWORD dwMaxLength = 0;
        CHECK_HR(hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn));
        CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
        if(dwMaxLength < nInputSize)
        {
            CHECK_HR(hr = pSelf->pSampleIn->RemoveAllBuffers());
            SafeRelease(&pBufferIn);
            CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
            CHECK_HR(hr = pSelf->pSampleIn->AddBuffer(pBufferIn));
        }
    }

    // copy the raw frame into the sample's buffer
    CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
    memcpy(pBufferPtr, pcInputPtr, nInputSize);
    CHECK_HR(hr = pBufferIn->Unlock());
    CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));

    // samples must be timestamped (see PLUGIN_MF_VC_FPS)
    CHECK_HR(hr = pSelf->pSampleIn->SetSampleDuration(pSelf->rtDuration));
    CHECK_HR(hr = pSelf->pSampleIn->SetSampleTime(pSelf->rtStart));

    hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
    while(hr == MF_E_NOTACCEPTING)
    {
        // transform is full: drain an output sample, then retry the input
        TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
        IMFSample* pSample = NULL;
        hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, &pSample);
        if(SUCCEEDED(hr) && pSample)
        {
            SafeRelease(ppSampleOut);
            *ppSampleOut = pSample, pSample = NULL;

            hr = pSelf->pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
            hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
        }
    }
    if(!*ppSampleOut)
    {
        CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, ppSampleOut));
    }

bail:
    SafeRelease(&pBufferIn);
    return hr;
}
diff --git a/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx b/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
new file mode 100644
index 0000000..2a3c314
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
@@ -0,0 +1,333 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+#include "internals/mf_sample_grabber.h"
+#include "internals/mf_devices.h"
+
+#include "tinydav/audio/tdav_producer_audio.h"
+
+#include "tsk_thread.h"
+#include "tsk_debug.h"
+
+static void* TSK_STDCALL RunSessionThread(void *pArg);
+
+typedef struct plugin_win_mf_producer_audio_s
+{
+ TDAV_DECLARE_PRODUCER_AUDIO;
+
+ bool bStarted;
+ tsk_thread_handle_t* ppTread[1];
+
+ DeviceListAudio* pDeviceList;
+
+ IMFMediaSession *pSession;
+ IMFMediaSource *pSource;
+ SampleGrabberCB *pCallback;
+ IMFActivate *pSinkActivate;
+ IMFTopology *pTopology;
+ IMFMediaType *pType;
+}
+plugin_win_mf_producer_audio_t;
+
/* ============ Media Producer Interface ================= */
/* Sets a runtime parameter.
 * No producer-specific (tmedia_ppt_producer) parameter is handled yet — the
 * empty branch is a placeholder; everything is delegated to the base audio
 * producer. */
static int plugin_win_mf_producer_audio_set(tmedia_producer_t* self, const tmedia_param_t* param)
{
    plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
    if(param->plugin_type == tmedia_ppt_producer){
    }
    return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(pSelf), param);
}
+
+static int plugin_win_mf_producer_audio_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
+{
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+ if(!pSelf || !codec){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_PRODUCER(pSelf)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
+ TMEDIA_PRODUCER(pSelf)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
+ TMEDIA_PRODUCER(pSelf)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
+
+ TSK_DEBUG_INFO("MF audio producer: channels=%d, rate=%d, ptime=%d",
+ TMEDIA_PRODUCER(pSelf)->audio.channels,
+ TMEDIA_PRODUCER(pSelf)->audio.rate,
+ TMEDIA_PRODUCER(pSelf)->audio.ptime
+ );
+
+ HRESULT hr = S_OK;
+
+ // create device list object
+ if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListAudio())){
+ TSK_DEBUG_ERROR("Failed to create device list");
+ hr = E_OUTOFMEMORY;
+ goto bail;
+ }
+ // enumerate devices
+ hr = pSelf->pDeviceList->EnumerateDevices();
+ if(!SUCCEEDED(hr)){
+ goto bail;
+ }
+
+ // check if we have at least one MF video source connected to the PC
+ if(pSelf->pDeviceList->Count() == 0){
+ TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
+ // do not break the negotiation as one-way video connection is a valid use-case
+ }
+ else{
+ IMFActivate* pActivate = NULL;
+ // Get best MF audio source
+ hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+ if(!SUCCEEDED(hr) || !pActivate){
+ TSK_DEBUG_ERROR("Failed to get best MF audio source");
+ if(!pActivate){
+ hr = E_OUTOFMEMORY;
+ }
+ goto bail;
+ }
+
+ // Create the media source for the device.
+ hr = pActivate->ActivateObject(
+ __uuidof(IMFMediaSource),
+ (void**)&pSelf->pSource
+ );
+ SafeRelease(&pActivate);
+ if(!SUCCEEDED(hr)){
+ TSK_DEBUG_ERROR("ActivateObject(MF audio source) failed");
+ goto bail;
+ }
+
+ // Create and configure the media type
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pType));
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, TMEDIA_PRODUCER(pSelf)->audio.channels));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, TMEDIA_PRODUCER(pSelf)->audio.rate));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // because uncompressed media type
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
+ UINT32 nBlockAlign = TMEDIA_PRODUCER(pSelf)->audio.channels * (TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample >> 3);
+ UINT32 nAvgBytesPerSec = (nBlockAlign * TMEDIA_PRODUCER(pSelf)->audio.rate);
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, nBlockAlign));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, nAvgBytesPerSec));
+
+ // Create the sample grabber sink.
+ CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
+ CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pType, pSelf->pCallback, &pSelf->pSinkActivate));
+
+ // To run as fast as possible, set this attribute (requires Windows 7):
+ CHECK_HR(hr = pSelf->pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(NULL, &pSelf->pSession));
+
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(pSelf->pSource, NULL/*NO ENCODER*/, pSelf->pSinkActivate, NULL/*Preview*/, pSelf->pType, &pSelf->pTopology));
+ }
+
+bail:
+ return SUCCEEDED(hr) ? 0 : -1;
+}
+
/* Starts capture: runs the media session and spawns the asynchronous event
 * watcher thread (RunSessionThread). Idempotent when already started.
 * On thread-creation failure the session is shut down and the function fails. */
static int plugin_win_mf_producer_audio_start(tmedia_producer_t* self)
{
    plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;

    if(!pSelf){
        TSK_DEBUG_ERROR("Invalid parameter");
        return -1;
    }

    if(pSelf->bStarted){
        TSK_DEBUG_INFO("MF audio producer already started");
        return 0;
    }

    HRESULT hr = S_OK;

    // Run the media session.
    CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));

    // Start asynchronous watcher thread
    // bStarted is set before thread creation because the thread's loop tests it
    pSelf->bStarted = true;
    int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
    if(ret != 0) {
        TSK_DEBUG_ERROR("Failed to create thread");
        hr = E_FAIL;
        pSelf->bStarted = false;
        if(pSelf->ppTread[0]){
            tsk_thread_join(&pSelf->ppTread[0]);
        }
        MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource); // undo RunSession()
        goto bail;
    }

bail:
    return SUCCEEDED(hr) ? 0 : -1;
}
+
+static int plugin_win_mf_producer_audio_pause(tmedia_producer_t* self)
+{
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+ if(!pSelf){
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+
+ return SUCCEEDED(hr) ? 0 : -1;
+}
+
/* Stops capture. Shutdown happens in two steps: the session is shut down
 * first to wake up the asynchronous watcher thread (which is then joined),
 * and only afterwards the source is shut down to release the capture device.
 * NOTE(review): failures from ShutdownSession() are ignored and the function
 * always returns 0 — confirm this best-effort behavior is intentional. */
static int plugin_win_mf_producer_audio_stop(tmedia_producer_t* self)
{
    plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;

    if(!pSelf){
        TSK_DEBUG_ERROR("Invalid parameter");
        return -1;
    }

    HRESULT hr = S_OK;

    // for the thread
    pSelf->bStarted = false;
    hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
    if(pSelf->ppTread[0]){
        tsk_thread_join(&pSelf->ppTread[0]);
    }
    hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera

    return 0;
}
+
+
//
// Media Foundation audio producer object definition
// (header previously said "WaveAPI" — copy-paste leftover)
//
/* constructor: ensures Media Foundation is started, then initializes the
 * base audio-producer object. */
static tsk_object_t* plugin_win_mf_producer_audio_ctor(tsk_object_t * self, va_list * app)
{
    MFUtils::Startup(); // safe to call before touching any MF API

    plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t*)self;
    if(pSelf){
        /* init base */
        tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(pSelf));
        /* init self */

    }
    return self;
}
/* destructor: stops capture if still running, deinitializes the base object,
 * shuts down the MF source/session and releases every COM object held. */
static tsk_object_t* plugin_win_mf_producer_audio_dtor(tsk_object_t * self)
{
    plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)self;
    if(pSelf){
        /* stop */
        if(pSelf->bStarted){
            plugin_win_mf_producer_audio_stop(TMEDIA_PRODUCER(pSelf));
        }

        /* deinit base */
        tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(pSelf));
        /* deinit self */
        if(pSelf->pDeviceList){
            delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
        }
        // Shutdown() before Release(): MF objects must be shut down explicitly
        if(pSelf->pSource){
            pSelf->pSource->Shutdown();
        }
        if(pSelf->pSession){
            pSelf->pSession->Shutdown();
        }

        SafeRelease(&pSelf->pSession);
        SafeRelease(&pSelf->pSource);
        SafeRelease(&pSelf->pCallback);
        SafeRelease(&pSelf->pSinkActivate);
        SafeRelease(&pSelf->pTopology);
        SafeRelease(&pSelf->pType);
    }

    return self;
}
/* object definition (tsk_object): ref-counted object descriptor */
static const tsk_object_def_t plugin_win_mf_producer_audio_def_s =
{
    sizeof(plugin_win_mf_producer_audio_t),
    plugin_win_mf_producer_audio_ctor,
    plugin_win_mf_producer_audio_dtor,
    tdav_producer_audio_cmp, /* comparator from the base audio producer */
};
/* plugin definition: vtable registered with tinyMEDIA for the MF audio producer */
static const tmedia_producer_plugin_def_t plugin_win_mf_producer_audio_plugin_def_s =
{
    &plugin_win_mf_producer_audio_def_s,

    tmedia_audio,
    "Media Foundation audio producer",

    plugin_win_mf_producer_audio_set,
    plugin_win_mf_producer_audio_prepare,
    plugin_win_mf_producer_audio_start,
    plugin_win_mf_producer_audio_pause,
    plugin_win_mf_producer_audio_stop
};
const tmedia_producer_plugin_def_t *plugin_win_mf_producer_audio_plugin_def_t = &plugin_win_mf_producer_audio_plugin_def_s;
+
+
+// Run session async thread
+static void* TSK_STDCALL RunSessionThread(void *pArg)
+{
+ plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (audio) - ENTER");
+
+ while(pSelf->bStarted){
+ CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus))
+ {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded)
+ {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
+
+bail:
+ TSK_DEBUG_INFO("RunSessionThread (audio) - EXIT");
+
+ return NULL;
+}
diff --git a/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx b/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
new file mode 100644
index 0000000..886fc45
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
@@ -0,0 +1,708 @@
+/* Copyright (C) 2013-2015 Mamadou DIOP
+* Copyright (C) 2013-2015 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+#include "plugin_win_mf_config.h"
+#include "internals/mf_utils.h"
+#include "internals/mf_sample_grabber.h"
+#include "internals/mf_devices.h"
+#include "internals/mf_display_watcher.h"
+#include "internals/mf_custom_src.h"
+#include "internals/mf_codec.h"
+
+#include "tinymedia/tmedia_defaults.h"
+#include "tinymedia/tmedia_producer.h"
+
+#include "tsk_string.h"
+#include "tsk_thread.h"
+#include "tsk_debug.h"
+
+#include <KS.h>
+#include <Codecapi.h>
+#include <assert.h>
+#include <stdlib.h> /* mbstowcs, wchar_t(C) */
+#include <initguid.h>
+
+// 0: {{[Source] -> (VideoProcessor) -> SampleGrabber}} , {{[Encoder]}} -> RTP
+// 1: {{[Source] -> (VideoProcessor) -> [Encoder] -> SampleGrabber}} -> RTP
+// (VideoProcessor) is optional
+// "{{" and "}}" define where the graph starts and ends respectively. For "0", [Encoder] is a stand-alone IMFTransform.
+#if !defined(PLUGIN_MF_PV_BUNDLE_CODEC)
+#	define PLUGIN_MF_PV_BUNDLE_CODEC		1 /* MUST be "1" when the encoder is an Async Transform (e.g. Intel Quick Sync). Use "1" to be sure it will always work. */
+#endif /* PLUGIN_MF_PV_BUNDLE_CODEC */
+
+#if !defined(PLUGIN_MF_GOP_SIZE_IN_SECONDS)
+#define PLUGIN_MF_GOP_SIZE_IN_SECONDS 60
+#endif /* PLUGIN_MF_GOP_SIZE_IN_SECONDS */
+
+DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+
+extern const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t;
+extern const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t;
+
+static void* TSK_STDCALL RunSessionThread(void *pArg);
+static int _plugin_win_mf_producer_video_unprepare(struct plugin_win_mf_producer_video_s* pSelf);
+
+//
+// plugin_win_mf_producer_video_t
+//
+// Context of the Media Foundation video producer. Owns the capture pipeline:
+// source -> (optional video processor) -> [optional bundled H.264 encoder]
+// -> sample grabber, plus an EVR sink used for the local preview.
+typedef struct plugin_win_mf_producer_video_s
+{
+	TMEDIA_DECLARE_PRODUCER;
+
+	bool bStarted, bPrepared, bMuted;   // lifecycle flags and mute state
+	tsk_thread_handle_t* ppTread[1];    // session event-pump thread (RunSessionThread)
+	HWND hWndPreview;                   // window that receives the local preview
+
+	int32_t bitrate_bps; // used when encoder bundled only
+
+	DeviceListVideo* pDeviceList;       // enumerated MF video capture devices
+
+	MFCodecVideo *pEncoder;             // bundled HW H.264 encoder, NULL when encoding is done elsewhere
+	IMFMediaSession *pSession;          // media session driving the topology
+	IMFMediaSource *pSource;            // activated video capture source
+	SampleGrabberCB *pCallback;         // sample grabber callback delivering frames to the producer
+	IMFActivate *pSinkGrabber;          // sample grabber sink activation object
+	IMFActivate *pSinkActivatePreview;  // EVR activation object for the preview
+	DisplayWatcher* pWatcherPreview;    // watches the preview window/EVR
+	IMFTopology *pTopology;             // resolved topology (after ResolveTopology)
+	IMFMediaType *pGrabberInputType;    // media type the grabber is configured with
+}
+plugin_win_mf_producer_video_t;
+
+/* ============ Video MF Producer Interface ================= */
+// Runtime parameter setter for the MF video producer.
+// Handles "action" (request IDR, bitrate up/down on the bundled encoder),
+// "local-hwnd" (preview window handle) and "mute".
+// @retval 0 on success, -1 otherwise.
+static int plugin_win_mf_producer_video_set(tmedia_producer_t *self, const tmedia_param_t* param)
+{
+	HRESULT hr = S_OK;
+	plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+	if (!pSelf || !param){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if (tsk_striequals(param->key, "action")){
+		tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+		// fix: do NOT re-declare 'hr' here. The previous shadowed local made
+		// CHECK_HR jump to 'bail' while the outer 'hr' stayed S_OK, silently
+		// swallowing every failure in this branch.
+		switch (action){
+			case tmedia_codec_action_encode_idr:
+			{
+				if (pSelf->pEncoder)
+				{
+					CHECK_HR(hr = pSelf->pEncoder->RequestKeyFrame());
+				}
+				break;
+			}
+			case tmedia_codec_action_bw_down:
+			{
+				// scale down to 2/3 of the current target
+				// NOTE(review): TMEDIA_CODEC(pSelf) casts a producer to a codec to
+				// read bandwidth_max_upload - confirm the layout makes this valid.
+				pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps << 1) / 3), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
+				TSK_DEBUG_INFO("New target bitrate = %d bps", pSelf->bitrate_bps); // fix: value is in bps, not kbps
+				if (pSelf->pEncoder)
+				{
+					CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
+				}
+				break;
+			}
+			case tmedia_codec_action_bw_up:
+			{
+				// scale up to 3/2 of the current target
+				pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps * 3) >> 1), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
+				TSK_DEBUG_INFO("New target bitrate = %d bps", pSelf->bitrate_bps); // fix: value is in bps, not kbps
+				if (pSelf->pEncoder)
+				{
+					CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
+				}
+				break;
+			}
+			default:
+			{
+				// other codec actions are not handled by this producer
+				break;
+			}
+		}
+	}
+	else if (param->value_type == tmedia_pvt_int64){
+		if (tsk_striequals(param->key, "local-hwnd")){
+			HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+			if (hWnd != pSelf->hWndPreview)
+			{
+				pSelf->hWndPreview = hWnd;
+				if (pSelf->pWatcherPreview)
+				{
+					CHECK_HR(hr = pSelf->pWatcherPreview->SetHwnd(hWnd));
+				}
+			}
+		}
+	}
+	else if (param->value_type == tmedia_pvt_int32){
+		if (tsk_striequals(param->key, "mute")){
+			pSelf->bMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+			if (pSelf->pCallback) {
+				pSelf->pCallback->SetMute(pSelf->bMuted); // grabber drops frames while muted
+			}
+#if 0
+			if (pSelf->bStarted && pSelf->pSession) {
+				if (pSelf->bMuted) {
+					pSelf->pSession->Pause();
+				}
+				else {
+					CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+				}
+			}
+#endif
+		}
+		else if (tsk_striequals(param->key, "create-on-current-thead")){
+			//producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+		}
+		else if (tsk_striequals(param->key, "plugin-firefox")){
+			//producer->plugin_firefox = (*((int32_t*)param->value) != 0);
+			//if(producer->grabber){
+			//	producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
+			//}
+		}
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Prepare the MF video producer: enumerate capture devices, activate the best
+// source, optionally bundle a low-latency H.264 HW encoder, build and resolve
+// the topology (source -> [processor] -> [encoder] -> grabber, plus EVR
+// preview) and update the negotiated width/height/fps on the producer.
+// @param self the producer. @param codec the negotiated video codec.
+// @retval 0 on success, -1 otherwise.
+static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
+{
+	plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+	// fix: was '!pSelf || !codec && codec->plugin' which, due to && precedence,
+	// dereferenced 'codec' when it was NULL and never rejected a NULL plugin
+	if (!pSelf || !codec || !codec->plugin){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (pSelf->bPrepared){
+		TSK_DEBUG_WARN("MF video producer already prepared");
+		return -1;
+	}
+
+	// FIXME: DirectShow requires flipping but not MF
+	// The Core library always tries to flip when OSType==Win32. Must be changed
+	TMEDIA_CODEC_VIDEO(codec)->out.flip = tsk_false;
+
+	TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+	TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+	TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+	TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+
+	TSK_DEBUG_INFO("MF video producer: fps=%d, width=%d, height=%d",
+		TMEDIA_PRODUCER(pSelf)->video.fps,
+		TMEDIA_PRODUCER(pSelf)->video.width,
+		TMEDIA_PRODUCER(pSelf)->video.height);
+
+	HRESULT hr = S_OK;
+	IMFAttributes* pSessionAttributes = NULL;
+	IMFTopology *pTopology = NULL;
+	IMFMediaSink* pEvr = NULL;
+	IMFMediaType* pEncoderInputType = NULL;
+	IMFTopologyNode *pNodeGrabber = NULL;
+	IMFMediaType* pGrabberNegotiatedInputMedia = NULL;
+	BOOL bVideoProcessorIsSupported = FALSE;
+	const VideoSubTypeGuidPair *pcPreferredSubTypeGuidPair = NULL;
+
+	// create device list object
+	if (!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListVideo())){
+		TSK_DEBUG_ERROR("Failed to create device list");
+		hr = E_OUTOFMEMORY;
+		goto bail;
+	}
+	// enumerate devices
+	hr = pSelf->pDeviceList->EnumerateDevices();
+	if (!SUCCEEDED(hr)){
+		goto bail;
+	}
+
+	// check if we have at least one MF video source connected to the PC
+	if (pSelf->pDeviceList->Count() == 0){
+		TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
+		// do not break the negotiation as one-way video connection is a valid use-case
+	}
+	else{
+		// Get best MF video source
+		IMFActivate* pActivate = NULL;
+		const char* pczSrcFriendlyName = tmedia_producer_get_friendly_name(tmedia_video);
+		if (!tsk_strnullORempty(pczSrcFriendlyName)) {
+			TSK_DEBUG_INFO("MF pref. video source = %s", pczSrcFriendlyName);
+			wchar_t pczwSrcFriendlyName[MAX_PATH] = { 0 };
+			mbstowcs(pczwSrcFriendlyName, pczSrcFriendlyName, sizeof(pczwSrcFriendlyName) / sizeof(pczwSrcFriendlyName[0]));
+			hr = pSelf->pDeviceList->GetDeviceBest(&pActivate, pczwSrcFriendlyName);
+		}
+		else {
+			hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+		}
+		if (!SUCCEEDED(hr) || !pActivate){
+			TSK_DEBUG_ERROR("Failed to get best MF video source");
+			if (!pActivate){
+				hr = E_OUTOFMEMORY;
+			}
+			goto bail;
+		}
+
+		// Create the media source for the device.
+		hr = pActivate->ActivateObject(
+			__uuidof(IMFMediaSource),
+			(void**)&pSelf->pSource
+			);
+		SafeRelease(&pActivate);
+		if (!SUCCEEDED(hr)){
+			TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
+			goto bail;
+		}
+
+		// Check whether video processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) is supported
+		CHECK_HR(hr = MFUtils::IsVideoProcessorSupported(&bVideoProcessorIsSupported));
+
+		// Must not be set because not supported by Frame Rate Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx).aspx) because of color (neither I420 nor NV12)
+		// Video Processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) supports both NV12 and I420
+		if (!bVideoProcessorIsSupported) {
+			// no processor to convert/resize: adopt whatever the source offers closest to the SDP parameters
+			UINT32 nWidth, nHeight, nFps;
+			hr = MFUtils::GetBestFormat(
+				pSelf->pSource,
+				&MFVideoFormat_I420,
+				(UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
+				(UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
+				(UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
+				&nWidth,
+				&nHeight,
+				&nFps,
+				&pcPreferredSubTypeGuidPair
+				);
+			if (SUCCEEDED(hr))
+			{
+				TSK_DEBUG_INFO("Video processor not supported...using source fps=%u, width=%u, height=%u", nFps, nWidth, nHeight);
+				TMEDIA_PRODUCER(pSelf)->video.width = nWidth;
+				TMEDIA_PRODUCER(pSelf)->video.height = nHeight;
+				TMEDIA_PRODUCER(pSelf)->video.fps = nFps;
+			}
+		}
+
+		// If H.264 is negotiated for this session then, try to find hardware encoder
+		// If no HW encoder is found will fallback to SW implementation from x264
+#if PLUGIN_MF_PV_BUNDLE_CODEC
+		// Before embedding a H.264 encoder we have to be sure that:
+		// - Low latency is supported
+		// - The user decided to use MF encoder (Microsoft, Intel Quick Sync or any other)
+		if ((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
+			BOOL bMFEncoderIsRegistered =
+				(codec->id == tmedia_codec_id_h264_mp && codec->plugin == mf_codec_h264_main_plugin_def_t)
+				|| (codec->id == tmedia_codec_id_h264_bp && codec->plugin == mf_codec_h264_base_plugin_def_t);
+			if (bMFEncoderIsRegistered)
+			{
+				// both Microsoft and Intel encoders support NV12 only as input
+				// static const BOOL kIsEncoder = TRUE;
+				// hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pSelf->pEncoder);
+				pSelf->pEncoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder);
+				if (pSelf->pEncoder)
+				{
+					pSelf->pEncoder->setBundled(TRUE);
+					int32_t avg_bitrate_kbps = tmedia_get_video_bandwidth_kbps_2((unsigned int)TMEDIA_PRODUCER(pSelf)->video.width, (unsigned int)TMEDIA_PRODUCER(pSelf)->video.height, TMEDIA_PRODUCER(pSelf)->video.fps);
+					TSK_DEBUG_INFO("MF_MT_AVG_BITRATE defined with value = %d kbps", avg_bitrate_kbps);
+					pSelf->bitrate_bps = (avg_bitrate_kbps * 1024);
+
+					hr = pSelf->pEncoder->Initialize(
+						(UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
+						(UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
+						(UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
+						(UINT32)pSelf->bitrate_bps);
+					if (SUCCEEDED(hr))
+					{
+						/*hr =*/ pSelf->pEncoder->SetGOPSize((PLUGIN_MF_GOP_SIZE_IN_SECONDS * TMEDIA_PRODUCER(pSelf)->video.fps));
+					}
+					if (FAILED(hr))
+					{
+						// HW encoder init failed: drop it and fall back to SW encoding
+						SafeRelease(&pSelf->pEncoder);
+						hr = S_OK;
+					}
+				}
+				if (SUCCEEDED(hr) && pSelf->pEncoder)
+				{
+					TMEDIA_PRODUCER(pSelf)->encoder.codec_id = codec->id; // means encoded frames as input
+				}
+				else
+				{
+					SafeRelease(&pSelf->pEncoder);
+					TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
+				}
+			}
+			else /* if(!bMFEncoderIsRegistered) */
+			{
+				TSK_DEBUG_INFO("Not bundling MF H.264 encoder even if low latency is supported because another implementation is registered: %s", codec->plugin->desc);
+			}
+		}
+#endif
+
+		// Set session attributes
+		CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+		CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+		// Configure the media type that the Sample Grabber will receive.
+		// Setting the major and subtype is usually enough for the topology loader
+		// to resolve the topology.
+
+		CHECK_HR(hr = MFCreateMediaType(&pSelf->pGrabberInputType));
+		CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+		CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+		CHECK_HR(hr = MFSetAttributeSize(pSelf->pGrabberInputType, MF_MT_FRAME_SIZE, (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, (UINT32)TMEDIA_PRODUCER(pSelf)->video.height));
+		CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_FRAME_RATE, TMEDIA_PRODUCER(pSelf)->video.fps, 1));
+
+		CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+		CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, pSelf->pEncoder ? FALSE : TRUE));
+		CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, pSelf->pEncoder ? FALSE : TRUE));
+		if (pSelf->pEncoder) {
+			// encoder bundled in the graph: grabber receives compressed H.264
+			switch (codec->id){
+				case tmedia_codec_id_h264_bp: case tmedia_codec_id_h264_mp:
+				{
+					CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
+					CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_MPEG2_PROFILE, (codec->id == tmedia_codec_id_h264_bp) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
+					CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_AVG_BITRATE, pSelf->bitrate_bps));
+					break;
+				}
+				default:
+				{
+					TSK_DEBUG_ERROR("HW encoder with id = %d not expected", codec->id);
+					assert(false);
+				}
+			}
+			TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
+			TSK_DEBUG_INFO("MF video producer chroma = NV12 (because of HW encoder)");
+		}
+		else {
+			// Video Processors will be inserted in the topology if the source cannot produce I420 frames
+			// IMPORTANT: Must not be NV12 because not supported by Video Resizer DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx)
+			CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->fourcc : MFVideoFormat_I420));
+			TMEDIA_PRODUCER(pSelf)->video.chroma = pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->chroma : tmedia_chroma_yuv420p;
+			TSK_DEBUG_INFO("MF video producer chroma = %d", TMEDIA_PRODUCER(pSelf)->video.chroma);
+		}
+
+		if (pSelf->pEncoder) {
+			// Unlock the encoder
+			//BOOL bIsAsyncMFT = FALSE;
+			//CHECK_HR(hr = MFUtils::IsAsyncMFT(pSelf->pEncoder->GetMFT(), &bIsAsyncMFT));
+			//if(bIsAsyncMFT)
+			//{
+			//	CHECK_HR(hr = MFUtils::UnlockAsyncMFT(pSelf->pEncoderpSelf->pEncoder->GetMFT()));
+			//}
+			// Apply Encoder output type (must be called before SetInputType)
+			//CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetOutputType(0, pSelf->pGrabberInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+
+			// HW encoders support only NV12
+			//CHECK_HR(hr = MFUtils::ConvertVideoTypeToUncompressedType(pSelf->pGrabberInputType, MFVideoFormat_NV12, &pEncoderInputType));
+			//CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetInputType(0, pEncoderInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+		}
+		// Create the sample grabber sink.
+		CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
+		CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pGrabberInputType, pSelf->pCallback, &pSelf->pSinkGrabber));
+
+		// To run as fast as possible, set this attribute (requires Windows 7):
+		CHECK_HR(hr = pSelf->pSinkGrabber->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+		// Create the Media Session.
+		CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
+
+		// Create the EVR activation object for the preview.
+		CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWndPreview, &pSelf->pSinkActivatePreview));
+
+		// Create the topology.
+		CHECK_HR(hr = MFUtils::CreateTopology(
+			pSelf->pSource,
+			pSelf->pEncoder ? pSelf->pEncoder->GetMFT() : NULL,
+			pSelf->pSinkGrabber,
+			pSelf->pSinkActivatePreview,
+			pSelf->pGrabberInputType,
+			&pTopology));
+		// Resolve topology (adds video processors if needed).
+		CHECK_HR(hr = MFUtils::ResolveTopology(pTopology, &pSelf->pTopology));
+
+		// Find EVR for the preview.
+		CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopology, MFUtils::g_ullTopoIdSinkPreview, (void**)&pEvr));
+
+		// Find negotiated media and update producer
+		UINT32 nNegWidth = (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, nNegHeight = (UINT32)TMEDIA_PRODUCER(pSelf)->video.height, nNegNumeratorFps = (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps, nNegDenominatorFps = 1;
+		CHECK_HR(hr = pSelf->pTopology->GetNodeByID(MFUtils::g_ullTopoIdSinkMain, &pNodeGrabber));
+		CHECK_HR(hr = pNodeGrabber->GetInputPrefType(0, &pGrabberNegotiatedInputMedia));
+		hr = MFGetAttributeSize(pGrabberNegotiatedInputMedia, MF_MT_FRAME_SIZE, &nNegWidth, &nNegHeight);
+		if (SUCCEEDED(hr))
+		{
+			TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: width(%u/%u), height(%u/%u)",
+				TMEDIA_PRODUCER(pSelf)->video.width, nNegWidth,
+				TMEDIA_PRODUCER(pSelf)->video.height, nNegHeight
+				);
+			TMEDIA_PRODUCER(pSelf)->video.width = nNegWidth;
+			TMEDIA_PRODUCER(pSelf)->video.height = nNegHeight;
+		}
+		hr = MFGetAttributeRatio(pGrabberNegotiatedInputMedia, MF_MT_FRAME_RATE, &nNegNumeratorFps, &nNegDenominatorFps);
+		if (SUCCEEDED(hr))
+		{
+			TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: fps(%u/%u)",
+				TMEDIA_PRODUCER(pSelf)->video.fps, (nNegNumeratorFps / nNegDenominatorFps)
+				);
+			TMEDIA_PRODUCER(pSelf)->video.fps = (nNegNumeratorFps / nNegDenominatorFps);
+		}
+
+		// Create EVR watcher for the preview.
+		pSelf->pWatcherPreview = new DisplayWatcher(pSelf->hWndPreview, pEvr, hr);
+		CHECK_HR(hr);
+	}
+
+bail:
+	SafeRelease(&pSessionAttributes);
+	SafeRelease(&pTopology);
+	SafeRelease(&pEvr);
+	SafeRelease(&pEncoderInputType);
+	SafeRelease(&pNodeGrabber);
+	SafeRelease(&pGrabberNegotiatedInputMedia);
+
+	pSelf->bPrepared = SUCCEEDED(hr);
+	return pSelf->bPrepared ? 0 : -1;
+}
+
+// Start the prepared producer: start the preview watcher, run the media
+// session and spawn the event-pump thread (RunSessionThread).
+// @retval 0 on success (or already started), -1 otherwise.
+static int plugin_win_mf_producer_video_start(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+	if (!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if (pSelf->bStarted){
+		TSK_DEBUG_INFO("MF video producer already started");
+		return 0;
+	}
+	if (!pSelf->bPrepared){
+		TSK_DEBUG_ERROR("MF video producer not prepared");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+
+	// Run preview watcher
+	if (pSelf->pWatcherPreview) {
+		CHECK_HR(hr = pSelf->pWatcherPreview->Start());
+	}
+
+	// Run the media session.
+	CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+
+	// Start asynchronous watcher thread
+	// 'bStarted' must be set before spawning: RunSessionThread uses it as its loop condition
+	pSelf->bStarted = true;
+	int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+	if (ret != 0) {
+		// rollback: clear the flag, reap any partially created thread, stop the session
+		TSK_DEBUG_ERROR("Failed to create thread");
+		hr = E_FAIL;
+		pSelf->bStarted = false;
+		if (pSelf->ppTread[0]){
+			tsk_thread_join(&pSelf->ppTread[0]);
+		}
+		MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
+		goto bail;
+	}
+
+bail:
+	return SUCCEEDED(hr) ? 0 : -1;
+}
+
+// Pause the running capture session. Succeeds as a no-op when not started.
+static int plugin_win_mf_producer_video_pause(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_video_t* pProducer = (plugin_win_mf_producer_video_t*)self;
+
+	if (!pProducer){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+	if (!pProducer->bStarted)
+	{
+		TSK_DEBUG_INFO("MF video producer not started");
+		return 0;
+	}
+
+	return SUCCEEDED(MFUtils::PauseSession(pProducer->pSession)) ? 0 : -1;
+}
+
+// Stop the producer: stop the preview watcher, shut the session down to wake
+// the event-pump thread, join it, then shut the source down and unprepare.
+// @retval result of _plugin_win_mf_producer_video_unprepare().
+static int plugin_win_mf_producer_video_stop(tmedia_producer_t* self)
+{
+	plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+	if (!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	HRESULT hr = S_OK;
+
+	if (pSelf->pWatcherPreview){
+		hr = pSelf->pWatcherPreview->Stop();
+	}
+
+	// for the thread
+	pSelf->bStarted = false;
+	hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
+	if (pSelf->ppTread[0]){
+		tsk_thread_join(&pSelf->ppTread[0]);
+	}
+	hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
+
+	// next start() will be called after prepare()
+	return _plugin_win_mf_producer_video_unprepare(pSelf);
+}
+
+// Release everything allocated by prepare(): device list, preview watcher,
+// session, source, sinks, topology and media types. Safe to call repeatedly;
+// the producer must already be stopped. Clears bPrepared.
+static int _plugin_win_mf_producer_video_unprepare(plugin_win_mf_producer_video_t* pSelf)
+{
+	if (!pSelf){
+		TSK_DEBUG_ERROR("Invalid parameter");
+		return -1;
+	}
+
+	if (pSelf->bStarted) {
+		// plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+		TSK_DEBUG_ERROR("Producer must be stopped before calling unprepare");
+	}
+	if (pSelf->pDeviceList){
+		delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+	}
+	if (pSelf->pWatcherPreview){
+		pSelf->pWatcherPreview->Stop();
+	}
+	// shut down source and session before releasing the COM references
+	if (pSelf->pSource){
+		pSelf->pSource->Shutdown();
+	}
+	if (pSelf->pSession){
+		pSelf->pSession->Shutdown();
+	}
+
+	SafeRelease(&pSelf->pEncoder);
+	SafeRelease(&pSelf->pSession);
+	SafeRelease(&pSelf->pSource);
+	SafeRelease(&pSelf->pSinkActivatePreview);
+	SafeRelease(&pSelf->pCallback);
+	SafeRelease(&pSelf->pSinkGrabber);
+	SafeRelease(&pSelf->pTopology);
+	SafeRelease(&pSelf->pGrabberInputType);
+
+	// watcher deleted last: it was stopped above and references the EVR
+	if (pSelf->pWatcherPreview){
+		delete pSelf->pWatcherPreview;
+		pSelf->pWatcherPreview = NULL;
+	}
+
+	pSelf->bPrepared = false;
+
+	return 0;
+}
+
+//
+// Windows Media Foundation video producer object definition
+//
+/* constructor */
+// Initializes a freshly allocated producer wrapper with default capture
+// settings (raw NV12 input, CIF @ 15 fps).
+static tsk_object_t* plugin_win_mf_producer_video_ctor(tsk_object_t * self, va_list * app)
+{
+	// make sure Media Foundation is up before anything else
+	MFUtils::Startup();
+
+	plugin_win_mf_producer_video_t *pProducer = (plugin_win_mf_producer_video_t *)self;
+	if (pProducer){
+		tmedia_producer_t* pBase = TMEDIA_PRODUCER(pProducer);
+
+		/* init base */
+		tmedia_producer_init(pBase);
+
+		/* init self with default values*/
+		pBase->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+		pBase->video.chroma = tmedia_chroma_nv12;
+		pBase->video.fps = 15;
+		pBase->video.width = 352;
+		pBase->video.height = 288;
+
+		TSK_DEBUG_INFO("Create WinMF video producer");
+	}
+	return self;
+}
+/* destructor */
+// Stops the producer if still running, then releases base and self resources.
+static tsk_object_t* plugin_win_mf_producer_video_dtor(tsk_object_t * self)
+{
+	plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
+	if (pSelf){
+		/* stop */
+		if (pSelf->bStarted){
+			plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+		}
+
+		/* deinit base */
+		tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
+		/* deinit self */
+		// unprepare releases all COM references and deletes owned helpers
+		_plugin_win_mf_producer_video_unprepare(pSelf);
+	}
+
+	return self;
+}
+/* object definition */
+// tsk_object vtable for the video producer: size, ctor, dtor, no comparator.
+static const tsk_object_def_t plugin_win_mf_producer_video_def_s =
+{
+	sizeof(plugin_win_mf_producer_video_t),
+	plugin_win_mf_producer_video_ctor,
+	plugin_win_mf_producer_video_dtor,
+	tsk_null,
+};
+/* plugin definition*/
+// tmedia producer plugin descriptor: binds the object definition above to the
+// set/prepare/start/pause/stop entry points for the video media type.
+static const tmedia_producer_plugin_def_t plugin_win_mf_producer_video_plugin_def_s =
+{
+	&plugin_win_mf_producer_video_def_s,
+
+	tmedia_video,
+	"Microsoft Windows Media Foundation producer (Video)",
+
+	plugin_win_mf_producer_video_set,
+	plugin_win_mf_producer_video_prepare,
+	plugin_win_mf_producer_video_start,
+	plugin_win_mf_producer_video_pause,
+	plugin_win_mf_producer_video_stop
+};
+// exported pointer used by the plugin registry
+const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t = &plugin_win_mf_producer_video_plugin_def_s;
+
+
+// Run session async thread
+// Pumps the media session's event queue until the session ends, an event
+// reports a failure HRESULT, or the producer clears bStarted. Spawned by
+// plugin_win_mf_producer_video_start(); always returns NULL.
+static void* TSK_STDCALL RunSessionThread(void *pArg)
+{
+	plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)pArg;
+	HRESULT hrStatus = S_OK;
+	HRESULT hr = S_OK;
+	IMFMediaEvent *pEvent = NULL;
+	MediaEventType met;
+
+	TSK_DEBUG_INFO("RunSessionThread (MF video producer) - ENTER");
+
+	while (pSelf->bStarted){
+		CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent)); // blocking wait; caller owns pEvent
+		CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+		CHECK_HR(hr = pEvent->GetType(&met));
+
+		if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
+		{
+			TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+			hr = hrStatus;
+			goto bail;
+		}
+		if (met == MESessionEnded)
+		{
+			break;
+		}
+		SafeRelease(&pEvent);
+	}
+
+bail:
+	SafeRelease(&pEvent); // fix: pEvent was leaked when exiting via 'break' or 'goto bail'
+	TSK_DEBUG_INFO("RunSessionThread (MF video producer) - EXIT");
+
+	return NULL;
+} \ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_tdav.cxx b/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
new file mode 100644
index 0000000..d08bcfc
--- /dev/null
+++ b/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
@@ -0,0 +1,22 @@
+/* Copyright (C) 2013 Mamadou DIOP
+* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
+*
+* This file is part of Open Source Doubango Framework.
+*
+* DOUBANGO is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* DOUBANGO is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with DOUBANGO.
+*/
+// This file is used to avoid duplication for the .obj files
+#include "../../tinyDAV/src/codecs/h264/tdav_codec_h264_rtp.c"
+#include "../../tinyDAV/src/audio/tdav_consumer_audio.c"
+#include "../../tinyDAV/src/audio/tdav_producer_audio.c" \ No newline at end of file
diff --git a/plugins/pluginWinMF/version.rc b/plugins/pluginWinMF/version.rc
new file mode 100644
index 0000000..446f34a
--- /dev/null
+++ b/plugins/pluginWinMF/version.rc
@@ -0,0 +1,102 @@
+// Microsoft Visual C++ generated resource script.
+//
+// #include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""afxres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 2.0.0.1156
+ PRODUCTVERSION 2.0.0.1156
+ FILEFLAGSMASK 0x17L
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904b0"
+ BEGIN
+ VALUE "CompanyName", "Doubango Telecom"
+ VALUE "FileDescription", "Doubango IMS Framework Media Foundation Plugin"
+ VALUE "FileVersion", "2.0.0.1156"
+ VALUE "InternalName", "pluginMF.dll"
+ VALUE "LegalCopyright", "(c) 2010-2013 Doubango Telecom. All rights reserved."
+ VALUE "OriginalFilename", "pluginMF.dll"
+ VALUE "ProductName", "Doubango IMS Framework Foundation Plugin"
+ VALUE "ProductVersion", "2.0.0.1156"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
OpenPOWER on IntegriCloud