Diffstat (limited to 'plugins')
-rwxr-xr-x  plugins/audio_opensles/audio_opensles.cxx  866
-rwxr-xr-x  plugins/audio_opensles/audio_opensles.h  6
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_config.h  14
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_consumer.cxx  347
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_consumer.h  8
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_device.cxx  938
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_device.h  132
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_device_impl.cxx  46
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_device_impl.h  26
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_producer.cxx  341
-rwxr-xr-x  plugins/audio_opensles/audio_opensles_producer.h  6
-rwxr-xr-x  plugins/audio_opensles/dllmain.cxx  25
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc.cxx  858
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc.h  6
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc_config.h  52
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc_consumer.cxx  309
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc_consumer.h  8
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc_producer.cxx  315
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc_producer.h  6
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc_transport.cxx  34
-rwxr-xr-x  plugins/audio_webrtc/audio_webrtc_transport.h  57
-rwxr-xr-x  plugins/audio_webrtc/dllmain.cxx  25
-rwxr-xr-x  plugins/pluginCUDA/dllmain_cuda.cxx  100
-rwxr-xr-x  plugins/pluginCUDA/plugin_cuda_codec_h264.cxx  2149
-rwxr-xr-x  plugins/pluginCUDA/plugin_cuda_config.h  12
-rwxr-xr-x  plugins/pluginCUDA/plugin_cuda_tdav.cxx  6
-rwxr-xr-x  plugins/pluginCUDA/plugin_cuda_utils.cxx  173
-rwxr-xr-x  plugins/pluginCUDA/plugin_cuda_utils.h  26
-rwxr-xr-x  plugins/pluginDirectShow/dllmain_dshow.cxx  144
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h  36
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSBufferWriter.h  14
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSCaptureFormat.cxx  78
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSCaptureFormat.h  42
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSCaptureGraph.cxx  591
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSCaptureGraph.h  82
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSCaptureUtils.cxx  643
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSCaptureUtils.h  6
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDibHelper.cxx  21
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDisplay.cxx  751
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDisplay.h  76
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDisplayGraph.cxx  470
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDisplayGraph.h  120
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx  212
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx  277
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx  27
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSDisplayOverlay.h  46
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx  129
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSFrameRateFilter.h  50
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSGrabber.cxx  363
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSGrabber.h  68
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSOutputFilter.cxx  85
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSOutputFilter.h  116
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSOutputStream.cxx  379
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSOutputStream.h  102
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSPushSource.h  156
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx  268
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx  346
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSScreenCastGraph.h  136
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSUtils.cxx  516
-rwxr-xr-x  plugins/pluginDirectShow/internals/DSUtils.h  6
-rwxr-xr-x  plugins/pluginDirectShow/internals/Resizer.cxx  1477
-rwxr-xr-x  plugins/pluginDirectShow/internals/Resizer.h  16
-rwxr-xr-x  plugins/pluginDirectShow/internals/VideoDisplayName.cxx  12
-rwxr-xr-x  plugins/pluginDirectShow/internals/VideoDisplayName.h  20
-rwxr-xr-x  plugins/pluginDirectShow/internals/VideoFrame.h  95
-rwxr-xr-x  plugins/pluginDirectShow/internals/VideoGrabberName.cxx  12
-rwxr-xr-x  plugins/pluginDirectShow/internals/VideoGrabberName.h  20
-rwxr-xr-x  plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx  52
-rwxr-xr-x  plugins/pluginDirectShow/internals/wince/CPropertyBag.h  33
-rwxr-xr-x  plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h  11
-rwxr-xr-x  plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx  12
-rwxr-xr-x  plugins/pluginDirectShow/internals/wince/DSNullFilter.h  6
-rwxr-xr-x  plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx  299
-rwxr-xr-x  plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h  58
-rwxr-xr-x  plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h  20
-rwxr-xr-x  plugins/pluginDirectShow/plugin_dshow_config.h  12
-rwxr-xr-x  plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx  383
-rwxr-xr-x  plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx  1689
-rwxr-xr-x  plugins/pluginDirectShow/plugin_video_dshow_producer.cxx  392
-rwxr-xr-x  plugins/pluginWASAPI/dllmain_wasapi.cxx  105
-rwxr-xr-x  plugins/pluginWASAPI/plugin_wasapi_config.h  12
-rwxr-xr-x  plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx  953
-rwxr-xr-x  plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx  968
-rwxr-xr-x  plugins/pluginWASAPI/plugin_wasapi_tdav.cxx  6
-rwxr-xr-x  plugins/pluginWASAPI/plugin_wasapi_utils.cxx  76
-rwxr-xr-x  plugins/pluginWASAPI/plugin_wasapi_utils.h  16
-rwxr-xr-x  plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx  113
-rwxr-xr-x  plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h  12
-rwxr-xr-x  plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx  556
-rwxr-xr-x  plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx  59
-rwxr-xr-x  plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h  8
-rwxr-xr-x  plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx  516
-rwxr-xr-x  plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx  126
-rwxr-xr-x  plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h  44
-rwxr-xr-x  plugins/pluginWinDD/dllmain_dd.cxx  98
-rwxr-xr-x  plugins/pluginWinDD/internals/CommonTypes.h  22
-rwxr-xr-x  plugins/pluginWinDD/internals/DisplayManager.cxx  315
-rwxr-xr-x  plugins/pluginWinDD/internals/DisplayManager.h  44
-rwxr-xr-x  plugins/pluginWinDD/internals/DuplicationManager.cxx  393
-rwxr-xr-x  plugins/pluginWinDD/internals/DuplicationManager.h  40
-rwxr-xr-x  plugins/pluginWinDD/internals/OutputManager.cxx  503
-rwxr-xr-x  plugins/pluginWinDD/internals/OutputManager.h  66
-rwxr-xr-x  plugins/pluginWinDD/internals/ThreadManager.cxx  95
-rwxr-xr-x  plugins/pluginWinDD/internals/ThreadManager.h  28
-rwxr-xr-x  plugins/pluginWinDD/plugin_win_dd_config.h  4
-rwxr-xr-x  plugins/pluginWinDD/plugin_win_dd_producer.cxx  1600
-rwxr-xr-x  plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c  160
-rwxr-xr-x  plugins/pluginWinMF/dllmain_mf.cxx  235
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_codec.cxx  1161
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_codec.h  163
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_codec_topology.cxx  619
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_codec_topology.h  66
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_custom_src.cxx  821
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_custom_src.h  126
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_devices.cxx  95
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_devices.h  12
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_display_watcher.cxx  181
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_display_watcher.h  32
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_sample_grabber.cxx  99
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_sample_grabber.h  18
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_sample_queue.cxx  75
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_sample_queue.h  37
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_utils.cxx  2792
-rwxr-xr-x  plugins/pluginWinMF/internals/mf_utils.h  383
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx  1163
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_config.h  12
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx  175
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx  2151
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_converter_video.cxx  752
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx  425
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_producer_video.cxx  1131
-rwxr-xr-x  plugins/pluginWinMF/plugin_win_mf_tdav.cxx  6
132 files changed, 18251 insertions, 19283 deletions
diff --git a/plugins/audio_opensles/audio_opensles.cxx b/plugins/audio_opensles/audio_opensles.cxx
index 0cf5ea3..1d2e864 100755
--- a/plugins/audio_opensles/audio_opensles.cxx
+++ b/plugins/audio_opensles/audio_opensles.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -29,93 +29,87 @@
#include "tsk_safeobj.h"
#include "tsk_debug.h"
-typedef enum PLUGIN_INDEX_E
-{
- PLUGIN_INDEX_AUDIO_CONSUMER,
- PLUGIN_INDEX_AUDIO_PRODUCER,
- PLUGIN_INDEX_COUNT
+typedef enum PLUGIN_INDEX_E {
+ PLUGIN_INDEX_AUDIO_CONSUMER,
+ PLUGIN_INDEX_AUDIO_PRODUCER,
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- return PLUGIN_INDEX_COUNT;
+ return PLUGIN_INDEX_COUNT;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_AUDIO_CONSUMER: return tsk_plugin_def_type_consumer;
- case PLUGIN_INDEX_AUDIO_PRODUCER: return tsk_plugin_def_type_producer;
- default:
- {
- AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ return tsk_plugin_def_type_consumer;
+ case PLUGIN_INDEX_AUDIO_PRODUCER:
+ return tsk_plugin_def_type_producer;
+ default: {
+ AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+ }
+ }
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return tsk_plugin_def_media_type_audio;
- }
- default:
- {
- AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return tsk_plugin_def_media_type_audio;
+ }
+ default: {
+ AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+ }
+ }
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- {
- return audio_consumer_opensles_plugin_def_t;
- }
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return audio_producer_opensles_plugin_def_t;
- }
- default:
- {
- AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_AUDIO_CONSUMER: {
+ return audio_consumer_opensles_plugin_def_t;
+ }
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return audio_producer_opensles_plugin_def_t;
+ }
+ default: {
+ AUDIO_OPENSLES_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+ }
+ }
}
//
// SLES AudioInstance
//
-typedef struct audio_opensles_instance_s
-{
- TSK_DECLARE_OBJECT;
-
- uint64_t sessionId;
-
- bool isStarted;
-
- bool isConsumerPrepared;
- bool isConsumerStarted;
- bool isProducerPrepared;
- bool isProducerStarted;
-
- bool isSpeakerAvailable;
- bool isPlayoutAvailable;
- bool isRecordingAvailable;
-
- SLAudioDevice* device;
- SLAudioDeviceCallbackImpl* callback;
-
- TSK_DECLARE_SAFEOBJ;
+typedef struct audio_opensles_instance_s {
+ TSK_DECLARE_OBJECT;
+
+ uint64_t sessionId;
+
+ bool isStarted;
+
+ bool isConsumerPrepared;
+ bool isConsumerStarted;
+ bool isProducerPrepared;
+ bool isProducerStarted;
+
+ bool isSpeakerAvailable;
+ bool isPlayoutAvailable;
+ bool isRecordingAvailable;
+
+ SLAudioDevice* device;
+ SLAudioDeviceCallbackImpl* callback;
+
+ TSK_DECLARE_SAFEOBJ;
}
audio_opensles_instance_t;
typedef tsk_list_t audio_opensles_instances_L_t;
@@ -124,442 +118,442 @@ static audio_opensles_instances_L_t* __audioInstances = tsk_null;
static tsk_object_t* audio_opensles_instance_ctor(tsk_object_t * self, va_list * app)
{
- audio_opensles_instance_t* audioInstance = (audio_opensles_instance_t*)self;
- if(audioInstance){
- tsk_safeobj_init(audioInstance);
- }
- return self;
+ audio_opensles_instance_t* audioInstance = (audio_opensles_instance_t*)self;
+ if(audioInstance) {
+ tsk_safeobj_init(audioInstance);
+ }
+ return self;
}
static tsk_object_t* audio_opensles_instance_dtor(tsk_object_t * self)
-{
- AUDIO_OPENSLES_DEBUG_INFO("Audio Instance destroyed");
- audio_opensles_instance_t* audioInstance = (audio_opensles_instance_t*)self;
- if(audioInstance){
+{
+ AUDIO_OPENSLES_DEBUG_INFO("Audio Instance destroyed");
+ audio_opensles_instance_t* audioInstance = (audio_opensles_instance_t*)self;
+ if(audioInstance) {
tsk_safeobj_lock(audioInstance);
- if(audioInstance->device){
- audioInstance->device->SetCallback(NULL);
- audioInstance->device->Terminate();
- delete audioInstance->device;
- audioInstance->device = tsk_null;
- }
- if(audioInstance->callback){
- delete audioInstance->callback;
- audioInstance->callback = tsk_null;
- }
+ if(audioInstance->device) {
+ audioInstance->device->SetCallback(NULL);
+ audioInstance->device->Terminate();
+ delete audioInstance->device;
+ audioInstance->device = tsk_null;
+ }
+ if(audioInstance->callback) {
+ delete audioInstance->callback;
+ audioInstance->callback = tsk_null;
+ }
tsk_safeobj_unlock(audioInstance);
-
- tsk_safeobj_deinit(audioInstance);
- }
- return self;
+
+ tsk_safeobj_deinit(audioInstance);
+ }
+ return self;
}
static int audio_opensles_instance_cmp(const tsk_object_t *_ai1, const tsk_object_t *_ai2)
{
- return ((int)_ai1 - (int)_ai2);
+ return ((int)_ai1 - (int)_ai2);
}
-static const tsk_object_def_t audio_opensles_instance_def_s =
-{
- sizeof(audio_opensles_instance_t),
- audio_opensles_instance_ctor,
- audio_opensles_instance_dtor,
- audio_opensles_instance_cmp,
+static const tsk_object_def_t audio_opensles_instance_def_s = {
+ sizeof(audio_opensles_instance_t),
+ audio_opensles_instance_ctor,
+ audio_opensles_instance_dtor,
+ audio_opensles_instance_cmp,
};
const tsk_object_def_t *audio_opensles_instance_def_t = &audio_opensles_instance_def_s;
audio_opensles_instance_handle_t* audio_opensles_instance_create(uint64_t sessionId)
{
- audio_opensles_instance_t* audioInstance = tsk_null;
-
- // create list used to hold instances
- if(!__audioInstances && !(__audioInstances = tsk_list_create())){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to create new list");
- return tsk_null;
- }
-
- //= lock the list
- tsk_list_lock(__audioInstances);
-
- // find the instance from the list
- const tsk_list_item_t* item;
- tsk_list_foreach(item, __audioInstances){
- if(((audio_opensles_instance_t*)item->data)->sessionId == sessionId){
- audioInstance = (audio_opensles_instance_t*)tsk_object_ref(item->data);
- break;
- }
- }
-
- if(!audioInstance){
- audio_opensles_instance_t* _audioInstance;
- if(!(_audioInstance = (audio_opensles_instance_t*)tsk_object_new(&audio_opensles_instance_def_s))){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to create new audio instance");
- goto done;
- }
-
- if(!(_audioInstance->device = new SLAudioDevice())){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio device");
- TSK_OBJECT_SAFE_FREE(_audioInstance);
- goto done;
- }
-
- if(!(_audioInstance->callback = new SLAudioDeviceCallbackImpl())){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio transport");
- TSK_OBJECT_SAFE_FREE(_audioInstance);
- goto done;
- }
- if((_audioInstance->device->SetCallback(_audioInstance->callback))){
- AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::RegisterAudioCallback() failed");
- TSK_OBJECT_SAFE_FREE(_audioInstance);
- goto done;
- }
-
- if((_audioInstance->device->Init())){
- AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::Init() failed");
- TSK_OBJECT_SAFE_FREE(_audioInstance);
- goto done;
- }
-
- _audioInstance->sessionId = sessionId;
- audioInstance = _audioInstance;
- tsk_list_push_back_data(__audioInstances, (void**)&_audioInstance);
- }
+ audio_opensles_instance_t* audioInstance = tsk_null;
+
+ // create list used to hold instances
+ if(!__audioInstances && !(__audioInstances = tsk_list_create())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create new list");
+ return tsk_null;
+ }
+
+ //= lock the list
+ tsk_list_lock(__audioInstances);
+
+ // find the instance from the list
+ const tsk_list_item_t* item;
+ tsk_list_foreach(item, __audioInstances) {
+ if(((audio_opensles_instance_t*)item->data)->sessionId == sessionId) {
+ audioInstance = (audio_opensles_instance_t*)tsk_object_ref(item->data);
+ break;
+ }
+ }
+
+ if(!audioInstance) {
+ audio_opensles_instance_t* _audioInstance;
+ if(!(_audioInstance = (audio_opensles_instance_t*)tsk_object_new(&audio_opensles_instance_def_s))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create new audio instance");
+ goto done;
+ }
+
+ if(!(_audioInstance->device = new SLAudioDevice())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio device");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+
+ if(!(_audioInstance->callback = new SLAudioDeviceCallbackImpl())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio transport");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+ if((_audioInstance->device->SetCallback(_audioInstance->callback))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::RegisterAudioCallback() failed");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+
+ if((_audioInstance->device->Init())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::Init() failed");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+
+ _audioInstance->sessionId = sessionId;
+ audioInstance = _audioInstance;
+ tsk_list_push_back_data(__audioInstances, (void**)&_audioInstance);
+ }
done:
- //= unlock the list
- tsk_list_unlock(__audioInstances);
+ //= unlock the list
+ tsk_list_unlock(__audioInstances);
- return audioInstance;
+ return audioInstance;
}
int audio_opensles_instance_prepare_consumer(audio_opensles_instance_handle_t* _self, tmedia_consumer_t** _consumer)
{
- audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
- const struct audio_consumer_opensles_s* consumer = (const struct audio_consumer_opensles_s*)*_consumer;
- if(!self || !self->device || !self->callback || !_consumer || !*_consumer){
- AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- if(self->isConsumerPrepared){
- AUDIO_OPENSLES_DEBUG_WARN("Consumer already prepared");
- return 0;
- }
-
- int ret;
- bool _bool;
-
- tsk_safeobj_lock(self);
-
- self->callback->SetConsumer(consumer);
-
- if((ret = self->device->SpeakerIsAvailable(&_bool))){
- AUDIO_OPENSLES_DEBUG_ERROR("SpeakerIsAvailable() failed with error code=%d", ret);
- }
- else{
- if(!_bool){
- AUDIO_OPENSLES_DEBUG_ERROR("SpeakerIsAvailable() returned false");
- }
- self->isSpeakerAvailable = _bool;
- }
-
- if((ret = self->device->InitSpeaker())){
- AUDIO_OPENSLES_DEBUG_ERROR("InitSpeaker() failed with error code=%d", ret);
- }
- else if((ret = self->device->SetSpeakerOn(audio_consumer_opensles_is_speakerOn(consumer)))){
- AUDIO_OPENSLES_DEBUG_ERROR("SetSpeakerOn() failed with error code=%d", ret);
- }
-
- if((ret = self->device->PlayoutIsAvailable(&_bool))){
- AUDIO_OPENSLES_DEBUG_ERROR("PlayoutIsAvailable() failed with error code =%d", ret);
- }
- else{
- if(!_bool){
- AUDIO_OPENSLES_DEBUG_ERROR("PlayoutIsAvailable() returned false");
- }
- self->isPlayoutAvailable = _bool;
- }
-
- if((ret = self->device->SetStereoPlayout(((*_consumer)->audio.in.channels == 2)))){
- AUDIO_OPENSLES_DEBUG_ERROR("SetStereoPlayout(%d==2) failed with error code=%d", (*_consumer)->audio.in.channels, ret);
- }
-
- //if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, (*_consumer)->audio.ptime))){
- // AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", (*_consumer)->audio.ptime, ret);
- //}
- // always request 10ms buffers
- if((ret = self->device->SetPlayoutBuffer(10))){
- AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", 10, ret);
- }
-
- int playoutSampleRate = (*_consumer)->audio.out.rate ? (*_consumer)->audio.out.rate : (*_consumer)->audio.in.rate;
- if((ret = self->device->SetPlayoutSampleRate(playoutSampleRate))){
- AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutSampleRate(%d) failed with error code=%d", playoutSampleRate, ret);
- }
-
- if((ret = self->device->InitPlayout())){
- AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::InitPlayout() failed with error code = %d", ret);
- goto done;
- }
-
- // init output parameters
- if((ret = self->device->StereoPlayout(&_bool))){
- AUDIO_OPENSLES_DEBUG_ERROR("StereoPlayout() failed with error code=%d", ret);
- }
- else{
- (*_consumer)->audio.out.channels = (_bool ? 2 : 1);
- }
- if((ret = self->device->PlayoutSampleRate(&playoutSampleRate))){
- AUDIO_OPENSLES_DEBUG_ERROR("PlayoutSampleRate() failed with error code=%d", ret);
- }
- else{
- (*_consumer)->audio.out.rate = playoutSampleRate;
- }
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ const struct audio_consumer_opensles_s* consumer = (const struct audio_consumer_opensles_s*)*_consumer;
+ if(!self || !self->device || !self->callback || !_consumer || !*_consumer) {
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ if(self->isConsumerPrepared) {
+ AUDIO_OPENSLES_DEBUG_WARN("Consumer already prepared");
+ return 0;
+ }
+
+ int ret;
+ bool _bool;
+
+ tsk_safeobj_lock(self);
+
+ self->callback->SetConsumer(consumer);
+
+ if((ret = self->device->SpeakerIsAvailable(&_bool))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("SpeakerIsAvailable() failed with error code=%d", ret);
+ }
+ else {
+ if(!_bool) {
+ AUDIO_OPENSLES_DEBUG_ERROR("SpeakerIsAvailable() returned false");
+ }
+ self->isSpeakerAvailable = _bool;
+ }
+
+ if((ret = self->device->InitSpeaker())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("InitSpeaker() failed with error code=%d", ret);
+ }
+ else if((ret = self->device->SetSpeakerOn(audio_consumer_opensles_is_speakerOn(consumer)))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("SetSpeakerOn() failed with error code=%d", ret);
+ }
+
+ if((ret = self->device->PlayoutIsAvailable(&_bool))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("PlayoutIsAvailable() failed with error code =%d", ret);
+ }
+ else {
+ if(!_bool) {
+ AUDIO_OPENSLES_DEBUG_ERROR("PlayoutIsAvailable() returned false");
+ }
+ self->isPlayoutAvailable = _bool;
+ }
+
+ if((ret = self->device->SetStereoPlayout(((*_consumer)->audio.in.channels == 2)))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("SetStereoPlayout(%d==2) failed with error code=%d", (*_consumer)->audio.in.channels, ret);
+ }
+
+ //if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, (*_consumer)->audio.ptime))){
+ // AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", (*_consumer)->audio.ptime, ret);
+ //}
+ // always request 10ms buffers
+ if((ret = self->device->SetPlayoutBuffer(10))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", 10, ret);
+ }
+
+ int playoutSampleRate = (*_consumer)->audio.out.rate ? (*_consumer)->audio.out.rate : (*_consumer)->audio.in.rate;
+ if((ret = self->device->SetPlayoutSampleRate(playoutSampleRate))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("SetPlayoutSampleRate(%d) failed with error code=%d", playoutSampleRate, ret);
+ }
+
+ if((ret = self->device->InitPlayout())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::InitPlayout() failed with error code = %d", ret);
+ goto done;
+ }
+
+ // init output parameters
+ if((ret = self->device->StereoPlayout(&_bool))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("StereoPlayout() failed with error code=%d", ret);
+ }
+ else {
+ (*_consumer)->audio.out.channels = (_bool ? 2 : 1);
+ }
+ if((ret = self->device->PlayoutSampleRate(&playoutSampleRate))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("PlayoutSampleRate() failed with error code=%d", ret);
+ }
+ else {
+ (*_consumer)->audio.out.rate = playoutSampleRate;
+ }
done:
- tsk_safeobj_unlock(self);
+ tsk_safeobj_unlock(self);
- self->isConsumerPrepared = (ret == 0);
+ self->isConsumerPrepared = (ret == 0);
- return ret;
+ return ret;
}
int audio_opensles_instance_prepare_producer(audio_opensles_instance_handle_t* _self, tmedia_producer_t** _producer)
{
- audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
- if(!self || !self->device || !self->callback || !_producer || !*_producer){
- AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- if(self->isProducerPrepared){
- AUDIO_OPENSLES_DEBUG_WARN("Producer already prepared");
- return 0;
- }
-
- int ret;
- bool _bool;
-
- tsk_safeobj_lock(self);
-
- self->callback->SetProducer((const struct audio_producer_opensles_s*)*_producer);
-
- if((ret = self->device->RecordingIsAvailable(&_bool))){
- AUDIO_OPENSLES_DEBUG_ERROR("RecordingIsAvailable() failed with error code =%d", ret);
- }
- else{
- if(!_bool){
- AUDIO_OPENSLES_DEBUG_ERROR("RecordingIsAvailable() returned false");
- }
- self->isRecordingAvailable = _bool;
- }
-
- if((ret = self->device->MicrophoneIsAvailable(&_bool))){
- AUDIO_OPENSLES_DEBUG_ERROR("MicrophoneIsAvailable() failed with error code =%d", ret);
- }
- else{
- if(!_bool){
- AUDIO_OPENSLES_DEBUG_ERROR("MicrophoneIsAvailable() returned false");
- }
- else{
- if((ret = self->device->InitMicrophone())){
- AUDIO_OPENSLES_DEBUG_ERROR("InitMicrophone() failed with error code =%d", ret);
- }
- }
- }
-
- if((ret = self->device->SetStereoRecording(((*_producer)->audio.channels == 2)))){
- AUDIO_OPENSLES_DEBUG_ERROR("SetStereoRecording(%d==2) failed with error code=%d", (*_producer)->audio.channels, ret);
- }
-
- int recordingSampleRate = (*_producer)->audio.rate;
- if((ret = self->device->SetRecordingSampleRate(recordingSampleRate))){
- AUDIO_OPENSLES_DEBUG_ERROR("SetRecordingSampleRate(%d) failed with error code=%d", recordingSampleRate, ret);
- }
-
- if((ret = self->device->InitRecording())){
- AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::InitRecording() failed with error code = %d", ret);
- goto done;
- }
-
- // init output parameters
- if((ret = self->device->StereoRecording(&_bool))){
- AUDIO_OPENSLES_DEBUG_ERROR("StereoRecording() failed with error code=%d", ret);
- }
- else{
- (*_producer)->audio.channels = (_bool ? 2 : 1);
- }
- if((ret = self->device->RecordingSampleRate(&recordingSampleRate))){
- AUDIO_OPENSLES_DEBUG_ERROR("RecordingSampleRate() failed with error code=%d", ret);
- }
- else{
- (*_producer)->audio.rate = recordingSampleRate;
- }
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback || !_producer || !*_producer) {
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ if(self->isProducerPrepared) {
+ AUDIO_OPENSLES_DEBUG_WARN("Producer already prepared");
+ return 0;
+ }
+
+ int ret;
+ bool _bool;
+
+ tsk_safeobj_lock(self);
+
+ self->callback->SetProducer((const struct audio_producer_opensles_s*)*_producer);
+
+ if((ret = self->device->RecordingIsAvailable(&_bool))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("RecordingIsAvailable() failed with error code =%d", ret);
+ }
+ else {
+ if(!_bool) {
+ AUDIO_OPENSLES_DEBUG_ERROR("RecordingIsAvailable() returned false");
+ }
+ self->isRecordingAvailable = _bool;
+ }
+
+ if((ret = self->device->MicrophoneIsAvailable(&_bool))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("MicrophoneIsAvailable() failed with error code =%d", ret);
+ }
+ else {
+ if(!_bool) {
+ AUDIO_OPENSLES_DEBUG_ERROR("MicrophoneIsAvailable() returned false");
+ }
+ else {
+ if((ret = self->device->InitMicrophone())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("InitMicrophone() failed with error code =%d", ret);
+ }
+ }
+ }
+
+ if((ret = self->device->SetStereoRecording(((*_producer)->audio.channels == 2)))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("SetStereoRecording(%d==2) failed with error code=%d", (*_producer)->audio.channels, ret);
+ }
+
+ int recordingSampleRate = (*_producer)->audio.rate;
+ if((ret = self->device->SetRecordingSampleRate(recordingSampleRate))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("SetRecordingSampleRate(%d) failed with error code=%d", recordingSampleRate, ret);
+ }
+
+ if((ret = self->device->InitRecording())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("AudioDeviceModule::InitRecording() failed with error code = %d", ret);
+ goto done;
+ }
+
+ // init output parameters
+ if((ret = self->device->StereoRecording(&_bool))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("StereoRecording() failed with error code=%d", ret);
+ }
+ else {
+ (*_producer)->audio.channels = (_bool ? 2 : 1);
+ }
+ if((ret = self->device->RecordingSampleRate(&recordingSampleRate))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("RecordingSampleRate() failed with error code=%d", ret);
+ }
+ else {
+ (*_producer)->audio.rate = recordingSampleRate;
+ }
done:
- tsk_safeobj_unlock(self);
+ tsk_safeobj_unlock(self);
- self->isProducerPrepared = (ret == 0);
+ self->isProducerPrepared = (ret == 0);
- return ret;
+ return ret;
}
int audio_opensles_instance_start_consumer(audio_opensles_instance_handle_t* _self)
{
- audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
- if(!self || !self->device || !self->callback){
- AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
- if(!self->isConsumerPrepared){
- AUDIO_OPENSLES_DEBUG_ERROR("Consumer not prepared");
- goto done;
- }
-
- if(self->isConsumerStarted){
- AUDIO_OPENSLES_DEBUG_WARN("Consumer already started");
- goto done;
- }
-
- if(self->isPlayoutAvailable){
- int ret;
- if((ret = self->device->StartPlayout())){
- AUDIO_OPENSLES_DEBUG_ERROR("StartPlayout() failed with error code = %d", ret);
- }
-
- self->isConsumerStarted = self->device->Playing();
- AUDIO_OPENSLES_DEBUG_INFO("isPlaying=%s", (self->isConsumerPrepared ? "true" : "false"));
- }
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback) {
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+ if(!self->isConsumerPrepared) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Consumer not prepared");
+ goto done;
+ }
+
+ if(self->isConsumerStarted) {
+ AUDIO_OPENSLES_DEBUG_WARN("Consumer already started");
+ goto done;
+ }
+
+ if(self->isPlayoutAvailable) {
+ int ret;
+ if((ret = self->device->StartPlayout())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("StartPlayout() failed with error code = %d", ret);
+ }
+
+ self->isConsumerStarted = self->device->Playing();
+ AUDIO_OPENSLES_DEBUG_INFO("isPlaying=%s", (self->isConsumerPrepared ? "true" : "false"));
+ }
done:
- tsk_safeobj_unlock(self);
- return (self->isConsumerStarted ? 0 : -1);
+ tsk_safeobj_unlock(self);
+ return (self->isConsumerStarted ? 0 : -1);
}
int audio_opensles_instance_start_producer(audio_opensles_instance_handle_t* _self)
{
- audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
- if(!self || !self->device || !self->callback){
- AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
- if(!self->isProducerPrepared){
- AUDIO_OPENSLES_DEBUG_ERROR("Producer not prepared");
- goto done;
- }
-
- if(self->isProducerStarted){
- AUDIO_OPENSLES_DEBUG_WARN("Consumer already started");
- goto done;
- }
-
- if(self->isRecordingAvailable){
- int ret;
- if((ret = self->device->StartRecording())){
- AUDIO_OPENSLES_DEBUG_ERROR("StartRecording() failed with error code = %d", ret);
- }
-
- self->isProducerStarted = self->device->Recording();
- AUDIO_OPENSLES_DEBUG_INFO("isRecording=%s", (self->isProducerStarted ? "true" : "false"));
- }
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback) {
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+ if(!self->isProducerPrepared) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Producer not prepared");
+ goto done;
+ }
+
+ if(self->isProducerStarted) {
+ AUDIO_OPENSLES_DEBUG_WARN("Consumer already started");
+ goto done;
+ }
+
+ if(self->isRecordingAvailable) {
+ int ret;
+ if((ret = self->device->StartRecording())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("StartRecording() failed with error code = %d", ret);
+ }
+
+ self->isProducerStarted = self->device->Recording();
+ AUDIO_OPENSLES_DEBUG_INFO("isRecording=%s", (self->isProducerStarted ? "true" : "false"));
+ }
done:
- tsk_safeobj_unlock(self);
- return (self->isProducerStarted ? 0 : -1);
+ tsk_safeobj_unlock(self);
+ return (self->isProducerStarted ? 0 : -1);
}
int audio_opensles_instance_stop_consumer(audio_opensles_instance_handle_t* _self)
{
- audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
- if(!self || !self->device || !self->callback){
- AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
-
- if(!self->isConsumerStarted){
- goto done;
- }
-
- int ret;
- if((ret = self->device->StopPlayout())){
- AUDIO_OPENSLES_DEBUG_ERROR("StopPlayout() failed with error code = %d", ret);
- }
- else{
- self->isConsumerStarted = self->device->Playing();
- self->isConsumerPrepared = false;
- }
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback) {
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+
+ if(!self->isConsumerStarted) {
+ goto done;
+ }
+
+ int ret;
+ if((ret = self->device->StopPlayout())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("StopPlayout() failed with error code = %d", ret);
+ }
+ else {
+ self->isConsumerStarted = self->device->Playing();
+ self->isConsumerPrepared = false;
+ }
done:
- tsk_safeobj_unlock(self);
- return (self->isConsumerStarted ? -1 : 0);
+ tsk_safeobj_unlock(self);
+ return (self->isConsumerStarted ? -1 : 0);
}
int audio_opensles_instance_stop_producer(audio_opensles_instance_handle_t* _self)
{
- audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
- if(!self || !self->device || !self->callback){
- AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
-
- if(!self->isProducerStarted){
- goto done;
- }
-
- int ret;
- if((ret = self->device->StopRecording())){
- AUDIO_OPENSLES_DEBUG_ERROR("StopRecording() failed with error code = %d", ret);
- }
- else{
- self->isProducerStarted = self->device->Recording();
- self->isProducerPrepared = false;
- }
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device || !self->callback) {
+ AUDIO_OPENSLES_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+
+ if(!self->isProducerStarted) {
+ goto done;
+ }
+
+ int ret;
+ if((ret = self->device->StopRecording())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("StopRecording() failed with error code = %d", ret);
+ }
+ else {
+ self->isProducerStarted = self->device->Recording();
+ self->isProducerPrepared = false;
+ }
done:
- tsk_safeobj_unlock(self);
- return (self->isProducerStarted ? -1 : 0);
+ tsk_safeobj_unlock(self);
+ return (self->isProducerStarted ? -1 : 0);
}
int audio_opensles_instance_set_speakerOn(audio_opensles_instance_handle_t* _self, bool speakerOn)
{
- audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
- if(!self || !self->device ){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- return self->device->SetSpeakerOn(speakerOn);
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device ) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ return self->device->SetSpeakerOn(speakerOn);
}
int audio_opensles_instance_set_microphone_volume(audio_opensles_instance_handle_t* _self, int32_t volume)
{
- audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
- if(!self || !self->device ){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- return self->device->SetMicrophoneVolume(volume);
+ audio_opensles_instance_t* self = (audio_opensles_instance_t*)_self;
+ if(!self || !self->device ) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ return self->device->SetMicrophoneVolume(volume);
}
-int audio_opensles_instance_destroy(audio_opensles_instance_handle_t** _self){
- if(!_self || !*_self){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- tsk_list_lock(__audioInstances);
- if(tsk_object_get_refcount(*_self)==1){
- tsk_list_remove_item_by_data(__audioInstances, *_self);
- }
- else {
- tsk_object_unref(*_self);
- }
- tsk_list_unlock(__audioInstances);
- *_self = tsk_null;
- return 0;
+int audio_opensles_instance_destroy(audio_opensles_instance_handle_t** _self)
+{
+ if(!_self || !*_self) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ tsk_list_lock(__audioInstances);
+ if(tsk_object_get_refcount(*_self)==1) {
+ tsk_list_remove_item_by_data(__audioInstances, *_self);
+ }
+ else {
+ tsk_object_unref(*_self);
+ }
+ tsk_list_unlock(__audioInstances);
+ *_self = tsk_null;
+ return 0;
}
diff --git a/plugins/audio_opensles/audio_opensles.h b/plugins/audio_opensles/audio_opensles.h
index 7837509..e96966e 100755
--- a/plugins/audio_opensles/audio_opensles.h
+++ b/plugins/audio_opensles/audio_opensles.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/audio_opensles/audio_opensles_config.h b/plugins/audio_opensles/audio_opensles_config.h
index a495608..6967492 100755
--- a/plugins/audio_opensles/audio_opensles_config.h
+++ b/plugins/audio_opensles/audio_opensles_config.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -66,18 +66,18 @@
# define AUDIO_OPENSLES_GEXTERN extern
#endif
-// Guards against C++ name mangling
+// Guards against C++ name mangling
#ifdef __cplusplus
# define AUDIO_OPENSLES_BEGIN_DECLS extern "C" {
# define AUDIO_OPENSLES_END_DECLS }
#else
-# define AUDIO_OPENSLES_BEGIN_DECLS
+# define AUDIO_OPENSLES_BEGIN_DECLS
# define AUDIO_OPENSLES_END_DECLS
#endif
#ifdef _MSC_VER
#if HAVE_FFMPEG // FFMPeg warnings (treated as errors)
-# pragma warning (disable:4244)
+# pragma warning (disable:4244)
#endif
# define inline __inline
# define _CRT_SECURE_NO_WARNINGS
@@ -94,7 +94,7 @@
#endif
#if HAVE_CONFIG_H
- #include "../config.h"
+#include "../config.h"
#endif
#if AUDIO_OPENSLES_UNDER_ANDROID
diff --git a/plugins/audio_opensles/audio_opensles_consumer.cxx b/plugins/audio_opensles/audio_opensles_consumer.cxx
index 57acd7f..5585316 100755
--- a/plugins/audio_opensles/audio_opensles_consumer.cxx
+++ b/plugins/audio_opensles/audio_opensles_consumer.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -25,172 +25,173 @@
#include "tsk_memory.h"
#include "tsk_debug.h"
-typedef struct audio_consumer_opensles_s
-{
- TDAV_DECLARE_CONSUMER_AUDIO;
- audio_opensles_instance_handle_t* audioInstHandle;
- bool isSpeakerOn;
- struct{
- void* ptr;
- bool isFull;
- int size;
- int index;
- } buffer;
+typedef struct audio_consumer_opensles_s {
+ TDAV_DECLARE_CONSUMER_AUDIO;
+ audio_opensles_instance_handle_t* audioInstHandle;
+ bool isSpeakerOn;
+ struct {
+ void* ptr;
+ bool isFull;
+ int size;
+ int index;
+ } buffer;
}
audio_consumer_opensles_t;
int audio_consumer_opensles_get_data_10ms(const audio_consumer_opensles_t* _self, void* audioSamples, int nSamples, int nBytesPerSample, int nChannels, int samplesPerSec, uint32_t &nSamplesOut)
{
- nSamplesOut = 0;
- if(!_self || !audioSamples || !nSamples){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if((nSamples != (samplesPerSec / 100))){
- AUDIO_OPENSLES_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
- return -2;
- }
- if((nBytesPerSample != (TMEDIA_CONSUMER(_self)->audio.bits_per_sample >> 3))){
- AUDIO_OPENSLES_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
- return -3;
- }
- if((nChannels != TMEDIA_CONSUMER(_self)->audio.out.channels)){
- AUDIO_OPENSLES_DEBUG_ERROR("Playout - %d not the expected number of channels but should be %d", nChannels, TMEDIA_CONSUMER(_self)->audio.out.channels);
- return -4;
- }
-
- audio_consumer_opensles_t* self = const_cast<audio_consumer_opensles_t*>(_self);
-
- if(self->buffer.index == self->buffer.size){
- if((tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(self), self->buffer.ptr, self->buffer.size)) != self->buffer.size){
- nSamplesOut = 0;
- self->buffer.index = self->buffer.size;
- return 0;
- }
- self->buffer.index = 0;
- tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(self));
- }
-
- int nSamplesInBits = (nSamples * nBytesPerSample);
- if(_self->buffer.index + nSamplesInBits <= _self->buffer.size){
- memcpy(audioSamples, (((uint8_t*)self->buffer.ptr) + self->buffer.index), nSamplesInBits);
- }
- self->buffer.index += nSamplesInBits;
- TSK_CLAMP(0, self->buffer.index, self->buffer.size);
- nSamplesOut = nSamples;
-
- return 0;
+ nSamplesOut = 0;
+ if(!_self || !audioSamples || !nSamples) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if((nSamples != (samplesPerSec / 100))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
+ return -2;
+ }
+ if((nBytesPerSample != (TMEDIA_CONSUMER(_self)->audio.bits_per_sample >> 3))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
+ return -3;
+ }
+ if((nChannels != TMEDIA_CONSUMER(_self)->audio.out.channels)) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Playout - %d not the expected number of channels but should be %d", nChannels, TMEDIA_CONSUMER(_self)->audio.out.channels);
+ return -4;
+ }
+
+ audio_consumer_opensles_t* self = const_cast<audio_consumer_opensles_t*>(_self);
+
+ if(self->buffer.index == self->buffer.size) {
+ if((tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(self), self->buffer.ptr, self->buffer.size)) != self->buffer.size) {
+ nSamplesOut = 0;
+ self->buffer.index = self->buffer.size;
+ return 0;
+ }
+ self->buffer.index = 0;
+ tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(self));
+ }
+
+ int nSamplesInBits = (nSamples * nBytesPerSample);
+ if(_self->buffer.index + nSamplesInBits <= _self->buffer.size) {
+ memcpy(audioSamples, (((uint8_t*)self->buffer.ptr) + self->buffer.index), nSamplesInBits);
+ }
+ self->buffer.index += nSamplesInBits;
+ TSK_CLAMP(0, self->buffer.index, self->buffer.size);
+ nSamplesOut = nSamples;
+
+ return 0;
}
bool audio_consumer_opensles_is_speakerOn(const audio_consumer_opensles_t* self)
{
- if(!self){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return false;
- }
- return self->isSpeakerOn;
+ if(!self) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return false;
+ }
+ return self->isSpeakerOn;
}
/* ============ Media Consumer Interface ================= */
static int audio_consumer_opensles_set(tmedia_consumer_t* _self, const tmedia_param_t* param)
{
- audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
- int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
-
- if(ret == 0){
- if(tsk_striequals(param->key, "volume")){
-
- }
- else if(tsk_striequals(param->key, "speaker-on")){
- self->isSpeakerOn = (TSK_TO_INT32((uint8_t*)param->value) != 0);
- if(self->audioInstHandle){
- return audio_opensles_instance_set_speakerOn(self->audioInstHandle, self->isSpeakerOn);
- }
- else return 0; // will be set when instance is initialized
- }
- }
-
- return ret;
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+
+ if(ret == 0) {
+ if(tsk_striequals(param->key, "volume")) {
+
+ }
+ else if(tsk_striequals(param->key, "speaker-on")) {
+ self->isSpeakerOn = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+ if(self->audioInstHandle) {
+ return audio_opensles_instance_set_speakerOn(self->audioInstHandle, self->isSpeakerOn);
+ }
+ else {
+ return 0; // will be set when instance is initialized
+ }
+ }
+ }
+
+ return ret;
}
static int audio_consumer_opensles_prepare(tmedia_consumer_t* _self, const tmedia_codec_t* codec)
{
- audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
- if(!self){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- // create audio instance
- if(!(self->audioInstHandle = audio_opensles_instance_create(TMEDIA_CONSUMER(self)->session_id))){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio instance handle");
- return -1;
- }
-
- // initialize input parameters from the codec information
- TMEDIA_CONSUMER(self)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_DECODING(codec);
- TMEDIA_CONSUMER(self)->audio.in.channels = TMEDIA_CODEC_CHANNELS_AUDIO_DECODING(codec);
- TMEDIA_CONSUMER(self)->audio.in.rate = TMEDIA_CODEC_RATE_DECODING(codec);
-
- AUDIO_OPENSLES_DEBUG_INFO("audio_consumer_opensles_prepare(channels=%d, rate=%d, ptime=%d)", codec->plugin->audio.channels, codec->plugin->rate, codec->plugin->audio.ptime);
-
- // prepare playout device and update output parameters
- int ret = audio_opensles_instance_prepare_consumer(self->audioInstHandle, &_self);
-
- // now that the producer is prepared we can initialize internal buffer using device caps
- if(ret == 0){
- // allocate buffer
- int xsize = ((TMEDIA_CONSUMER(self)->audio.ptime * TMEDIA_CONSUMER(self)->audio.out.rate) / 1000) * (TMEDIA_CONSUMER(self)->audio.bits_per_sample >> 3);
- if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
- self->buffer.size = 0;
- return -1;
- }
- memset(self->buffer.ptr, 0, xsize);
- self->buffer.size = xsize;
- self->buffer.index = 0;
- self->buffer.isFull = false;
- }
- return ret;
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ if(!self) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ // create audio instance
+ if(!(self->audioInstHandle = audio_opensles_instance_create(TMEDIA_CONSUMER(self)->session_id))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio instance handle");
+ return -1;
+ }
+
+ // initialize input parameters from the codec information
+ TMEDIA_CONSUMER(self)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_DECODING(codec);
+ TMEDIA_CONSUMER(self)->audio.in.channels = TMEDIA_CODEC_CHANNELS_AUDIO_DECODING(codec);
+ TMEDIA_CONSUMER(self)->audio.in.rate = TMEDIA_CODEC_RATE_DECODING(codec);
+
+ AUDIO_OPENSLES_DEBUG_INFO("audio_consumer_opensles_prepare(channels=%d, rate=%d, ptime=%d)", codec->plugin->audio.channels, codec->plugin->rate, codec->plugin->audio.ptime);
+
+ // prepare playout device and update output parameters
+ int ret = audio_opensles_instance_prepare_consumer(self->audioInstHandle, &_self);
+
+ // now that the producer is prepared we can initialize internal buffer using device caps
+ if(ret == 0) {
+ // allocate buffer
+ int xsize = ((TMEDIA_CONSUMER(self)->audio.ptime * TMEDIA_CONSUMER(self)->audio.out.rate) / 1000) * (TMEDIA_CONSUMER(self)->audio.bits_per_sample >> 3);
+ if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
+ self->buffer.size = 0;
+ return -1;
+ }
+ memset(self->buffer.ptr, 0, xsize);
+ self->buffer.size = xsize;
+ self->buffer.index = 0;
+ self->buffer.isFull = false;
+ }
+ return ret;
}
static int audio_consumer_opensles_start(tmedia_consumer_t* _self)
{
- audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
- if(!self){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ if(!self) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return audio_opensles_instance_start_consumer(self->audioInstHandle);
+ return audio_opensles_instance_start_consumer(self->audioInstHandle);
}
static int audio_consumer_opensles_consume(tmedia_consumer_t* _self, const void* data, tsk_size_t data_size, const tsk_object_t* proto_hdr)
{
- audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
- if(!self || !data || !data_size){
- AUDIO_OPENSLES_DEBUG_ERROR("1Invalid parameter");
- return -1;
- }
- /* buffer is already decoded */
- return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), data, data_size, proto_hdr);
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ if(!self || !data || !data_size) {
+ AUDIO_OPENSLES_DEBUG_ERROR("1Invalid parameter");
+ return -1;
+ }
+ /* buffer is already decoded */
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), data, data_size, proto_hdr);
}
static int audio_consumer_opensles_pause(tmedia_consumer_t* self)
{
- return 0;
+ return 0;
}
static int audio_consumer_opensles_stop(tmedia_consumer_t* _self)
{
- audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
- if(!self){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ audio_consumer_opensles_t* self = (audio_consumer_opensles_t*)_self;
+ if(!self) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return audio_opensles_instance_stop_consumer(self->audioInstHandle);
+ return audio_opensles_instance_stop_consumer(self->audioInstHandle);
}
@@ -200,54 +201,52 @@ static int audio_consumer_opensles_stop(tmedia_consumer_t* _self)
/* constructor */
static tsk_object_t* audio_consumer_opensles_ctor(tsk_object_t *_self, va_list * app)
{
- audio_consumer_opensles_t *self = (audio_consumer_opensles_t *)_self;
- if(self){
- /* init base */
- tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(self));
- /* init self */
-
- }
- return self;
+ audio_consumer_opensles_t *self = (audio_consumer_opensles_t *)_self;
+ if(self) {
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(self));
+ /* init self */
+
+ }
+ return self;
}
/* destructor */
static tsk_object_t* audio_consumer_opensles_dtor(tsk_object_t *_self)
-{
- audio_consumer_opensles_t *self = (audio_consumer_opensles_t *)_self;
- if(self){
- /* stop */
- audio_consumer_opensles_stop(TMEDIA_CONSUMER(self));
- /* deinit self */
- if(self->audioInstHandle){
- audio_opensles_instance_destroy(&self->audioInstHandle);
- }
- TSK_FREE(self->buffer.ptr);
- /* deinit base */
- tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(self));
- }
-
- return self;
+{
+ audio_consumer_opensles_t *self = (audio_consumer_opensles_t *)_self;
+ if(self) {
+ /* stop */
+ audio_consumer_opensles_stop(TMEDIA_CONSUMER(self));
+ /* deinit self */
+ if(self->audioInstHandle) {
+ audio_opensles_instance_destroy(&self->audioInstHandle);
+ }
+ TSK_FREE(self->buffer.ptr);
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(self));
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t audio_consumer_opensles_def_s =
-{
- sizeof(audio_consumer_opensles_t),
- audio_consumer_opensles_ctor,
- audio_consumer_opensles_dtor,
- tdav_consumer_audio_cmp,
+static const tsk_object_def_t audio_consumer_opensles_def_s = {
+ sizeof(audio_consumer_opensles_t),
+ audio_consumer_opensles_ctor,
+ audio_consumer_opensles_dtor,
+ tdav_consumer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t audio_consumer_opensles_plugin_def_s =
-{
- &audio_consumer_opensles_def_s,
-
- tmedia_audio,
- "SLES audio consumer",
-
- audio_consumer_opensles_set,
- audio_consumer_opensles_prepare,
- audio_consumer_opensles_start,
- audio_consumer_opensles_consume,
- audio_consumer_opensles_pause,
- audio_consumer_opensles_stop
+static const tmedia_consumer_plugin_def_t audio_consumer_opensles_plugin_def_s = {
+ &audio_consumer_opensles_def_s,
+
+ tmedia_audio,
+ "SLES audio consumer",
+
+ audio_consumer_opensles_set,
+ audio_consumer_opensles_prepare,
+ audio_consumer_opensles_start,
+ audio_consumer_opensles_consume,
+ audio_consumer_opensles_pause,
+ audio_consumer_opensles_stop
};
const tmedia_consumer_plugin_def_t *audio_consumer_opensles_plugin_def_t = &audio_consumer_opensles_plugin_def_s;
diff --git a/plugins/audio_opensles/audio_opensles_consumer.h b/plugins/audio_opensles/audio_opensles_consumer.h
index 1702db6..786813b 100755
--- a/plugins/audio_opensles/audio_opensles_consumer.h
+++ b/plugins/audio_opensles/audio_opensles_consumer.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -22,7 +22,7 @@
AUDIO_OPENSLES_BEGIN_DECLS
-extern const struct tmedia_consumer_plugin_def_s *audio_consumer_opensles_plugin_def_t;
+extern const struct tmedia_consumer_plugin_def_s *audio_consumer_opensles_plugin_def_t;
int audio_consumer_opensles_get_data_10ms(const struct audio_consumer_opensles_s* self, void* audioSamples, int nSamples, int nBytesPerSample, int nChannels, int samplesPerSec, uint32_t &nSamplesOut);
bool audio_consumer_opensles_is_speakerOn(const struct audio_consumer_opensles_s* self);
diff --git a/plugins/audio_opensles/audio_opensles_device.cxx b/plugins/audio_opensles/audio_opensles_device.cxx
index 3c06bfc..e6fff79 100755
--- a/plugins/audio_opensles/audio_opensles_device.cxx
+++ b/plugins/audio_opensles/audio_opensles_device.cxx
@@ -23,64 +23,77 @@
#define CHECK_MICROPHONE_NOT_INITIALIZED() CHECK_FALSE(m_bMicrophoneInitialized, "Microphone initialized")
#if AUDIO_OPENSLES_UNDER_ANDROID
-static inline SLuint32 SL_SAMPLING_RATE(int RATE_INT){
- switch(RATE_INT){
- case 8000: return SL_SAMPLINGRATE_8;
- case 11025: return SL_SAMPLINGRATE_11_025;
- default:case 16000: return SL_SAMPLINGRATE_16;
- case 22050: return SL_SAMPLINGRATE_22_05;
- case 24000: return SL_SAMPLINGRATE_24;
- case 32000: return SL_SAMPLINGRATE_32;
- case 44100: return SL_SAMPLINGRATE_44_1;
- case 64000: return SL_SAMPLINGRATE_64;
- case 88200: return SL_SAMPLINGRATE_88_2;
- case 96000: return SL_SAMPLINGRATE_96;
- case 192000: return SL_SAMPLINGRATE_192;
- }
+static inline SLuint32 SL_SAMPLING_RATE(int RATE_INT)
+{
+ switch(RATE_INT) {
+ case 8000:
+ return SL_SAMPLINGRATE_8;
+ case 11025:
+ return SL_SAMPLINGRATE_11_025;
+ default:
+ case 16000:
+ return SL_SAMPLINGRATE_16;
+ case 22050:
+ return SL_SAMPLINGRATE_22_05;
+ case 24000:
+ return SL_SAMPLINGRATE_24;
+ case 32000:
+ return SL_SAMPLINGRATE_32;
+ case 44100:
+ return SL_SAMPLINGRATE_44_1;
+ case 64000:
+ return SL_SAMPLINGRATE_64;
+ case 88200:
+ return SL_SAMPLINGRATE_88_2;
+ case 96000:
+ return SL_SAMPLINGRATE_96;
+ case 192000:
+ return SL_SAMPLINGRATE_192;
+ }
}
#endif
SLAudioDevice::SLAudioDevice(const SLAudioDeviceCallback* pCallback):
#if AUDIO_OPENSLES_UNDER_ANDROID
-m_slEngineObject(NULL),
-m_slPlayer(NULL),
-m_slEngine(NULL),
-m_slPlayerPlay(NULL),
-m_slPlayerSimpleBufferQueue(NULL),
-m_slOutputMixObject(NULL),
-m_slSpeakerVolume(NULL),
-m_slRecorder(NULL),
-m_slRecorderRecord(NULL),
-m_slAudioIODeviceCapabilities(NULL),
-m_slRecorderSimpleBufferQueue(NULL),
-m_slMicVolume(NULL),
-_playQueueSeq(0),
-_recCurrentSeq(0),
-_recBufferTotalSize(0),
-_recQueueSeq(0),
+ m_slEngineObject(NULL),
+ m_slPlayer(NULL),
+ m_slEngine(NULL),
+ m_slPlayerPlay(NULL),
+ m_slPlayerSimpleBufferQueue(NULL),
+ m_slOutputMixObject(NULL),
+ m_slSpeakerVolume(NULL),
+ m_slRecorder(NULL),
+ m_slRecorderRecord(NULL),
+ m_slAudioIODeviceCapabilities(NULL),
+ m_slRecorderSimpleBufferQueue(NULL),
+ m_slMicVolume(NULL),
+ _playQueueSeq(0),
+ _recCurrentSeq(0),
+ _recBufferTotalSize(0),
+ _recQueueSeq(0),
#endif
-m_nMicDeviceId(0),
-m_pCallback(pCallback),
-m_bInitialized(false),
-m_bSpeakerInitialized(false),
-m_bSpeakerOn(false),
-m_bPlayoutInitialized(false),
-m_bRecordingInitialized(false),
-m_bMicrophoneInitialized(false),
-m_bStereoPlayout(false),
-m_bStereoRecording(false),
-m_nPlayoutSampleRate(PLAYOUT_SAMPLE_RATE),
-m_nRecordingSampleRate(RECORDING_SAMPLE_RATE),
-m_nRecordingBufferSize(RECORDING_BUFFER_SIZE),
-m_nPlayoutBufferSize(PLAYOUT_BUFFER_SIZE),
-m_bPlaying(false),
-m_bRecording(false),
-m_nSpeakerVolume(0),
-m_nMinSpeakerVolume(0),
-m_nMaxSpeakerVolume(0)
+ m_nMicDeviceId(0),
+ m_pCallback(pCallback),
+ m_bInitialized(false),
+ m_bSpeakerInitialized(false),
+ m_bSpeakerOn(false),
+ m_bPlayoutInitialized(false),
+ m_bRecordingInitialized(false),
+ m_bMicrophoneInitialized(false),
+ m_bStereoPlayout(false),
+ m_bStereoRecording(false),
+ m_nPlayoutSampleRate(PLAYOUT_SAMPLE_RATE),
+ m_nRecordingSampleRate(RECORDING_SAMPLE_RATE),
+ m_nRecordingBufferSize(RECORDING_BUFFER_SIZE),
+ m_nPlayoutBufferSize(PLAYOUT_BUFFER_SIZE),
+ m_bPlaying(false),
+ m_bRecording(false),
+ m_nSpeakerVolume(0),
+ m_nMinSpeakerVolume(0),
+ m_nMaxSpeakerVolume(0)
{
#if AUDIO_OPENSLES_UNDER_ANDROID
- memset(_playQueueBuffer, 0, sizeof(_playQueueBuffer));
+ memset(_playQueueBuffer, 0, sizeof(_playQueueBuffer));
memset(_recQueueBuffer, 0, sizeof(_recQueueBuffer));
memset(_recBuffer, 0, sizeof(_recBuffer));
memset(_recLength, 0, sizeof(_recLength));
@@ -94,22 +107,22 @@ SLAudioDevice::~SLAudioDevice()
}
int SLAudioDevice::SetCallback(const SLAudioDeviceCallback* pCallback)
-{
- m_pCallback = pCallback;
- return 0;
+{
+ m_pCallback = pCallback;
+ return 0;
}
int SLAudioDevice::Init()
{
- CHECK_FALSE(m_bInitialized, "Already initialized");
+ CHECK_FALSE(m_bInitialized, "Already initialized");
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::Init()");
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::Init()");
#if AUDIO_OPENSLES_UNDER_ANDROID
- SLresult slResult;
+ SLresult slResult;
SLEngineOption EngineOption[] = {
- { (SLuint32) SL_ENGINEOPTION_THREADSAFE, (SLuint32) SL_BOOLEAN_TRUE },
+ { (SLuint32) SL_ENGINEOPTION_THREADSAFE, (SLuint32) SL_BOOLEAN_TRUE },
};
slResult = slCreateEngine(&m_slEngineObject, 1, EngineOption, 0, NULL, NULL);
if (slResult != SL_RESULT_SUCCESS) {
@@ -126,152 +139,160 @@ int SLAudioDevice::Init()
}
#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
- m_bInitialized = true;
- AUDIO_OPENSLES_DEBUG_INFO("SL engine initialized");
- return 0;
+ m_bInitialized = true;
+ AUDIO_OPENSLES_DEBUG_INFO("SL engine initialized");
+ return 0;
}
bool SLAudioDevice::Initialized()
{
- return m_bInitialized;
+ return m_bInitialized;
}
int SLAudioDevice::SpeakerIsAvailable(bool *pAvailable)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_TRUE(m_bInitialized, "Not initialized");
- if(!pAvailable){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pAvailable) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- *pAvailable = true;
- return 0;
+ *pAvailable = true;
+ return 0;
}
int SLAudioDevice::InitSpeaker()
{
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitSpeaker()");
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitSpeaker()");
- CHECK_TRUE(m_bInitialized, "Not initialized");
-
- if(m_bSpeakerInitialized){
- return 0;
- }
+ CHECK_TRUE(m_bInitialized, "Not initialized");
- m_bSpeakerInitialized = true;
- return 0;
+ if(m_bSpeakerInitialized) {
+ return 0;
+ }
+
+ m_bSpeakerInitialized = true;
+ return 0;
}
int SLAudioDevice::SetMaxSpeakerVolume(int nMaxSpeakerVolume)
{
- CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
- AUDIO_OPENSLES_DEBUG_INFO("SetMaxSpeakerVolume(%d)", nMaxSpeakerVolume);
- m_nMaxSpeakerVolume = nMaxSpeakerVolume;
- return 0;
+ CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
+ AUDIO_OPENSLES_DEBUG_INFO("SetMaxSpeakerVolume(%d)", nMaxSpeakerVolume);
+ m_nMaxSpeakerVolume = nMaxSpeakerVolume;
+ return 0;
}
int SLAudioDevice::SetMinSpeakerVolume(int nMinSpeakerVolume)
{
- CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
- AUDIO_OPENSLES_DEBUG_INFO("SetMinSpeakerVolume(%d)", nMinSpeakerVolume);
- m_nMinSpeakerVolume = nMinSpeakerVolume;
- return 0;
+ CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
+ AUDIO_OPENSLES_DEBUG_INFO("SetMinSpeakerVolume(%d)", nMinSpeakerVolume);
+ m_nMinSpeakerVolume = nMinSpeakerVolume;
+ return 0;
}
int SLAudioDevice::SetSpeakerVolume(int nSpeakerVolume)
{
- CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
- AUDIO_OPENSLES_DEBUG_INFO("SetSpeakerVolume(%d)", nSpeakerVolume);
- m_nSpeakerVolume = nSpeakerVolume;
- return 0;
+ CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
+ AUDIO_OPENSLES_DEBUG_INFO("SetSpeakerVolume(%d)", nSpeakerVolume);
+ m_nSpeakerVolume = nSpeakerVolume;
+ return 0;
}
int SLAudioDevice::SetSpeakerOn(bool bSpeakerOn)
{
- CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
-
- AUDIO_OPENSLES_DEBUG_INFO("SetSpeakerOn(%s -> %s)", (m_bSpeakerOn ? "true" : "false"), (bSpeakerOn ? "true" : "false"));
- int ret = 0;
- bool oldValue = m_bSpeakerOn;
- m_bSpeakerOn = bSpeakerOn; // update value beacause use in PlayoutApplyNewConfig();
- if(m_bPlayoutInitialized && (oldValue != bSpeakerOn)){
- ret = PlayoutApplyNewConfig();
- }
-
- if(ret != 0){
- m_bSpeakerOn = oldValue;
- }
-
- return ret;
+ CHECK_TRUE(m_bSpeakerInitialized, "Speaker not initialized");
+
+ AUDIO_OPENSLES_DEBUG_INFO("SetSpeakerOn(%s -> %s)", (m_bSpeakerOn ? "true" : "false"), (bSpeakerOn ? "true" : "false"));
+ int ret = 0;
+ bool oldValue = m_bSpeakerOn;
+    m_bSpeakerOn = bSpeakerOn; // update the value now because it is used in PlayoutApplyNewConfig()
+ if(m_bPlayoutInitialized && (oldValue != bSpeakerOn)) {
+ ret = PlayoutApplyNewConfig();
+ }
+
+ if(ret != 0) {
+ m_bSpeakerOn = oldValue;
+ }
+
+ return ret;
}
int SLAudioDevice::PlayoutIsAvailable(bool *pAvailable)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_PLAYOUT_NOT_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_NOT_INITIALIZED();
- if(!pAvailable){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pAvailable) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- *pAvailable = true;
- return 0;
+ *pAvailable = true;
+ return 0;
}
int SLAudioDevice::SetStereoPlayout(bool bEnabled)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_PLAYOUT_NOT_INITIALIZED();
- m_bStereoPlayout = bEnabled;
- return 0;
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_NOT_INITIALIZED();
+ m_bStereoPlayout = bEnabled;
+ return 0;
}
int SLAudioDevice::SetPlayoutBuffer(int nPlayoutBufferSize)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_PLAYOUT_NOT_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_NOT_INITIALIZED();
- if(PLAYOUT_BUFFER_SIZE != nPlayoutBufferSize){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(PLAYOUT_BUFFER_SIZE != nPlayoutBufferSize) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- m_nPlayoutBufferSize = nPlayoutBufferSize;
- return 0;
+ m_nPlayoutBufferSize = nPlayoutBufferSize;
+ return 0;
}
int SLAudioDevice::SetPlayoutSampleRate(int nPlayoutSampleRate)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_PLAYOUT_NOT_INITIALIZED();
-
- AUDIO_OPENSLES_DEBUG_INFO("SetPlayoutSampleRate(%d)", nPlayoutSampleRate);
-
- switch(nPlayoutSampleRate){
- case 8000: case 11025: case 16000: case 22050: case 24000: case 32000: case 44100: case 64000: case 88200: case 96000: case 192000:
- {
- m_nPlayoutSampleRate = nPlayoutSampleRate;
- return 0;
- }
- default:
- {
- AUDIO_OPENSLES_DEBUG_ERROR("%d not valid sampling rate", nPlayoutSampleRate);
- return -1;
- }
- }
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_NOT_INITIALIZED();
+
+ AUDIO_OPENSLES_DEBUG_INFO("SetPlayoutSampleRate(%d)", nPlayoutSampleRate);
+
+ switch(nPlayoutSampleRate) {
+ case 8000:
+ case 11025:
+ case 16000:
+ case 22050:
+ case 24000:
+ case 32000:
+ case 44100:
+ case 64000:
+ case 88200:
+ case 96000:
+ case 192000: {
+ m_nPlayoutSampleRate = nPlayoutSampleRate;
+ return 0;
+ }
+ default: {
+ AUDIO_OPENSLES_DEBUG_ERROR("%d not valid sampling rate", nPlayoutSampleRate);
+ return -1;
+ }
+ }
}
int SLAudioDevice::InitPlayout()
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
-
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitPlayout()");
+ CHECK_TRUE(m_bInitialized, "Not initialized");
- if(m_bPlayoutInitialized){
- return 0;
- }
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitPlayout()");
+
+ if(m_bPlayoutInitialized) {
+ return 0;
+ }
if (m_bPlaying) {
AUDIO_OPENSLES_DEBUG_ERROR("Playout already started");
@@ -285,7 +306,7 @@ int SLAudioDevice::InitPlayout()
#if AUDIO_OPENSLES_UNDER_ANDROID
- if (m_slEngineObject == NULL || m_slEngine == NULL) {
+ if (m_slEngineObject == NULL || m_slEngine == NULL) {
AUDIO_OPENSLES_DEBUG_ERROR("SLObject or Engiine is NULL");
return -1;
}
@@ -305,68 +326,68 @@ int SLAudioDevice::InitPlayout()
req[i] = SL_BOOLEAN_FALSE;
}
ids[0] = SL_IID_ENVIRONMENTALREVERB;
-
+
if ((slResult = (*m_slEngine)->CreateOutputMix(m_slEngine, &m_slOutputMixObject, 1, ids, req)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("CreateOutputMix() for playout failed with error code = %d", slResult);
return -1;
}
-
+
if ((slResult = (*m_slOutputMixObject)->Realize(m_slOutputMixObject, SL_BOOLEAN_FALSE)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Failed to realize SL Output Mix object for playout with error code = %d", slResult);
return -1;
}
-
+
simpleBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
simpleBufferQueue.numBuffers = N_PLAY_QUEUE_BUFFERS;
-
+
pcm.formatType = SL_DATAFORMAT_PCM;
- pcm.numChannels = m_bStereoPlayout ? 2 : 1;
- pcm.samplesPerSec = SL_SAMPLING_RATE(m_nPlayoutSampleRate);
+ pcm.numChannels = m_bStereoPlayout ? 2 : 1;
+ pcm.samplesPerSec = SL_SAMPLING_RATE(m_nPlayoutSampleRate);
pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
pcm.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
- pcm.channelMask = m_bStereoRecording ? (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT) : SL_SPEAKER_FRONT_CENTER;
+ pcm.channelMask = m_bStereoRecording ? (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT) : SL_SPEAKER_FRONT_CENTER;
pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
-
- audioSource.pFormat = (void *) &pcm;
+
+ audioSource.pFormat = (void *) &pcm;
audioSource.pLocator = (void *) &simpleBufferQueue;
locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
locator_outputmix.outputMix = m_slOutputMixObject;
audioSink.pLocator = (void *) &locator_outputmix;
audioSink.pFormat = NULL;
-
+
ids[0] = SL_IID_ANDROIDSIMPLEBUFFERQUEUE;
ids[1] = SL_IID_EFFECTSEND;
- ids[2] = SL_IID_ANDROIDCONFIGURATION;
- ids[3] = SL_IID_VOLUME;
+ ids[2] = SL_IID_ANDROIDCONFIGURATION;
+ ids[3] = SL_IID_VOLUME;
req[0] = SL_BOOLEAN_TRUE;
req[1] = SL_BOOLEAN_TRUE;
- req[2] = SL_BOOLEAN_TRUE;
- req[3] = SL_BOOLEAN_TRUE;
+ req[2] = SL_BOOLEAN_TRUE;
+ req[3] = SL_BOOLEAN_TRUE;
// Create the player
if ((slResult = (*m_slEngine)->CreateAudioPlayer(m_slEngine, &m_slPlayer, &audioSource, &audioSink, 3, ids, req)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Failed to create Audio Player with error code = %d", slResult);
return -1;
}
-
- // set stream type
- if(!m_bSpeakerOn){ // only set if speaker OFF, otherwise default is ON. "SL_ANDROID_STREAM_MEDIA" doen't look to work on all devices
- static SLAndroidConfigurationItf _playerStreamConfig;
- if((slResult = (*m_slPlayer)->GetInterface(m_slPlayer, SL_IID_ANDROIDCONFIGURATION, &_playerStreamConfig)) != SL_RESULT_SUCCESS){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to get player configuration with error code = %d", slResult);
- return -1;
- }
- else{
- static SLint32 _playerStreamType = m_bSpeakerOn ? SL_ANDROID_STREAM_MEDIA : SL_ANDROID_STREAM_VOICE;
- static SLint32 _playerStreamTypeSize = sizeof(SLint32);
- AUDIO_OPENSLES_DEBUG_INFO("_playerStreamType=%d", _playerStreamType);
- if((slResult = (*_playerStreamConfig)->SetConfiguration(_playerStreamConfig, SL_ANDROID_KEY_STREAM_TYPE, &_playerStreamType, _playerStreamTypeSize))){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to set player stream type with error code = %d", slResult);
- return -2;
- }
- }
- }
+
+ // set stream type
+    if(!m_bSpeakerOn) { // only set if the speaker is OFF, otherwise keep the default (ON); "SL_ANDROID_STREAM_MEDIA" doesn't seem to work on all devices
+ static SLAndroidConfigurationItf _playerStreamConfig;
+ if((slResult = (*m_slPlayer)->GetInterface(m_slPlayer, SL_IID_ANDROIDCONFIGURATION, &_playerStreamConfig)) != SL_RESULT_SUCCESS) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to get player configuration with error code = %d", slResult);
+ return -1;
+ }
+ else {
+ static SLint32 _playerStreamType = m_bSpeakerOn ? SL_ANDROID_STREAM_MEDIA : SL_ANDROID_STREAM_VOICE;
+ static SLint32 _playerStreamTypeSize = sizeof(SLint32);
+ AUDIO_OPENSLES_DEBUG_INFO("_playerStreamType=%d", _playerStreamType);
+ if((slResult = (*_playerStreamConfig)->SetConfiguration(_playerStreamConfig, SL_ANDROID_KEY_STREAM_TYPE, &_playerStreamType, _playerStreamTypeSize))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to set player stream type with error code = %d", slResult);
+ return -2;
+ }
+ }
+ }
// Realizing the player in synchronous mode
if ((slResult = (*m_slPlayer)->Realize(m_slPlayer, SL_BOOLEAN_FALSE)) != SL_RESULT_SUCCESS) {
@@ -384,53 +405,53 @@ int SLAudioDevice::InitPlayout()
}
// Setup to receive buffer queue event callbacks
- if ((slResult = (*m_slPlayerSimpleBufferQueue)->RegisterCallback(m_slPlayerSimpleBufferQueue, PlayerSimpleBufferQueueCallback, this)) != SL_RESULT_SUCCESS) {
+ if ((slResult = (*m_slPlayerSimpleBufferQueue)->RegisterCallback(m_slPlayerSimpleBufferQueue, PlayerSimpleBufferQueueCallback, this)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Failed to register Player Callback");
return -1;
}
#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
- m_bPlayoutInitialized = true;
+ m_bPlayoutInitialized = true;
- AUDIO_OPENSLES_DEBUG_INFO("Playout initialized");
+ AUDIO_OPENSLES_DEBUG_INFO("Playout initialized");
- return 0;
+ return 0;
}
int SLAudioDevice::StereoPlayout(bool *pEnabled)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_PLAYOUT_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_INITIALIZED();
- if(!pEnabled){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pEnabled) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- *pEnabled = m_bStereoPlayout;
- return 0;
+ *pEnabled = m_bStereoPlayout;
+ return 0;
}
int SLAudioDevice::PlayoutSampleRate(int *pPlayoutSampleRate)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_PLAYOUT_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_INITIALIZED();
- if(!pPlayoutSampleRate){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pPlayoutSampleRate) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- *pPlayoutSampleRate = m_nPlayoutSampleRate;
- return 0;
+ *pPlayoutSampleRate = m_nPlayoutSampleRate;
+ return 0;
}
int SLAudioDevice::StartPlayout()
{
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StartPlayout()");
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StartPlayout()");
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_PLAYOUT_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_PLAYOUT_INITIALIZED();
if (m_bPlaying) {
return 0;
@@ -450,21 +471,21 @@ int SLAudioDevice::StartPlayout()
SLresult slResult;
/* Enqueue a set of zero buffers to get the ball rolling */
- uint32_t nSample10ms = m_nPlayoutSampleRate / 100;
+ uint32_t nSample10ms = m_nPlayoutSampleRate / 100;
uint8_t playBuffer[nSample10ms << BYTES_PER_SAMPLE_LOG2];
uint32_t noSamplesOut(0);
{
- // get data from jitter buffer
- noSamplesOut = SLAudioDevice::PullPlayoutData(playBuffer, nSample10ms);
- if(noSamplesOut != nSample10ms){
- AUDIO_OPENSLES_DEBUG_WARN("%d not expected as samples output count value", noSamplesOut);
- noSamplesOut = nSample10ms;
- memset(_playQueueBuffer[_playQueueSeq], 0, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
- }
- else{
- memcpy(_playQueueBuffer[_playQueueSeq], playBuffer, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
- }
-
+ // get data from jitter buffer
+ noSamplesOut = SLAudioDevice::PullPlayoutData(playBuffer, nSample10ms);
+ if(noSamplesOut != nSample10ms) {
+ AUDIO_OPENSLES_DEBUG_WARN("%d not expected as samples output count value", noSamplesOut);
+ noSamplesOut = nSample10ms;
+ memset(_playQueueBuffer[_playQueueSeq], 0, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+ }
+ else {
+ memcpy(_playQueueBuffer[_playQueueSeq], playBuffer, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+ }
+
        // write the buffer data into the device
if ((slResult = (*m_slPlayerSimpleBufferQueue)->Enqueue(m_slPlayerSimpleBufferQueue, (void*) _playQueueBuffer[_playQueueSeq], (noSamplesOut << 1))) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Player simpler buffer queue Enqueue failed with error code = %d and noSamplesOut = %d", slResult, noSamplesOut);
@@ -473,42 +494,42 @@ int SLAudioDevice::StartPlayout()
}
// Play the PCM samples using a buffer queue
- m_bPlaying = true;
+ m_bPlaying = true;
if ((slResult = (*m_slPlayerPlay)->SetPlayState(m_slPlayerPlay, SL_PLAYSTATE_PLAYING)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Failed to start playout with error code = %d", slResult);
- m_bPlaying = false;
+ m_bPlaying = false;
return -1;
}
#else
- m_bPlaying = true;
+ m_bPlaying = true;
#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
- AUDIO_OPENSLES_DEBUG_INFO("Payout started - rate=%d", m_nPlayoutSampleRate);
+ AUDIO_OPENSLES_DEBUG_INFO("Payout started - rate=%d", m_nPlayoutSampleRate);
- return 0;
+ return 0;
}
bool SLAudioDevice::Playing()
{
- return m_bPlaying;
+ return m_bPlaying;
}
int SLAudioDevice::StopPlayout()
{
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StopPlayout()");
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StopPlayout()");
- if(!m_bPlaying){
- return 0;
- }
+ if(!m_bPlaying) {
+ return 0;
+ }
#if AUDIO_OPENSLES_UNDER_ANDROID
if ((m_slPlayerPlay != NULL) && (m_slOutputMixObject != NULL) && (m_slPlayer != NULL)) {
- SLresult slResult;
-
+ SLresult slResult;
+
if ((slResult = (*m_slPlayerPlay)->SetPlayState(m_slPlayerPlay, SL_PLAYSTATE_STOPPED)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Failed to stop playout with error code = %d", slResult);
return -1;
}
-
+
if ((slResult = (*m_slPlayerSimpleBufferQueue)->Clear(m_slPlayerSimpleBufferQueue)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Failed to clear recorder buffer queue");
return -1;
@@ -516,7 +537,7 @@ int SLAudioDevice::StopPlayout()
// Destroy the player
(*m_slPlayer)->Destroy(m_slPlayer);
- // Destroy Output Mix object
+ // Destroy Output Mix object
(*m_slOutputMixObject)->Destroy(m_slOutputMixObject);
m_slPlayer = NULL;
m_slPlayerPlay = NULL;
@@ -525,124 +546,132 @@ int SLAudioDevice::StopPlayout()
}
#endif
- AUDIO_OPENSLES_DEBUG_INFO("Playout stopped");
- m_bPlayoutInitialized = false;
- m_bPlaying = false;
- return 0;
+ AUDIO_OPENSLES_DEBUG_INFO("Playout stopped");
+ m_bPlayoutInitialized = false;
+ m_bPlaying = false;
+ return 0;
}
int SLAudioDevice::RecordingIsAvailable(bool *pAvailable)
{
- CHECK_TRUE(m_bInitialized, "Device not initialized");
- CHECK_RECORDING_NOT_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Device not initialized");
+ CHECK_RECORDING_NOT_INITIALIZED();
- if(!pAvailable){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pAvailable) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- *pAvailable = true;
- return 0;
+ *pAvailable = true;
+ return 0;
}
int SLAudioDevice::MicrophoneIsAvailable(bool *pAvailable)
{
- CHECK_TRUE(m_bInitialized, "Device not initialized");
- CHECK_RECORDING_NOT_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Device not initialized");
+ CHECK_RECORDING_NOT_INITIALIZED();
- if(!pAvailable){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pAvailable) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- *pAvailable = true;
- return 0;
+ *pAvailable = true;
+ return 0;
}
int SLAudioDevice::InitMicrophone()
{
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitMicrophone()");
- CHECK_TRUE(m_bInitialized, "Device not initialized");
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitMicrophone()");
+ CHECK_TRUE(m_bInitialized, "Device not initialized");
- if(m_bMicrophoneInitialized){
- return 0;
- }
+ if(m_bMicrophoneInitialized) {
+ return 0;
+ }
- m_bMicrophoneInitialized = true;
- return 0;
+ m_bMicrophoneInitialized = true;
+ return 0;
}
int SLAudioDevice::SetMicrophoneVolume(int nMicrophoneVolume)
{
- CHECK_MICROPHONE_INITIALIZED();
+ CHECK_MICROPHONE_INITIALIZED();
- AUDIO_OPENSLES_DEBUG_INFO("SetMicrophoneVolume(%d)", nMicrophoneVolume);
+ AUDIO_OPENSLES_DEBUG_INFO("SetMicrophoneVolume(%d)", nMicrophoneVolume);
#if AUDIO_OPENSLES_UNDER_ANDROID
if (m_slMicVolume == NULL) {
- SLresult slResult;
+ SLresult slResult;
if ((slResult = (*m_slEngineObject)->GetInterface(m_slEngineObject, SL_IID_DEVICEVOLUME, (void*) &m_slMicVolume)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Failed to get 'SL_IID_DEVICEVOLUME' interface with error code = %d", slResult);
- return -1;
+ return -1;
}
}
if (m_slMicVolume != NULL) {
- SLresult slResult;
+ SLresult slResult;
int vol(0);
vol = ((nMicrophoneVolume * (m_nMaxSpeakerVolume - m_nMinSpeakerVolume) + (int) (255 / 2)) / (255)) + m_nMinSpeakerVolume;
if ((slResult = (*m_slMicVolume)->SetVolume(m_slMicVolume, m_nMicDeviceId, vol)) != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("SetVolume() failed with error code = %d", slResult);
- return -1;
+ return -1;
}
}
#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
- return 0;
+ return 0;
}
int SLAudioDevice::SetStereoRecording(bool bEnabled)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_RECORDING_NOT_INITIALIZED();
- AUDIO_OPENSLES_DEBUG_INFO("SetStereoRecording(%s)", bEnabled ? "True" : "False");
- m_bStereoRecording = bEnabled;
- return 0;
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_RECORDING_NOT_INITIALIZED();
+ AUDIO_OPENSLES_DEBUG_INFO("SetStereoRecording(%s)", bEnabled ? "True" : "False");
+ m_bStereoRecording = bEnabled;
+ return 0;
}
int SLAudioDevice::SetRecordingSampleRate(int nRecordingSampleRate)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_RECORDING_NOT_INITIALIZED();
-
- AUDIO_OPENSLES_DEBUG_INFO("SetRecordingSampleRate(%d)", nRecordingSampleRate);
-
- switch(nRecordingSampleRate){
- case 8000: case 11025: case 16000: case 22050: case 24000: case 32000: case 44100: case 64000: case 88200: case 96000: case 192000:
- {
- m_nRecordingSampleRate = nRecordingSampleRate;
- return 0;
- }
- default:
- {
- AUDIO_OPENSLES_DEBUG_ERROR("%d not valid sampling rate", nRecordingSampleRate);
- return -1;
- }
- }
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_RECORDING_NOT_INITIALIZED();
+
+ AUDIO_OPENSLES_DEBUG_INFO("SetRecordingSampleRate(%d)", nRecordingSampleRate);
+
+ switch(nRecordingSampleRate) {
+ case 8000:
+ case 11025:
+ case 16000:
+ case 22050:
+ case 24000:
+ case 32000:
+ case 44100:
+ case 64000:
+ case 88200:
+ case 96000:
+ case 192000: {
+ m_nRecordingSampleRate = nRecordingSampleRate;
+ return 0;
+ }
+ default: {
+ AUDIO_OPENSLES_DEBUG_ERROR("%d not valid sampling rate", nRecordingSampleRate);
+ return -1;
+ }
+ }
}
int SLAudioDevice::InitRecording()
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_TRUE(m_bInitialized, "Not initialized");
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitRecording()");
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitRecording()");
if (m_bRecording) {
AUDIO_OPENSLES_DEBUG_ERROR("Recording already started");
return -1;
}
- if (m_bRecordingInitialized) {
+ if (m_bRecordingInitialized) {
return 0;
}
@@ -696,21 +725,21 @@ int SLAudioDevice::InitRecording()
return -1;
}
- // Set stream type
- SLAndroidConfigurationItf slRecorderConfig;
- SLint32 slStreamType = SL_ANDROID_RECORDING_PRESET_GENERIC;
+ // Set stream type
+ SLAndroidConfigurationItf slRecorderConfig;
+ SLint32 slStreamType = SL_ANDROID_RECORDING_PRESET_GENERIC;
slResult = (*m_slRecorder)->GetInterface(m_slRecorder, SL_IID_ANDROIDCONFIGURATION, &slRecorderConfig);
if(slResult != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("GetInterface(SL_IID_ANDROIDCONFIGURATION) failed with error code = %d", slResult);
return -1;
}
- AUDIO_OPENSLES_DEBUG_INFO("Recording stream type = %d", slStreamType);
+ AUDIO_OPENSLES_DEBUG_INFO("Recording stream type = %d", slStreamType);
slResult = (*slRecorderConfig)->SetConfiguration(slRecorderConfig, SL_ANDROID_KEY_RECORDING_PRESET, &slStreamType, sizeof(SLint32));
- if(slResult != SL_RESULT_SUCCESS) {
+ if(slResult != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("SetConfiguration(SL_ANDROID_KEY_RECORDING_PRESET) failed with error code = %d", slResult);
return -1;
}
-
+
// Realizing the recorder in synchronous mode.
slResult = (*m_slRecorder)->Realize(m_slRecorder, SL_BOOLEAN_FALSE);
@@ -742,47 +771,47 @@ int SLAudioDevice::InitRecording()
#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
- AUDIO_OPENSLES_DEBUG_INFO("Recording initialized");
+ AUDIO_OPENSLES_DEBUG_INFO("Recording initialized");
- m_bRecordingInitialized = true;
+ m_bRecordingInitialized = true;
- return 0;
+ return 0;
}
int SLAudioDevice::StereoRecording(bool *pEnabled)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_RECORDING_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_RECORDING_INITIALIZED();
- if(!pEnabled){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pEnabled) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- *pEnabled = m_bStereoRecording;
- return 0;
+ *pEnabled = m_bStereoRecording;
+ return 0;
}
int SLAudioDevice::RecordingSampleRate(int *pRecordingSampleRate)
{
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_RECORDING_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_RECORDING_INITIALIZED();
- if(!pRecordingSampleRate){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pRecordingSampleRate) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- *pRecordingSampleRate = m_nRecordingSampleRate;
- return 0;
+ *pRecordingSampleRate = m_nRecordingSampleRate;
+ return 0;
}
int SLAudioDevice::StartRecording()
{
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StartRecording()");
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StartRecording()");
- CHECK_TRUE(m_bInitialized, "Not initialized");
- CHECK_RECORDING_INITIALIZED();
+ CHECK_TRUE(m_bInitialized, "Not initialized");
+ CHECK_RECORDING_INITIALIZED();
if (m_bRecording) {
return 0;
@@ -791,13 +820,13 @@ int SLAudioDevice::StartRecording()
#if AUDIO_OPENSLES_UNDER_ANDROID
if (m_slRecorderRecord == NULL) {
- AUDIO_OPENSLES_DEBUG_ERROR("RecordITF is NULL");
+ AUDIO_OPENSLES_DEBUG_ERROR("RecordITF is NULL");
return -1;
}
if (m_slRecorderSimpleBufferQueue == NULL) {
- AUDIO_OPENSLES_DEBUG_ERROR("Recorder Simple Buffer Queue is NULL");
- return -1;
+ AUDIO_OPENSLES_DEBUG_ERROR("Recorder Simple Buffer Queue is NULL");
+ return -1;
}
// Reset recording buffer
@@ -822,31 +851,31 @@ int SLAudioDevice::StartRecording()
_recQueueSeq++;
}
// Record the audio
- m_bRecording = true;
+ m_bRecording = true;
slResult = (*m_slRecorderRecord)->SetRecordState(m_slRecorderRecord, SL_RECORDSTATE_RECORDING);
if (slResult != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Failed to start recording with error code = %d", slResult);
- m_bRecording = false;
+ m_bRecording = false;
return -1;
}
#else
- m_bRecording = true;
+ m_bRecording = true;
#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
- AUDIO_OPENSLES_DEBUG_INFO("Recording started - rate = %d", m_nRecordingSampleRate);
-
- return 0;
+ AUDIO_OPENSLES_DEBUG_INFO("Recording started - rate = %d", m_nRecordingSampleRate);
+
+ return 0;
}
bool SLAudioDevice::Recording()
{
- return m_bRecording;
+ return m_bRecording;
}
int SLAudioDevice::StopRecording()
{
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StopRecording()");
- if (!m_bRecording) {
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StopRecording()");
+ if (!m_bRecording) {
return 0;
}
#if AUDIO_OPENSLES_UNDER_ANDROID
@@ -870,47 +899,47 @@ int SLAudioDevice::StopRecording()
}
#endif
- AUDIO_OPENSLES_DEBUG_INFO("Recording stopped");
- m_bRecording = false;
- m_bRecordingInitialized = false;
- return 0;
+ AUDIO_OPENSLES_DEBUG_INFO("Recording stopped");
+ m_bRecording = false;
+ m_bRecordingInitialized = false;
+ return 0;
}
int SLAudioDevice::Terminate()
{
- if (!m_bInitialized) {
+ if (!m_bInitialized) {
return 0;
}
- if(Recording()){
- StopRecording();
- }
+ if(Recording()) {
+ StopRecording();
+ }
- if(Playing()){
- StopPlayout();
- }
+ if(Playing()) {
+ StopPlayout();
+ }
#if AUDIO_OPENSLES_UNDER_ANDROID
- if(m_slPlayer){
- (*m_slPlayer)->Destroy(m_slPlayer);
- m_slPlayer = NULL;
- m_slPlayerPlay = NULL;
- m_slPlayerSimpleBufferQueue = NULL;
- }
-
- if(m_slRecorder){
- (*m_slRecorder)->Destroy(m_slRecorder);
- m_slRecorder = NULL;
- m_slRecorderRecord = NULL;
- m_slRecorderSimpleBufferQueue = NULL;
- m_slAudioIODeviceCapabilities = NULL;
- }
-
- if(m_slOutputMixObject){
- (*m_slOutputMixObject)->Destroy(m_slOutputMixObject);
- m_slOutputMixObject = NULL;
- }
+ if(m_slPlayer) {
+ (*m_slPlayer)->Destroy(m_slPlayer);
+ m_slPlayer = NULL;
+ m_slPlayerPlay = NULL;
+ m_slPlayerSimpleBufferQueue = NULL;
+ }
+
+ if(m_slRecorder) {
+ (*m_slRecorder)->Destroy(m_slRecorder);
+ m_slRecorder = NULL;
+ m_slRecorderRecord = NULL;
+ m_slRecorderSimpleBufferQueue = NULL;
+ m_slAudioIODeviceCapabilities = NULL;
+ }
+
+ if(m_slOutputMixObject) {
+ (*m_slOutputMixObject)->Destroy(m_slOutputMixObject);
+ m_slOutputMixObject = NULL;
+ }
if (m_slEngineObject) {
(*m_slEngineObject)->Destroy(m_slEngineObject);
@@ -919,9 +948,9 @@ int SLAudioDevice::Terminate()
}
#endif
- m_bSpeakerInitialized = false;
- m_bPlayoutInitialized = false;
- m_bRecordingInitialized = false;
+ m_bSpeakerInitialized = false;
+ m_bPlayoutInitialized = false;
+ m_bRecordingInitialized = false;
m_bInitialized = false;
return 0;
@@ -929,73 +958,73 @@ int SLAudioDevice::Terminate()
int SLAudioDevice::PlayoutApplyNewConfig()
{
- AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::PlayoutApplyNewConfig()");
+ AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::PlayoutApplyNewConfig()");
#if AUDIO_OPENSLES_UNDER_ANDROID
- if(m_slPlayer){
- SLresult slResult;
- int ret;
- bool wasPlaying = Playing();
-
- if(wasPlaying){
- if ((ret = StopPlayout())){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to stop playout for reconf");
- return ret;
- }
- if((ret = InitPlayout())){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to init() playout after reconf");
- return ret;
- }
- }
-
- if(wasPlaying){
- if((ret = StartPlayout())){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to start() playout after reconf");
- return ret;
- }
- }
- }
+ if(m_slPlayer) {
+ SLresult slResult;
+ int ret;
+ bool wasPlaying = Playing();
+
+ if(wasPlaying) {
+ if ((ret = StopPlayout())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to stop playout for reconf");
+ return ret;
+ }
+ if((ret = InitPlayout())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to init() playout after reconf");
+ return ret;
+ }
+ }
+
+ if(wasPlaying) {
+ if((ret = StartPlayout())) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to start() playout after reconf");
+ return ret;
+ }
+ }
+ }
#endif /* AUDIO_OPENSLES_UNDER_ANDROID */
- return 0;
+ return 0;
}
uint32_t SLAudioDevice::PullPlayoutData(void* pAudioSamples, const uint32_t nSamples)
{
- if(!pAudioSamples || !nSamples){
- AUDIO_OPENSLES_DEBUG_ERROR("PullPlayoutData() - Invalid parameter");
- return 0;
- }
-
- if(!m_pCallback){
- memset(pAudioSamples, 0, (nSamples << BYTES_PER_SAMPLE_LOG2));
- return nSamples;
- }
-
- uint32_t nSamplesOut = 0;
- const_cast<SLAudioDeviceCallback*>(m_pCallback)->NeedMorePlayData(nSamples,
- BYTES_PER_SAMPLE,
- m_bStereoPlayout ? 2 : 1,
- m_nPlayoutSampleRate,
- pAudioSamples,
- nSamplesOut);
- return nSamplesOut;
+ if(!pAudioSamples || !nSamples) {
+ AUDIO_OPENSLES_DEBUG_ERROR("PullPlayoutData() - Invalid parameter");
+ return 0;
+ }
+
+ if(!m_pCallback) {
+ memset(pAudioSamples, 0, (nSamples << BYTES_PER_SAMPLE_LOG2));
+ return nSamples;
+ }
+
+ uint32_t nSamplesOut = 0;
+ const_cast<SLAudioDeviceCallback*>(m_pCallback)->NeedMorePlayData(nSamples,
+ BYTES_PER_SAMPLE,
+ m_bStereoPlayout ? 2 : 1,
+ m_nPlayoutSampleRate,
+ pAudioSamples,
+ nSamplesOut);
+ return nSamplesOut;
}
void SLAudioDevice::PushRecordingData(void* pAudioSamples, const uint32_t nSamples)
{
- if(!pAudioSamples || !nSamples){
- AUDIO_OPENSLES_DEBUG_ERROR("PushRecordingData() - Invalid parameter");
- return;
- }
-
- if(m_pCallback){
- const_cast<SLAudioDeviceCallback*>(m_pCallback)->RecordedDataIsAvailable(pAudioSamples,
- nSamples,
- BYTES_PER_SAMPLE,
- m_bStereoRecording ? 2 : 1,
- m_nRecordingSampleRate);
- }
+ if(!pAudioSamples || !nSamples) {
+ AUDIO_OPENSLES_DEBUG_ERROR("PushRecordingData() - Invalid parameter");
+ return;
+ }
+
+ if(m_pCallback) {
+ const_cast<SLAudioDeviceCallback*>(m_pCallback)->RecordedDataIsAvailable(pAudioSamples,
+ nSamples,
+ BYTES_PER_SAMPLE,
+ m_bStereoRecording ? 2 : 1,
+ m_nRecordingSampleRate);
+ }
}
#if AUDIO_OPENSLES_UNDER_ANDROID
@@ -1004,24 +1033,24 @@ void SLAudioDevice::PlayerSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueIt
{
SLAudioDevice* This = static_cast<SLAudioDevice*> (pContext);
- // AUDIO_OPENSLES_DEBUG_INFO("PlayerSimpleBufferQueueCallback(playing=%s, _playQueueSeq=%d)", (This->m_bPlaying ? "true" : "false"), This->_playQueueSeq);
+ // AUDIO_OPENSLES_DEBUG_INFO("PlayerSimpleBufferQueueCallback(playing=%s, _playQueueSeq=%d)", (This->m_bPlaying ? "true" : "false"), This->_playQueueSeq);
- if (This->m_bPlaying && (This->_playQueueSeq < N_PLAY_QUEUE_BUFFERS)) {
- unsigned int noSamp10ms = This->m_nPlayoutSampleRate / 100;
+ if (This->m_bPlaying && (This->_playQueueSeq < N_PLAY_QUEUE_BUFFERS)) {
+ unsigned int noSamp10ms = This->m_nPlayoutSampleRate / 100;
uint8_t playBuffer[noSamp10ms << BYTES_PER_SAMPLE_LOG2];
uint32_t noSamplesOut = This->PullPlayoutData(playBuffer, noSamp10ms);
-
+
if (noSamp10ms != noSamplesOut) {
- if(noSamplesOut){ // (noSamplesOut==0) -> jitter buffer cannot provide data
- AUDIO_OPENSLES_DEBUG_ERROR("noSamp10ms (%u) != noSamplesOut (%d)", noSamp10ms, noSamplesOut);
- }
- noSamplesOut = noSamp10ms;
- memset(This->_playQueueBuffer[This->_playQueueSeq], 0, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+ if(noSamplesOut) { // (noSamplesOut==0) -> jitter buffer cannot provide data
+ AUDIO_OPENSLES_DEBUG_ERROR("noSamp10ms (%u) != noSamplesOut (%d)", noSamp10ms, noSamplesOut);
+ }
+ noSamplesOut = noSamp10ms;
+ memset(This->_playQueueBuffer[This->_playQueueSeq], 0, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
}
- else{
- memcpy(This->_playQueueBuffer[This->_playQueueSeq], playBuffer, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
- }
-
+ else {
+ memcpy(This->_playQueueBuffer[This->_playQueueSeq], playBuffer, (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
+ }
+
SLresult slResult = (*This->m_slPlayerSimpleBufferQueue)->Enqueue(This->m_slPlayerSimpleBufferQueue, This->_playQueueBuffer[This->_playQueueSeq], (noSamplesOut << BYTES_PER_SAMPLE_LOG2));
if (slResult != SL_RESULT_SUCCESS) {
AUDIO_OPENSLES_DEBUG_ERROR("Player simpler buffer queue Enqueue failed, noSamplesOut=%d, ret=%d", noSamplesOut, slResult);
@@ -1036,31 +1065,30 @@ void SLAudioDevice::RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueue
{
// AUDIO_OPENSLES_DEBUG_INFO("RecorderSimpleBufferQueueCallback()");
- SLAudioDevice* This = static_cast<SLAudioDevice*> (pContext);
+ SLAudioDevice* This = static_cast<SLAudioDevice*> (pContext);
- if (This->m_bRecording) {
+ if (This->m_bRecording) {
const unsigned int noSamp10ms = This->m_nRecordingSampleRate / 100;
-
+
#if 1 // not using async thread
- // push data
- This->PushRecordingData(This->_recQueueBuffer[0], noSamp10ms);
+ // push data
+ This->PushRecordingData(This->_recQueueBuffer[0], noSamp10ms);
// enqueue new buffer
SLresult slResult = (*This->m_slRecorderSimpleBufferQueue)->Enqueue(
- This->m_slRecorderSimpleBufferQueue,
- (void*) This->_recQueueBuffer[0],
- (noSamp10ms << BYTES_PER_SAMPLE_LOG2));
+ This->m_slRecorderSimpleBufferQueue,
+ (void*) This->_recQueueBuffer[0],
+ (noSamp10ms << BYTES_PER_SAMPLE_LOG2));
if (slResult != SL_RESULT_SUCCESS) {
- AUDIO_OPENSLES_DEBUG_WARN("Failed to enqueue recording buffer with error code = %d", slResult);
+ AUDIO_OPENSLES_DEBUG_WARN("Failed to enqueue recording buffer with error code = %d", slResult);
return;
}
#else
unsigned int dataPos = 0;
- uint16_t bufPos = 0;
+ uint16_t bufPos = 0;
int16_t insertPos = -1;
unsigned int nCopy = 0; // Number of samples to copy
- while (dataPos < noSamp10ms)
- {
+ while (dataPos < noSamp10ms) {
// Loop over all recording buffers or until we find the partially
// full buffer
// First choice is to insert into partially full buffer,
@@ -1068,24 +1096,20 @@ void SLAudioDevice::RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueue
bufPos = 0;
insertPos = -1;
nCopy = 0;
- while (bufPos < N_REC_BUFFERS)
- {
- if ((This->_recLength[bufPos] > 0) && (This->_recLength[bufPos] < noSamp10ms))
- {
+ while (bufPos < N_REC_BUFFERS) {
+ if ((This->_recLength[bufPos] > 0) && (This->_recLength[bufPos] < noSamp10ms)) {
// Found the partially full buffer
insertPos = static_cast<int16_t> (bufPos);
bufPos = N_REC_BUFFERS; // Don't need to search more
}
- else if ((-1 == insertPos) && (0 == This->_recLength[bufPos]))
- {
+ else if ((-1 == insertPos) && (0 == This->_recLength[bufPos])) {
// Found an empty buffer
insertPos = static_cast<int16_t> (bufPos);
}
++bufPos;
}
- if (insertPos > -1)
- {
+ if (insertPos > -1) {
// We found a non-full buffer, copy data from the buffer queue
            // to recBuffer
unsigned int dataToCopy = noSamp10ms - dataPos;
@@ -1093,8 +1117,7 @@ void SLAudioDevice::RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueue
unsigned int roomInBuffer = noSamp10ms - currentRecLen;
nCopy = (dataToCopy < roomInBuffer ? dataToCopy : roomInBuffer);
memcpy(&This->_recBuffer[insertPos][currentRecLen], &This->_recQueueBuffer[This->_recQueueSeq][dataPos], nCopy * sizeof(short));
- if (0 == currentRecLen)
- {
+ if (0 == currentRecLen) {
_recSeqNumber[insertPos] = This->_recCurrentSeq;
++_recCurrentSeq;
}
@@ -1104,8 +1127,7 @@ void SLAudioDevice::RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueue
This->_recLength[insertPos] += nCopy;
dataPos += nCopy;
}
- else
- {
+ else {
// Didn't find a non-full buffer
AUDIO_OPENSLES_DEBUG_WARN("Could not insert into recording buffer");
dataPos = noSamp10ms; // Don't try to insert more
@@ -1117,19 +1139,19 @@ void SLAudioDevice::RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueue
memset(This->_recQueueBuffer[This->_recQueueSeq], 0, (REC_BUF_SIZE_IN_SAMPLES << BYTES_PER_SAMPLE_LOG2));
// write the empty buffer to the queue
SLresult slResult = (*This->m_slRecorderSimpleBufferQueue)->Enqueue(
- This->m_slRecorderSimpleBufferQueue,
- (void*) This->_recQueueBuffer[This->_recQueueSeq],
- (noSamp10ms << BYTES_PER_SAMPLE_LOG2));
+ This->m_slRecorderSimpleBufferQueue,
+ (void*) This->_recQueueBuffer[This->_recQueueSeq],
+ (noSamp10ms << BYTES_PER_SAMPLE_LOG2));
if (slResult != SL_RESULT_SUCCESS) {
- AUDIO_OPENSLES_DEBUG_WARN("Failed to enqueue recording buffer with error code = %d", slResult);
+ AUDIO_OPENSLES_DEBUG_WARN("Failed to enqueue recording buffer with error code = %d", slResult);
return;
}
// update the rec queue seq
This->_recQueueSeq = (This->_recQueueSeq + 1) % N_REC_QUEUE_BUFFERS;
- // alert thread
- // TODO
+ // alert thread
+ // TODO
#endif
}
}
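For orientation, the playout and recording callbacks reformatted above both follow the same OpenSL ES simple-buffer-queue pattern: each invocation handles one 10 ms slice (sample rate / 100 samples, shifted by BYTES_PER_SAMPLE_LOG2 for 16-bit PCM), substitutes silence on jitter-buffer underrun, and immediately re-enqueues a buffer so the queue never runs dry. Below is a condensed, self-contained sketch of the playout side; the constants and the stand-in PullPlayoutData() are assumptions, and it is not a drop-in for the class above.

#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <cstdint>
#include <cstring>

static const uint32_t kPlayoutRate        = 16000; // assumed; maps to SL_SAMPLINGRATE_16
static const uint32_t kBytesPerSampleLog2 = 1;     // 16-bit mono PCM
static uint8_t        g_playBuf[(192000 / 100) << 1]; // large enough for any supported rate

// Stand-in for SLAudioDevice::PullPlayoutData(): returns the samples actually written.
extern uint32_t PullPlayoutData(void* dst, uint32_t nSamples);

static void PlayerCallback(SLAndroidSimpleBufferQueueItf queue, void* /*context*/)
{
    const uint32_t samples10ms = kPlayoutRate / 100;                 // one 10 ms slice
    const uint32_t bytes10ms   = samples10ms << kBytesPerSampleLog2;
    if (PullPlayoutData(g_playBuf, samples10ms) != samples10ms) {
        std::memset(g_playBuf, 0, bytes10ms);                        // underrun -> play silence
    }
    (*queue)->Enqueue(queue, g_playBuf, bytes10ms);                  // keep the queue fed
}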
diff --git a/plugins/audio_opensles/audio_opensles_device.h b/plugins/audio_opensles/audio_opensles_device.h
index 7f24dac..dd7da73 100755
--- a/plugins/audio_opensles/audio_opensles_device.h
+++ b/plugins/audio_opensles/audio_opensles_device.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -50,7 +50,7 @@ public:
const uint32_t nSamples,
const uint8_t nBytesPerSample,
const uint8_t nChannels,
- const uint32_t samplesPerSec) = 0;
+ const uint32_t samplesPerSec) = 0;
virtual int32_t NeedMorePlayData(const uint32_t nSamples,
const uint8_t nBytesPerSample,
@@ -66,75 +66,75 @@ protected:
class SLAudioDevice
{
public:
- SLAudioDevice(const SLAudioDeviceCallback* pCallback = NULL);
- virtual ~SLAudioDevice();
+ SLAudioDevice(const SLAudioDeviceCallback* pCallback = NULL);
+ virtual ~SLAudioDevice();
public:
- int SetCallback(const SLAudioDeviceCallback* pCallback);
- int Init();
- bool Initialized();
- int SpeakerIsAvailable(bool *pAvailable);
- int InitSpeaker();
- int SetMaxSpeakerVolume(int nMaxSpeakerVolume);
- int SetMinSpeakerVolume(int nMinSpeakerVolume);
- int SetSpeakerVolume(int nSpeakerVolume);
- int SetSpeakerOn(bool bSpeakerOn);
- int PlayoutIsAvailable(bool *pAvailable);
- int SetStereoPlayout(bool bEnabled);
- int SetPlayoutBuffer(int nPlayoutBufferSize);
- int SetPlayoutSampleRate(int nPlayoutSampleRate);
- int InitPlayout();
- int StereoPlayout(bool *pEnabled);
- int PlayoutSampleRate(int *pPlayoutSampleRate);
- int StartPlayout();
- bool Playing();
- int StopPlayout();
- int RecordingIsAvailable(bool *pAvailable);
- int MicrophoneIsAvailable(bool *pAvailable);
- int InitMicrophone();
- int SetMicrophoneVolume(int nMicrophoneVolume);
- int SetStereoRecording(bool bEnabled);
- int SetRecordingSampleRate(int nRecordingSampleRate);
- int InitRecording();
- int StereoRecording(bool *pEnabled);
- int RecordingSampleRate(int *pRecordingSampleRate);
- int StartRecording();
- bool Recording();
- int StopRecording();
- int Terminate();
+ int SetCallback(const SLAudioDeviceCallback* pCallback);
+ int Init();
+ bool Initialized();
+ int SpeakerIsAvailable(bool *pAvailable);
+ int InitSpeaker();
+ int SetMaxSpeakerVolume(int nMaxSpeakerVolume);
+ int SetMinSpeakerVolume(int nMinSpeakerVolume);
+ int SetSpeakerVolume(int nSpeakerVolume);
+ int SetSpeakerOn(bool bSpeakerOn);
+ int PlayoutIsAvailable(bool *pAvailable);
+ int SetStereoPlayout(bool bEnabled);
+ int SetPlayoutBuffer(int nPlayoutBufferSize);
+ int SetPlayoutSampleRate(int nPlayoutSampleRate);
+ int InitPlayout();
+ int StereoPlayout(bool *pEnabled);
+ int PlayoutSampleRate(int *pPlayoutSampleRate);
+ int StartPlayout();
+ bool Playing();
+ int StopPlayout();
+ int RecordingIsAvailable(bool *pAvailable);
+ int MicrophoneIsAvailable(bool *pAvailable);
+ int InitMicrophone();
+ int SetMicrophoneVolume(int nMicrophoneVolume);
+ int SetStereoRecording(bool bEnabled);
+ int SetRecordingSampleRate(int nRecordingSampleRate);
+ int InitRecording();
+ int StereoRecording(bool *pEnabled);
+ int RecordingSampleRate(int *pRecordingSampleRate);
+ int StartRecording();
+ bool Recording();
+ int StopRecording();
+ int Terminate();
private:
- int PlayoutApplyNewConfig();
- uint32_t PullPlayoutData(void* pAudioSamples, const uint32_t nSamples);
- void PushRecordingData(void* pAudioSamples, const uint32_t nSamples);
+ int PlayoutApplyNewConfig();
+ uint32_t PullPlayoutData(void* pAudioSamples, const uint32_t nSamples);
+ void PushRecordingData(void* pAudioSamples, const uint32_t nSamples);
#if AUDIO_OPENSLES_UNDER_ANDROID
- static void PlayerSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf queueItf, void *pContext);
- static void RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf queueItf, void *pContext);
+ static void PlayerSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf queueItf, void *pContext);
+ static void RecorderSimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf queueItf, void *pContext);
#endif
private:
- const SLAudioDeviceCallback* m_pCallback;
- int m_nMicDeviceId;
- bool m_bInitialized;
- bool m_bSpeakerInitialized;
- bool m_bPlayoutInitialized;
- bool m_bRecordingInitialized;
- bool m_bMicrophoneInitialized;
- bool m_bSpeakerOn;
- bool m_bStereoPlayout;
- bool m_bStereoRecording;
- int m_nPlayoutBufferSize;
- int m_nRecordingBufferSize;
- int m_nPlayoutSampleRate;
- int m_nRecordingSampleRate;
- bool m_bPlaying;
- bool m_bRecording;
- int m_nSpeakerVolume;
- int m_nMaxSpeakerVolume;
- int m_nMinSpeakerVolume;
+ const SLAudioDeviceCallback* m_pCallback;
+ int m_nMicDeviceId;
+ bool m_bInitialized;
+ bool m_bSpeakerInitialized;
+ bool m_bPlayoutInitialized;
+ bool m_bRecordingInitialized;
+ bool m_bMicrophoneInitialized;
+ bool m_bSpeakerOn;
+ bool m_bStereoPlayout;
+ bool m_bStereoRecording;
+ int m_nPlayoutBufferSize;
+ int m_nRecordingBufferSize;
+ int m_nPlayoutSampleRate;
+ int m_nRecordingSampleRate;
+ bool m_bPlaying;
+ bool m_bRecording;
+ int m_nSpeakerVolume;
+ int m_nMaxSpeakerVolume;
+ int m_nMinSpeakerVolume;
#if AUDIO_OPENSLES_UNDER_ANDROID
- // audio unit
+ // audio unit
SLObjectItf m_slEngineObject;
// playout device
@@ -152,10 +152,10 @@ private:
SLAndroidSimpleBufferQueueItf m_slRecorderSimpleBufferQueue;
SLDeviceVolumeItf m_slMicVolume;
- int _recQueueSeq;
+ int _recQueueSeq;
- // Playout buffer
- uint8_t _playQueueBuffer[N_PLAY_QUEUE_BUFFERS][PLAY_BUF_SIZE_IN_SAMPLES << BYTES_PER_SAMPLE_LOG2];
+ // Playout buffer
+ uint8_t _playQueueBuffer[N_PLAY_QUEUE_BUFFERS][PLAY_BUF_SIZE_IN_SAMPLES << BYTES_PER_SAMPLE_LOG2];
int _playQueueSeq;
// Recording buffer
uint8_t _recQueueBuffer[N_REC_QUEUE_BUFFERS][REC_BUF_SIZE_IN_SAMPLES << BYTES_PER_SAMPLE_LOG2];
diff --git a/plugins/audio_opensles/audio_opensles_device_impl.cxx b/plugins/audio_opensles/audio_opensles_device_impl.cxx
index 8e32d91..69269aa 100755
--- a/plugins/audio_opensles/audio_opensles_device_impl.cxx
+++ b/plugins/audio_opensles/audio_opensles_device_impl.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -21,7 +21,7 @@
#include "audio_opensles_consumer.h"
SLAudioDeviceCallbackImpl::SLAudioDeviceCallbackImpl():
-SLAudioDeviceCallback()
+ SLAudioDeviceCallback()
{
}
@@ -30,29 +30,29 @@ SLAudioDeviceCallbackImpl::~SLAudioDeviceCallbackImpl()
}
int32_t SLAudioDeviceCallbackImpl::RecordedDataIsAvailable(const void* audioSamples,
- const uint32_t nSamples,
- const uint8_t nBytesPerSample,
- const uint8_t nChannels,
- const uint32_t samplesPerSec)
+ const uint32_t nSamples,
+ const uint8_t nBytesPerSample,
+ const uint8_t nChannels,
+ const uint32_t samplesPerSec)
{
- if(!m_pProducer){
- AUDIO_OPENSLES_DEBUG_WARN("No wrapped producer");
- return 0;
- }
- return audio_producer_opensles_handle_data_10ms(m_pProducer, audioSamples, nSamples, nBytesPerSample, samplesPerSec, nChannels);
+ if(!m_pProducer) {
+ AUDIO_OPENSLES_DEBUG_WARN("No wrapped producer");
+ return 0;
+ }
+ return audio_producer_opensles_handle_data_10ms(m_pProducer, audioSamples, nSamples, nBytesPerSample, samplesPerSec, nChannels);
}
int32_t SLAudioDeviceCallbackImpl::NeedMorePlayData(const uint32_t nSamples,
- const uint8_t nBytesPerSample,
- const uint8_t nChannels,
- const uint32_t samplesPerSec,
- void* audioSamples,
- uint32_t& nSamplesOut)
+ const uint8_t nBytesPerSample,
+ const uint8_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ uint32_t& nSamplesOut)
{
- if(!m_pConsumer){
- AUDIO_OPENSLES_DEBUG_WARN("No wrapped consumer");
- return 0;
- }
- return audio_consumer_opensles_get_data_10ms(m_pConsumer, audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, nSamplesOut);
+ if(!m_pConsumer) {
+ AUDIO_OPENSLES_DEBUG_WARN("No wrapped consumer");
+ return 0;
+ }
+ return audio_consumer_opensles_get_data_10ms(m_pConsumer, audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, nSamplesOut);
} \ No newline at end of file
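The two overrides above are the whole bridge between the OpenSL ES device and the tinyDAV objects: captured 10 ms frames are pushed to the wrapped producer, and playout requests are pulled from the wrapped consumer. A minimal wiring sketch using only classes and methods visible in this diff follows; instance creation/teardown and the prepare/start sequence are elided, and the function name is illustrative.

#include "audio_opensles_device.h"
#include "audio_opensles_device_impl.h"

// consumer/producer are the wrapped tinyDAV objects; their lifetime is managed elsewhere.
int wire_audio_path(const struct audio_consumer_opensles_s* consumer,
                    const struct audio_producer_opensles_s* producer)
{
    static SLAudioDeviceCallbackImpl callback;   // forwards 10 ms frames both ways
    callback.SetConsumer(consumer);              // playout side: NeedMorePlayData()
    callback.SetProducer(producer);              // capture side: RecordedDataIsAvailable()

    static SLAudioDevice device(&callback);
    if (device.Init() != 0) {
        return -1;
    }
    // InitPlayout()/StartPlayout() and InitRecording()/StartRecording()
    // would follow in the instance-management code.
    return 0;
}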
diff --git a/plugins/audio_opensles/audio_opensles_device_impl.h b/plugins/audio_opensles/audio_opensles_device_impl.h
index 8168fe9..6f6ad60 100755
--- a/plugins/audio_opensles/audio_opensles_device_impl.h
+++ b/plugins/audio_opensles/audio_opensles_device_impl.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -24,14 +24,14 @@
class SLAudioDeviceCallbackImpl : public SLAudioDeviceCallback
{
public:
- SLAudioDeviceCallbackImpl();
- virtual ~SLAudioDeviceCallbackImpl();
+ SLAudioDeviceCallbackImpl();
+ virtual ~SLAudioDeviceCallbackImpl();
- virtual int32_t RecordedDataIsAvailable(const void* audioSamples,
+ virtual int32_t RecordedDataIsAvailable(const void* audioSamples,
const uint32_t nSamples,
const uint8_t nBytesPerSample,
const uint8_t nChannels,
- const uint32_t samplesPerSec);
+ const uint32_t samplesPerSec);
virtual int32_t NeedMorePlayData(const uint32_t nSamples,
const uint8_t nBytesPerSample,
@@ -40,12 +40,16 @@ public:
void* audioSamples,
uint32_t& nSamplesOut);
- inline void SetConsumer(const struct audio_consumer_opensles_s* pConsumer){ m_pConsumer = pConsumer; }
- inline void SetProducer(const struct audio_producer_opensles_s* pProducer){ m_pProducer = pProducer; }
+ inline void SetConsumer(const struct audio_consumer_opensles_s* pConsumer) {
+ m_pConsumer = pConsumer;
+ }
+ inline void SetProducer(const struct audio_producer_opensles_s* pProducer) {
+ m_pProducer = pProducer;
+ }
private:
- const struct audio_consumer_opensles_s* m_pConsumer; // mut be const and must not take reference
- const struct audio_producer_opensles_s* m_pProducer; // mut be const and must not take reference
+    const struct audio_consumer_opensles_s* m_pConsumer; // must be const and must not take a reference
+    const struct audio_producer_opensles_s* m_pProducer; // must be const and must not take a reference
};
#endif /* _DOUBANGO_AUDIO_OPENSLES_SLDEVICE_IMPL_H */
diff --git a/plugins/audio_opensles/audio_opensles_producer.cxx b/plugins/audio_opensles/audio_opensles_producer.cxx
index 69c06f6..007b59f 100755
--- a/plugins/audio_opensles/audio_opensles_producer.cxx
+++ b/plugins/audio_opensles/audio_opensles_producer.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -24,159 +24,158 @@
#include "tsk_memory.h"
#include "tsk_debug.h"
-typedef struct audio_producer_opensles_s
-{
- TDAV_DECLARE_PRODUCER_AUDIO;
-
- bool isMuted;
- audio_opensles_instance_handle_t* audioInstHandle;
- struct{
- void* ptr;
- int size;
- int index;
- } buffer;
+typedef struct audio_producer_opensles_s {
+ TDAV_DECLARE_PRODUCER_AUDIO;
+
+ bool isMuted;
+ audio_opensles_instance_handle_t* audioInstHandle;
+ struct {
+ void* ptr;
+ int size;
+ int index;
+ } buffer;
}
audio_producer_opensles_t;
int audio_producer_opensles_handle_data_10ms(const audio_producer_opensles_t* _self, const void* audioSamples, int nSamples, int nBytesPerSample, int samplesPerSec, int nChannels)
{
- if(!_self || !audioSamples || !nSamples){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(!TMEDIA_PRODUCER(_self)->enc_cb.callback){
- AUDIO_OPENSLES_DEBUG_WARN("No callback function is registered for the producer");
- return 0;
- }
- if((nSamples != (samplesPerSec / 100))){
- AUDIO_OPENSLES_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
- return -2;
- }
- if((nBytesPerSample != (TMEDIA_PRODUCER(_self)->audio.bits_per_sample >> 3))){
- AUDIO_OPENSLES_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
- return -3;
- }
- if((nChannels != TMEDIA_PRODUCER(_self)->audio.channels)){
- AUDIO_OPENSLES_DEBUG_ERROR("Recording - %d not the expected number of channels but should be %d", nChannels, TMEDIA_PRODUCER(_self)->audio.channels);
- return -4;
- }
-
- int nSamplesInBits = (nSamples * nBytesPerSample);
- if(_self->buffer.index + nSamplesInBits > _self->buffer.size){
- AUDIO_OPENSLES_DEBUG_ERROR("Buffer overflow");
- return -5;
- }
-
- audio_producer_opensles_t* self = const_cast<audio_producer_opensles_t*>(_self);
-
- if(self->isMuted){
- memset((((uint8_t*)self->buffer.ptr) + self->buffer.index), 0, nSamplesInBits);
- }
- else{
- memcpy((((uint8_t*)self->buffer.ptr) + self->buffer.index), audioSamples, nSamplesInBits);
- }
- self->buffer.index += nSamplesInBits;
-
- if(self->buffer.index == self->buffer.size){
- self->buffer.index = 0;
- TMEDIA_PRODUCER(self)->enc_cb.callback(TMEDIA_PRODUCER(self)->enc_cb.callback_data, self->buffer.ptr, self->buffer.size);
- }
-
- return 0;
+ if(!_self || !audioSamples || !nSamples) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(!TMEDIA_PRODUCER(_self)->enc_cb.callback) {
+ AUDIO_OPENSLES_DEBUG_WARN("No callback function is registered for the producer");
+ return 0;
+ }
+ if((nSamples != (samplesPerSec / 100))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
+ return -2;
+ }
+ if((nBytesPerSample != (TMEDIA_PRODUCER(_self)->audio.bits_per_sample >> 3))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
+ return -3;
+ }
+ if((nChannels != TMEDIA_PRODUCER(_self)->audio.channels)) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Recording - %d not the expected number of channels but should be %d", nChannels, TMEDIA_PRODUCER(_self)->audio.channels);
+ return -4;
+ }
+
+ int nSamplesInBits = (nSamples * nBytesPerSample);
+ if(_self->buffer.index + nSamplesInBits > _self->buffer.size) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Buffer overflow");
+ return -5;
+ }
+
+ audio_producer_opensles_t* self = const_cast<audio_producer_opensles_t*>(_self);
+
+ if(self->isMuted) {
+ memset((((uint8_t*)self->buffer.ptr) + self->buffer.index), 0, nSamplesInBits);
+ }
+ else {
+ memcpy((((uint8_t*)self->buffer.ptr) + self->buffer.index), audioSamples, nSamplesInBits);
+ }
+ self->buffer.index += nSamplesInBits;
+
+ if(self->buffer.index == self->buffer.size) {
+ self->buffer.index = 0;
+ TMEDIA_PRODUCER(self)->enc_cb.callback(TMEDIA_PRODUCER(self)->enc_cb.callback_data, self->buffer.ptr, self->buffer.size);
+ }
+
+ return 0;
}
/* ============ Media Producer Interface ================= */
static int audio_producer_opensles_set(tmedia_producer_t* _self, const tmedia_param_t* param)
-{
- audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
- if(param->plugin_type == tmedia_ppt_producer){
- if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "mute")){
- self->isMuted = (*((int32_t*)param->value) != 0);
- // Mute not supported on android -> send silence when needed
- return 0;
- }
- else if(tsk_striequals(param->key, "volume")){
- return audio_opensles_instance_set_microphone_volume(self->audioInstHandle, *((int32_t*)param->value));
- }
- }
- }
- return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
+{
+ audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
+ if(param->plugin_type == tmedia_ppt_producer) {
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "mute")) {
+ self->isMuted = (*((int32_t*)param->value) != 0);
+ // Mute not supported on android -> send silence when needed
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "volume")) {
+ return audio_opensles_instance_set_microphone_volume(self->audioInstHandle, *((int32_t*)param->value));
+ }
+ }
+ }
+ return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
}
static int audio_producer_opensles_prepare(tmedia_producer_t* _self, const tmedia_codec_t* codec)
{
- audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
- if(!self || !codec){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- // create audio instance
- if(!(self->audioInstHandle = audio_opensles_instance_create(TMEDIA_PRODUCER(self)->session_id))){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio instance handle");
- return -2;
- }
-
- // check that ptime is mutiple of 10
- if((codec->plugin->audio.ptime % 10)){
- AUDIO_OPENSLES_DEBUG_ERROR("ptime=%d not multiple of 10", codec->plugin->audio.ptime);
- return -3;
- }
-
- // init input parameters from the codec
- TMEDIA_PRODUCER(self)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
- TMEDIA_PRODUCER(self)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
- TMEDIA_PRODUCER(self)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
-
- AUDIO_OPENSLES_DEBUG_INFO("audio_producer_opensles_prepare(channels=%d, rate=%d, ptime=%d)", codec->plugin->audio.channels, codec->plugin->rate, codec->plugin->audio.ptime);
-
- // prepare playout device and update output parameters
- int ret;
- ret = audio_opensles_instance_prepare_producer(self->audioInstHandle, &_self);
-
- // now that the producer is prepared we can initialize internal buffer using device caps
- if(ret == 0){
- // allocate buffer
- int xsize = ((TMEDIA_PRODUCER(self)->audio.ptime * TMEDIA_PRODUCER(self)->audio.rate) / 1000) * (TMEDIA_PRODUCER(self)->audio.bits_per_sample >> 3);
- AUDIO_OPENSLES_DEBUG_INFO("producer buffer xsize = %d", xsize);
- if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))){
- AUDIO_OPENSLES_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
- self->buffer.size = 0;
- return -1;
- }
- self->buffer.size = xsize;
- self->buffer.index = 0;
- }
- return ret;
+ audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
+ if(!self || !codec) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ // create audio instance
+ if(!(self->audioInstHandle = audio_opensles_instance_create(TMEDIA_PRODUCER(self)->session_id))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to create audio instance handle");
+ return -2;
+ }
+
+    // check that ptime is a multiple of 10
+ if((codec->plugin->audio.ptime % 10)) {
+ AUDIO_OPENSLES_DEBUG_ERROR("ptime=%d not multiple of 10", codec->plugin->audio.ptime);
+ return -3;
+ }
+
+ // init input parameters from the codec
+ TMEDIA_PRODUCER(self)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
+ TMEDIA_PRODUCER(self)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
+ TMEDIA_PRODUCER(self)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
+
+ AUDIO_OPENSLES_DEBUG_INFO("audio_producer_opensles_prepare(channels=%d, rate=%d, ptime=%d)", codec->plugin->audio.channels, codec->plugin->rate, codec->plugin->audio.ptime);
+
+    // prepare recording device and update output parameters
+ int ret;
+ ret = audio_opensles_instance_prepare_producer(self->audioInstHandle, &_self);
+
+ // now that the producer is prepared we can initialize internal buffer using device caps
+ if(ret == 0) {
+ // allocate buffer
+ int xsize = ((TMEDIA_PRODUCER(self)->audio.ptime * TMEDIA_PRODUCER(self)->audio.rate) / 1000) * (TMEDIA_PRODUCER(self)->audio.bits_per_sample >> 3);
+ AUDIO_OPENSLES_DEBUG_INFO("producer buffer xsize = %d", xsize);
+ if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
+ self->buffer.size = 0;
+ return -1;
+ }
+ self->buffer.size = xsize;
+ self->buffer.index = 0;
+ }
+ return ret;
}
static int audio_producer_opensles_start(tmedia_producer_t* _self)
{
- audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
- if(!self){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- AUDIO_OPENSLES_DEBUG_INFO("audio_producer_opensles_start");
-
- return audio_opensles_instance_start_producer(self->audioInstHandle);
+ audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
+ if(!self) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ AUDIO_OPENSLES_DEBUG_INFO("audio_producer_opensles_start");
+
+ return audio_opensles_instance_start_producer(self->audioInstHandle);
}
static int audio_producer_opensles_pause(tmedia_producer_t* self)
{
- return 0;
+ return 0;
}
static int audio_producer_opensles_stop(tmedia_producer_t* _self)
{
- audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
- if(!self){
- AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ audio_producer_opensles_t* self = (audio_producer_opensles_t*)_self;
+ if(!self) {
+ AUDIO_OPENSLES_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return audio_opensles_instance_stop_producer(self->audioInstHandle);
+ return audio_opensles_instance_stop_producer(self->audioInstHandle);
}
@@ -186,54 +185,52 @@ static int audio_producer_opensles_stop(tmedia_producer_t* _self)
/* constructor */
static tsk_object_t* audio_producer_opensles_ctor(tsk_object_t *_self, va_list * app)
{
- audio_producer_opensles_t *self = (audio_producer_opensles_t *)_self;
- if(self){
- /* init base */
- tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(self));
- /* init self */
-
- }
- return self;
+ audio_producer_opensles_t *self = (audio_producer_opensles_t *)_self;
+ if(self) {
+ /* init base */
+ tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(self));
+ /* init self */
+
+ }
+ return self;
}
/* destructor */
static tsk_object_t* audio_producer_opensles_dtor(tsk_object_t *_self)
-{
- audio_producer_opensles_t *self = (audio_producer_opensles_t *)_self;
- if(self){
- /* stop */
- audio_producer_opensles_stop(TMEDIA_PRODUCER(self));
- /* deinit self */
- if(self->audioInstHandle){
- audio_opensles_instance_destroy(&self->audioInstHandle);
- }
- TSK_FREE(self->buffer.ptr);
-
- /* deinit base */
- tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(self));
- }
-
- return self;
+{
+ audio_producer_opensles_t *self = (audio_producer_opensles_t *)_self;
+ if(self) {
+ /* stop */
+ audio_producer_opensles_stop(TMEDIA_PRODUCER(self));
+ /* deinit self */
+ if(self->audioInstHandle) {
+ audio_opensles_instance_destroy(&self->audioInstHandle);
+ }
+ TSK_FREE(self->buffer.ptr);
+
+ /* deinit base */
+ tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(self));
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t audio_producer_opensles_def_s =
-{
- sizeof(audio_producer_opensles_t),
- audio_producer_opensles_ctor,
- audio_producer_opensles_dtor,
- tdav_producer_audio_cmp,
+static const tsk_object_def_t audio_producer_opensles_def_s = {
+ sizeof(audio_producer_opensles_t),
+ audio_producer_opensles_ctor,
+ audio_producer_opensles_dtor,
+ tdav_producer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t audio_producer_opensles_plugin_def_s =
-{
- &audio_producer_opensles_def_s,
-
- tmedia_audio,
- "SLES audio producer",
-
- audio_producer_opensles_set,
- audio_producer_opensles_prepare,
- audio_producer_opensles_start,
- audio_producer_opensles_pause,
- audio_producer_opensles_stop
+static const tmedia_producer_plugin_def_t audio_producer_opensles_plugin_def_s = {
+ &audio_producer_opensles_def_s,
+
+ tmedia_audio,
+ "SLES audio producer",
+
+ audio_producer_opensles_set,
+ audio_producer_opensles_prepare,
+ audio_producer_opensles_start,
+ audio_producer_opensles_pause,
+ audio_producer_opensles_stop
};
const tmedia_producer_plugin_def_t *audio_producer_opensles_plugin_def_t = &audio_producer_opensles_plugin_def_s;
\ No newline at end of file
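For illustration, the producer above accumulates 10 ms capture callbacks into a ptime-sized buffer and fires the encoder callback only once that buffer is full. The stand-alone C++ sketch below shows the same arithmetic (frame bytes = ptime * rate / 1000 * bytes-per-sample); the class name, the sizes used in main() and the lambda callback are assumptions for the example, not part of the patch.

// Sketch of the 10 ms accumulation performed by audio_producer_opensles_handle_data_10ms().
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

struct PtimeAccumulator {
    std::vector<uint8_t> buf; // one ptime worth of PCM
    size_t index = 0;         // write offset

    PtimeAccumulator(int ptimeMs, int rateHz, int bitsPerSample) {
        // e.g. 20 ms * 8000 Hz / 1000 * 2 bytes = 320 bytes per frame
        buf.resize((size_t)(ptimeMs * rateHz / 1000) * (bitsPerSample >> 3));
    }

    // Called once per 10 ms capture; invokes onFrame() when a full frame is ready,
    // mirroring TMEDIA_PRODUCER(self)->enc_cb.callback in the plugin.
    template <typename OnFrame>
    void push10ms(const void* samples, size_t bytes, OnFrame onFrame) {
        if (index + bytes > buf.size()) { index = 0; return; } // overflow guard
        std::memcpy(buf.data() + index, samples, bytes);
        index += bytes;
        if (index == buf.size()) { index = 0; onFrame(buf.data(), buf.size()); }
    }
};

int main() {
    PtimeAccumulator acc(20 /*ptime ms*/, 8000 /*Hz*/, 16 /*bits*/);
    uint8_t tenMs[160] = {0}; // 10 ms of 16-bit mono at 8 kHz
    for (int i = 0; i < 2; ++i) {
        acc.push10ms(tenMs, sizeof(tenMs), [](const uint8_t*, size_t n) {
            std::printf("frame ready: %zu bytes\n", n);
        });
    }
    return 0;
}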
diff --git a/plugins/audio_opensles/audio_opensles_producer.h b/plugins/audio_opensles/audio_opensles_producer.h
index 0c4c756..d6ce9d9 100755
--- a/plugins/audio_opensles/audio_opensles_producer.h
+++ b/plugins/audio_opensles/audio_opensles_producer.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/audio_opensles/dllmain.cxx b/plugins/audio_opensles/dllmain.cxx
index e6b34f4..295c340 100755
--- a/plugins/audio_opensles/dllmain.cxx
+++ b/plugins/audio_opensles/dllmain.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -20,16 +20,15 @@
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
- )
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- case DLL_THREAD_ATTACH:
- case DLL_THREAD_DETACH:
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ case DLL_THREAD_ATTACH:
+ case DLL_THREAD_DETACH:
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
diff --git a/plugins/audio_webrtc/audio_webrtc.cxx b/plugins/audio_webrtc/audio_webrtc.cxx
index 167166d..b604d49 100755
--- a/plugins/audio_webrtc/audio_webrtc.cxx
+++ b/plugins/audio_webrtc/audio_webrtc.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -37,99 +37,93 @@ using namespace webrtc;
#if DOUBANGO_AUDIO_WEBRTC_UNDER_ANDROID
// https://groups.google.com/group/android-ndk/browse_thread/thread/a1667f28162cf69b/8ef3a171df7f8dfe
-extern "C"
-{
- void *__dso_handle = NULL;
-}
+extern "C"
+{
+ void *__dso_handle = NULL;
+}
#endif
-typedef enum PLUGIN_INDEX_E
-{
- PLUGIN_INDEX_AUDIO_CONSUMER,
- PLUGIN_INDEX_AUDIO_PRODUCER,
- PLUGIN_INDEX_COUNT
+typedef enum PLUGIN_INDEX_E {
+ PLUGIN_INDEX_AUDIO_CONSUMER,
+ PLUGIN_INDEX_AUDIO_PRODUCER,
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- return PLUGIN_INDEX_COUNT;
+ return PLUGIN_INDEX_COUNT;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_AUDIO_CONSUMER: return tsk_plugin_def_type_consumer;
- case PLUGIN_INDEX_AUDIO_PRODUCER: return tsk_plugin_def_type_producer;
- default:
- {
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ return tsk_plugin_def_type_consumer;
+ case PLUGIN_INDEX_AUDIO_PRODUCER:
+ return tsk_plugin_def_type_producer;
+ default: {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+ }
+ }
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return tsk_plugin_def_media_type_audio;
- }
- default:
- {
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return tsk_plugin_def_media_type_audio;
+ }
+ default: {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+ }
+ }
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- {
- return audio_consumer_webrtc_plugin_def_t;
- }
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return audio_producer_webrtc_plugin_def_t;
- }
- default:
- {
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_AUDIO_CONSUMER: {
+ return audio_consumer_webrtc_plugin_def_t;
+ }
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return audio_producer_webrtc_plugin_def_t;
+ }
+ default: {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+ }
+ }
}
//
// WebRTC AudioInstance
//
-typedef struct audio_webrtc_instance_s
-{
- TSK_DECLARE_OBJECT;
-
- uint64_t sessionId;
-
- bool isStarted;
-
- bool isConsumerPrepared;
- bool isConsumerStarted;
- bool isProducerPrepared;
- bool isProducerStarted;
-
- bool isSpeakerAvailable;
- bool isPlayoutAvailable;
- bool isRecordingAvailable;
-
- AudioDeviceModule* device;
- AudioTransportImpl* transport;
-
- TSK_DECLARE_SAFEOBJ;
+typedef struct audio_webrtc_instance_s {
+ TSK_DECLARE_OBJECT;
+
+ uint64_t sessionId;
+
+ bool isStarted;
+
+ bool isConsumerPrepared;
+ bool isConsumerStarted;
+ bool isProducerPrepared;
+ bool isProducerStarted;
+
+ bool isSpeakerAvailable;
+ bool isPlayoutAvailable;
+ bool isRecordingAvailable;
+
+ AudioDeviceModule* device;
+ AudioTransportImpl* transport;
+
+ TSK_DECLARE_SAFEOBJ;
}
audio_webrtc_instance_t;
typedef tsk_list_t audio_webrtc_instances_L_t;
@@ -138,425 +132,425 @@ static audio_webrtc_instances_L_t* __audioInstances = tsk_null;
static tsk_object_t* audio_webrtc_instance_ctor(tsk_object_t * self, va_list * app)
{
- audio_webrtc_instance_t* audioInstance = (audio_webrtc_instance_t*)self;
- if(audioInstance){
- tsk_safeobj_init(audioInstance);
- }
- return self;
+ audio_webrtc_instance_t* audioInstance = (audio_webrtc_instance_t*)self;
+ if(audioInstance) {
+ tsk_safeobj_init(audioInstance);
+ }
+ return self;
}
static tsk_object_t* audio_webrtc_instance_dtor(tsk_object_t * self)
-{
- DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("Audio Instance destroyed");
- audio_webrtc_instance_t* audioInstance = (audio_webrtc_instance_t*)self;
- if(audioInstance){
+{
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("Audio Instance destroyed");
+ audio_webrtc_instance_t* audioInstance = (audio_webrtc_instance_t*)self;
+ if(audioInstance) {
tsk_safeobj_lock(audioInstance);
- if(audioInstance->device){
- audioInstance->device->RegisterAudioCallback(tsk_null);
- audioInstance->device->Terminate();
- audioInstance->device->Release();//FIXME: must be deleted?
- audioInstance->device = tsk_null;
- }
- if(audioInstance->transport){
- delete audioInstance->transport;
- audioInstance->transport = tsk_null;
- }
+ if(audioInstance->device) {
+ audioInstance->device->RegisterAudioCallback(tsk_null);
+ audioInstance->device->Terminate();
+ audioInstance->device->Release();//FIXME: must be deleted?
+ audioInstance->device = tsk_null;
+ }
+ if(audioInstance->transport) {
+ delete audioInstance->transport;
+ audioInstance->transport = tsk_null;
+ }
tsk_safeobj_unlock(audioInstance);
-
- tsk_safeobj_deinit(audioInstance);
- }
- return self;
+
+ tsk_safeobj_deinit(audioInstance);
+ }
+ return self;
}
static int audio_webrtc_instance_cmp(const tsk_object_t *_ai1, const tsk_object_t *_ai2)
{
- return ((int)_ai1 - (int)_ai2);
+ return ((int)_ai1 - (int)_ai2);
}
-static const tsk_object_def_t audio_webrtc_instance_def_s =
-{
- sizeof(audio_webrtc_instance_t),
- audio_webrtc_instance_ctor,
- audio_webrtc_instance_dtor,
- audio_webrtc_instance_cmp,
+static const tsk_object_def_t audio_webrtc_instance_def_s = {
+ sizeof(audio_webrtc_instance_t),
+ audio_webrtc_instance_ctor,
+ audio_webrtc_instance_dtor,
+ audio_webrtc_instance_cmp,
};
const tsk_object_def_t *audio_webrtc_instance_def_t = &audio_webrtc_instance_def_s;
audio_webrtc_instance_handle_t* audio_webrtc_instance_create(uint64_t sessionId)
{
- audio_webrtc_instance_t* audioInstance = tsk_null;
-
- // create list used to hold instances
- if(!__audioInstances && !(__audioInstances = tsk_list_create())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create new list");
- return tsk_null;
- }
-
- //= lock the list
- tsk_list_lock(__audioInstances);
-
- // find the instance from the list
- const tsk_list_item_t* item;
- tsk_list_foreach(item, __audioInstances){
- if(((audio_webrtc_instance_t*)item->data)->sessionId == sessionId){
- audioInstance = (audio_webrtc_instance_t*)tsk_object_ref(item->data);
- break;
- }
- }
-
- if(!audioInstance){
- audio_webrtc_instance_t* _audioInstance;
- if(!(_audioInstance = (audio_webrtc_instance_t*)tsk_object_new(&audio_webrtc_instance_def_s))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create new audio instance");
- goto done;
- }
-
- if(!(_audioInstance->device = AudioDeviceModuleImpl::Create(kAudioDeviceModuleId))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio device");
- TSK_OBJECT_SAFE_FREE(_audioInstance);
- goto done;
- }
- _audioInstance->device->AddRef();
-
- if(!(_audioInstance->transport = new AudioTransportImpl(_audioInstance->device))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio transport");
- TSK_OBJECT_SAFE_FREE(_audioInstance);
- goto done;
- }
- if((_audioInstance->device->RegisterAudioCallback(_audioInstance->transport))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::RegisterAudioCallback() failed");
- TSK_OBJECT_SAFE_FREE(_audioInstance);
- goto done;
- }
-
- if((_audioInstance->device->Init())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::Init() failed");
- TSK_OBJECT_SAFE_FREE(_audioInstance);
- goto done;
- }
-
- _audioInstance->sessionId = sessionId;
- audioInstance = _audioInstance;
- tsk_list_push_back_data(__audioInstances, (void**)&_audioInstance);
- }
+ audio_webrtc_instance_t* audioInstance = tsk_null;
+
+ // create list used to hold instances
+ if(!__audioInstances && !(__audioInstances = tsk_list_create())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create new list");
+ return tsk_null;
+ }
+
+ //= lock the list
+ tsk_list_lock(__audioInstances);
+
+ // find the instance from the list
+ const tsk_list_item_t* item;
+ tsk_list_foreach(item, __audioInstances) {
+ if(((audio_webrtc_instance_t*)item->data)->sessionId == sessionId) {
+ audioInstance = (audio_webrtc_instance_t*)tsk_object_ref(item->data);
+ break;
+ }
+ }
+
+ if(!audioInstance) {
+ audio_webrtc_instance_t* _audioInstance;
+ if(!(_audioInstance = (audio_webrtc_instance_t*)tsk_object_new(&audio_webrtc_instance_def_s))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create new audio instance");
+ goto done;
+ }
+
+ if(!(_audioInstance->device = AudioDeviceModuleImpl::Create(kAudioDeviceModuleId))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio device");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+ _audioInstance->device->AddRef();
+
+ if(!(_audioInstance->transport = new AudioTransportImpl(_audioInstance->device))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio transport");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+ if((_audioInstance->device->RegisterAudioCallback(_audioInstance->transport))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::RegisterAudioCallback() failed");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+
+ if((_audioInstance->device->Init())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::Init() failed");
+ TSK_OBJECT_SAFE_FREE(_audioInstance);
+ goto done;
+ }
+
+ _audioInstance->sessionId = sessionId;
+ audioInstance = _audioInstance;
+ tsk_list_push_back_data(__audioInstances, (void**)&_audioInstance);
+ }
done:
- //= unlock the list
- tsk_list_unlock(__audioInstances);
+ //= unlock the list
+ tsk_list_unlock(__audioInstances);
- return audioInstance;
+ return audioInstance;
}
int audio_webrtc_instance_prepare_consumer(audio_webrtc_instance_handle_t* _self, tmedia_consumer_t** _consumer)
{
- audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
- if(!self || !self->device || !self->transport || !_consumer || !*_consumer){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- if(self->isConsumerPrepared){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Consumer already prepared");
- return 0;
- }
-
- int ret;
- bool _bool;
-
- tsk_safeobj_lock(self);
-
- self->transport->SetConsumer((const struct audio_consumer_webrtc_s*)*_consumer);
-
- if((ret = self->device->SetPlayoutDevice(DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule->SetPlayoutDevice(%d) failed", DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT);
- }
-
- if((ret = self->device->SpeakerIsAvailable(&_bool))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SpeakerIsAvailable() failed with error code=%d", ret);
- }
- else{
- if(!_bool){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SpeakerIsAvailable() returned false");
- }
- self->isSpeakerAvailable = _bool;
- }
-
- if((ret = self->device->InitSpeaker())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("InitSpeaker() failed with error code=%d", ret);
- }
-
- if((ret = self->device->PlayoutIsAvailable(&_bool))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutIsAvailable() failed with error code =%d", ret);
- }
- else{
- if(!_bool){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutIsAvailable() returned false");
- }
- self->isPlayoutAvailable = _bool;
- }
-
- if((ret = self->device->SetStereoPlayout(((*_consumer)->audio.in.channels == 2)))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetStereoPlayout(%d==2) failed with error code=%d", (*_consumer)->audio.in.channels, ret);
- }
-
- //if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, (*_consumer)->audio.ptime))){
- // DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", (*_consumer)->audio.ptime, ret);
- //}
- // always request 10ms buffers. In all cases WebRTC don't support anything else
- if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, 10))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", 10, ret);
- }
-
- uint32_t playoutSampleRate = (*_consumer)->audio.out.rate ? (*_consumer)->audio.out.rate : (*_consumer)->audio.in.rate;
- if((ret = self->device->SetPlayoutSampleRate(playoutSampleRate))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutSampleRate(%d) failed with error code=%d", playoutSampleRate, ret);
- }
-
- if((ret = self->device->InitPlayout())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::InitPlayout() failed with error code = %d", ret);
- goto done;
- }
-
- // init output parameters
- if((ret = self->device->StereoPlayout(&_bool))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StereoPlayout() failed with error code=%d", ret);
- }
- else{
- (*_consumer)->audio.out.channels = (_bool ? 2 : 1);
- }
- if((ret = self->device->PlayoutSampleRate(&playoutSampleRate))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutSampleRate() failed with error code=%d", ret);
- }
- else{
- (*_consumer)->audio.out.rate = playoutSampleRate;
- }
+ audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+ if(!self || !self->device || !self->transport || !_consumer || !*_consumer) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ if(self->isConsumerPrepared) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Consumer already prepared");
+ return 0;
+ }
+
+ int ret;
+ bool _bool;
+
+ tsk_safeobj_lock(self);
+
+ self->transport->SetConsumer((const struct audio_consumer_webrtc_s*)*_consumer);
+
+ if((ret = self->device->SetPlayoutDevice(DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule->SetPlayoutDevice(%d) failed", DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT);
+ }
+
+ if((ret = self->device->SpeakerIsAvailable(&_bool))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SpeakerIsAvailable() failed with error code=%d", ret);
+ }
+ else {
+ if(!_bool) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SpeakerIsAvailable() returned false");
+ }
+ self->isSpeakerAvailable = _bool;
+ }
+
+ if((ret = self->device->InitSpeaker())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("InitSpeaker() failed with error code=%d", ret);
+ }
+
+ if((ret = self->device->PlayoutIsAvailable(&_bool))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutIsAvailable() failed with error code =%d", ret);
+ }
+ else {
+ if(!_bool) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutIsAvailable() returned false");
+ }
+ self->isPlayoutAvailable = _bool;
+ }
+
+ if((ret = self->device->SetStereoPlayout(((*_consumer)->audio.in.channels == 2)))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetStereoPlayout(%d==2) failed with error code=%d", (*_consumer)->audio.in.channels, ret);
+ }
+
+ //if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, (*_consumer)->audio.ptime))){
+ // DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", (*_consumer)->audio.ptime, ret);
+ //}
+    // always request 10ms buffers; WebRTC doesn't support anything else
+ if((ret = self->device->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize, 10))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutBuffer(%d ms) failed with error code=%d", 10, ret);
+ }
+
+ uint32_t playoutSampleRate = (*_consumer)->audio.out.rate ? (*_consumer)->audio.out.rate : (*_consumer)->audio.in.rate;
+ if((ret = self->device->SetPlayoutSampleRate(playoutSampleRate))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetPlayoutSampleRate(%d) failed with error code=%d", playoutSampleRate, ret);
+ }
+
+ if((ret = self->device->InitPlayout())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::InitPlayout() failed with error code = %d", ret);
+ goto done;
+ }
+
+ // init output parameters
+ if((ret = self->device->StereoPlayout(&_bool))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StereoPlayout() failed with error code=%d", ret);
+ }
+ else {
+ (*_consumer)->audio.out.channels = (_bool ? 2 : 1);
+ }
+ if((ret = self->device->PlayoutSampleRate(&playoutSampleRate))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("PlayoutSampleRate() failed with error code=%d", ret);
+ }
+ else {
+ (*_consumer)->audio.out.rate = playoutSampleRate;
+ }
done:
- tsk_safeobj_unlock(self);
+ tsk_safeobj_unlock(self);
- self->isConsumerPrepared = (ret == 0);
+ self->isConsumerPrepared = (ret == 0);
- return ret;
+ return ret;
}
int audio_webrtc_instance_prepare_producer(audio_webrtc_instance_handle_t* _self, tmedia_producer_t** _producer)
{
- audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
- if(!self || !self->device || !self->transport || !_producer || !*_producer){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- if(self->isProducerPrepared){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Producer already prepared");
- return 0;
- }
-
- int ret;
- bool _bool;
-
- tsk_safeobj_lock(self);
-
- self->transport->SetProducer((const struct audio_producer_webrtc_s*)*_producer);
-
- if((ret = self->device->SetRecordingDevice(DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule->SetRecordingDevice(%d) failed", DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT);
- }
-
- if((ret = self->device->RecordingIsAvailable(&_bool))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingIsAvailable() failed with error code =%d", ret);
- }
- else{
- if(!_bool){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingIsAvailable() returned false");
- }
- self->isRecordingAvailable = _bool;
- }
-
- if((ret = self->device->MicrophoneIsAvailable(&_bool))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("MicrophoneIsAvailable() failed with error code =%d", ret);
- }
- else{
- if(!_bool){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("MicrophoneIsAvailable() returned false");
- }
- else{
- if((ret = self->device->InitMicrophone())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("InitMicrophone() failed with error code =%d", ret);
- }
- }
- }
-
- if((ret = self->device->SetStereoRecording(((*_producer)->audio.channels == 2)))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetStereoRecording(%d==2) failed with error code=%d", (*_producer)->audio.channels, ret);
- }
-
- uint32_t recordingSampleRate = (*_producer)->audio.rate;
- if((ret = self->device->SetRecordingSampleRate(recordingSampleRate))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetRecordingSampleRate(%d) failed with error code=%d", recordingSampleRate, ret);
- }
-
- if((ret = self->device->InitRecording())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::InitRecording() failed with error code = %d", ret);
- goto done;
- }
-
- // init output parameters
- if((ret = self->device->StereoRecording(&_bool))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StereoRecording() failed with error code=%d", ret);
- }
- else{
- (*_producer)->audio.channels = (_bool ? 2 : 1);
- }
- if((ret = self->device->RecordingSampleRate(&recordingSampleRate))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingSampleRate() failed with error code=%d", ret);
- }
- else{
- (*_producer)->audio.rate = recordingSampleRate;
- }
+ audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+ if(!self || !self->device || !self->transport || !_producer || !*_producer) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ if(self->isProducerPrepared) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Producer already prepared");
+ return 0;
+ }
+
+ int ret;
+ bool _bool;
+
+ tsk_safeobj_lock(self);
+
+ self->transport->SetProducer((const struct audio_producer_webrtc_s*)*_producer);
+
+ if((ret = self->device->SetRecordingDevice(DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule->SetRecordingDevice(%d) failed", DOUBANGO_AUDIO_WEBRTC_DEVICE_DEFAULT);
+ }
+
+ if((ret = self->device->RecordingIsAvailable(&_bool))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingIsAvailable() failed with error code =%d", ret);
+ }
+ else {
+ if(!_bool) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingIsAvailable() returned false");
+ }
+ self->isRecordingAvailable = _bool;
+ }
+
+ if((ret = self->device->MicrophoneIsAvailable(&_bool))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("MicrophoneIsAvailable() failed with error code =%d", ret);
+ }
+ else {
+ if(!_bool) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("MicrophoneIsAvailable() returned false");
+ }
+ else {
+ if((ret = self->device->InitMicrophone())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("InitMicrophone() failed with error code =%d", ret);
+ }
+ }
+ }
+
+ if((ret = self->device->SetStereoRecording(((*_producer)->audio.channels == 2)))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetStereoRecording(%d==2) failed with error code=%d", (*_producer)->audio.channels, ret);
+ }
+
+ uint32_t recordingSampleRate = (*_producer)->audio.rate;
+ if((ret = self->device->SetRecordingSampleRate(recordingSampleRate))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("SetRecordingSampleRate(%d) failed with error code=%d", recordingSampleRate, ret);
+ }
+
+ if((ret = self->device->InitRecording())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("AudioDeviceModule::InitRecording() failed with error code = %d", ret);
+ goto done;
+ }
+
+ // init output parameters
+ if((ret = self->device->StereoRecording(&_bool))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StereoRecording() failed with error code=%d", ret);
+ }
+ else {
+ (*_producer)->audio.channels = (_bool ? 2 : 1);
+ }
+ if((ret = self->device->RecordingSampleRate(&recordingSampleRate))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("RecordingSampleRate() failed with error code=%d", ret);
+ }
+ else {
+ (*_producer)->audio.rate = recordingSampleRate;
+ }
done:
- tsk_safeobj_unlock(self);
+ tsk_safeobj_unlock(self);
- self->isProducerPrepared = (ret == 0);
+ self->isProducerPrepared = (ret == 0);
- return ret;
+ return ret;
}
int audio_webrtc_instance_start_consumer(audio_webrtc_instance_handle_t* _self)
{
- audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
- if(!self || !self->device || !self->transport){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
- if(!self->isConsumerPrepared){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Consumer not prepared");
- goto done;
- }
-
- if(self->isConsumerStarted){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Consumer already started");
- goto done;
- }
-
- if(self->isPlayoutAvailable){
- int ret;
- if((ret = self->device->StartPlayout())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StartPlayout() failed with error code = %d", ret);
- }
-
- self->isConsumerStarted = self->device->Playing();
- DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("isPlaying=%s", (self->isConsumerPrepared ? "true" : "false"));
- }
+ audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+ if(!self || !self->device || !self->transport) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+ if(!self->isConsumerPrepared) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Consumer not prepared");
+ goto done;
+ }
+
+ if(self->isConsumerStarted) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Consumer already started");
+ goto done;
+ }
+
+ if(self->isPlayoutAvailable) {
+ int ret;
+ if((ret = self->device->StartPlayout())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StartPlayout() failed with error code = %d", ret);
+ }
+
+ self->isConsumerStarted = self->device->Playing();
+        DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("isPlaying=%s", (self->isConsumerStarted ? "true" : "false"));
+ }
done:
- tsk_safeobj_unlock(self);
- return (self->isConsumerStarted ? 0 : -1);
+ tsk_safeobj_unlock(self);
+ return (self->isConsumerStarted ? 0 : -1);
}
int audio_webrtc_instance_start_producer(audio_webrtc_instance_handle_t* _self)
{
- audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
- if(!self || !self->device || !self->transport){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
- if(!self->isProducerPrepared){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Producer not prepared");
- goto done;
- }
-
- if(self->isProducerStarted){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Consumer already started");
- goto done;
- }
-
- if(self->isRecordingAvailable){
- int ret;
- if((ret = self->device->StartRecording())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StartRecording() failed with error code = %d", ret);
- }
-
- self->isProducerStarted = self->device->Recording();
- DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("isRecording=%s", (self->isProducerStarted ? "true" : "false"));
- }
+ audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+ if(!self || !self->device || !self->transport) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+ if(!self->isProducerPrepared) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Producer not prepared");
+ goto done;
+ }
+
+ if(self->isProducerStarted) {
+        DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN("Producer already started");
+ goto done;
+ }
+
+ if(self->isRecordingAvailable) {
+ int ret;
+ if((ret = self->device->StartRecording())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StartRecording() failed with error code = %d", ret);
+ }
+
+ self->isProducerStarted = self->device->Recording();
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO("isRecording=%s", (self->isProducerStarted ? "true" : "false"));
+ }
done:
- tsk_safeobj_unlock(self);
- return (self->isProducerStarted ? 0 : -1);
- return 0;
+ tsk_safeobj_unlock(self);
+ return (self->isProducerStarted ? 0 : -1);
+ return 0;
}
int audio_webrtc_instance_stop_consumer(audio_webrtc_instance_handle_t* _self)
{
- audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
- if(!self || !self->device || !self->transport){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
-
- if(!self->isConsumerStarted){
- goto done;
- }
-
- int ret;
- if((ret = self->device->StopPlayout())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StopPlayout() failed with error code = %d", ret);
- }
- else{
- self->isConsumerStarted = self->device->Playing();
- }
+ audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+ if(!self || !self->device || !self->transport) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+
+ if(!self->isConsumerStarted) {
+ goto done;
+ }
+
+ int ret;
+ if((ret = self->device->StopPlayout())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StopPlayout() failed with error code = %d", ret);
+ }
+ else {
+ self->isConsumerStarted = self->device->Playing();
+ }
done:
- tsk_safeobj_unlock(self);
- return (self->isConsumerStarted ? -1 : 0);
+ tsk_safeobj_unlock(self);
+ return (self->isConsumerStarted ? -1 : 0);
}
int audio_webrtc_instance_stop_producer(audio_webrtc_instance_handle_t* _self)
{
- audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
- if(!self || !self->device || !self->transport){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
-
- if(!self->isProducerStarted){
- goto done;
- }
-
- int ret;
- if((ret = self->device->StopRecording())){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StopRecording() failed with error code = %d", ret);
- }
- else{
- self->isProducerStarted = self->device->Recording();
- }
+ audio_webrtc_instance_t* self = (audio_webrtc_instance_t*)_self;
+ if(!self || !self->device || !self->transport) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+
+ if(!self->isProducerStarted) {
+ goto done;
+ }
+
+ int ret;
+ if((ret = self->device->StopRecording())) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("StopRecording() failed with error code = %d", ret);
+ }
+ else {
+ self->isProducerStarted = self->device->Recording();
+ }
done:
- tsk_safeobj_unlock(self);
- return (self->isProducerStarted ? -1 : 0);
+ tsk_safeobj_unlock(self);
+ return (self->isProducerStarted ? -1 : 0);
}
-int audio_webrtc_instance_destroy(audio_webrtc_instance_handle_t** _self){
- if(!_self || !*_self){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- tsk_list_lock(__audioInstances);
- if(tsk_object_get_refcount(*_self)==1){
- tsk_list_remove_item_by_data(__audioInstances, *_self);
- }
- else {
- tsk_object_unref(*_self);
- }
- tsk_list_unlock(__audioInstances);
- *_self = tsk_null;
- return 0;
+int audio_webrtc_instance_destroy(audio_webrtc_instance_handle_t** _self)
+{
+ if(!_self || !*_self) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ tsk_list_lock(__audioInstances);
+ if(tsk_object_get_refcount(*_self)==1) {
+ tsk_list_remove_item_by_data(__audioInstances, *_self);
+ }
+ else {
+ tsk_object_unref(*_self);
+ }
+ tsk_list_unlock(__audioInstances);
+ *_self = tsk_null;
+ return 0;
}
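For illustration, audio_webrtc_instance_create()/audio_webrtc_instance_destroy() above keep one shared, reference-counted instance per session id, so the consumer and the producer of the same call reuse a single AudioDeviceModule. The sketch below restates that pattern with std::shared_ptr and std::map in place of the tsk_object/tsk_list machinery; all names and the small main() are assumptions for the example, not part of the patch.

// Sketch of the per-session instance registry used by the WebRTC audio plugin.
#include <cstdint>
#include <map>
#include <memory>
#include <mutex>

struct AudioInstance {
    uint64_t sessionId = 0;
    // device / transport handles would live here
};

static std::mutex g_lock;
static std::map<uint64_t, std::shared_ptr<AudioInstance>> g_instances;

// Consumer and producer of the same session get the same instance back.
std::shared_ptr<AudioInstance> instance_create(uint64_t sessionId) {
    std::lock_guard<std::mutex> guard(g_lock);
    auto it = g_instances.find(sessionId);
    if (it != g_instances.end()) {
        return it->second; // reuse the existing instance
    }
    auto inst = std::make_shared<AudioInstance>();
    inst->sessionId = sessionId;
    g_instances[sessionId] = inst;
    return inst;
}

// Drop the caller's reference; remove the registry entry once it is the last owner.
void instance_destroy(std::shared_ptr<AudioInstance>& inst) {
    if (!inst) {
        return;
    }
    std::lock_guard<std::mutex> guard(g_lock);
    const uint64_t id = inst->sessionId;
    inst.reset();
    auto it = g_instances.find(id);
    if (it != g_instances.end() && it->second.use_count() == 1) {
        g_instances.erase(it); // last user gone
    }
}

int main() {
    auto producerSide = instance_create(1);
    auto consumerSide = instance_create(1); // same session -> same instance
    instance_destroy(producerSide);         // still referenced by consumerSide
    instance_destroy(consumerSide);         // registry entry removed here
    return 0;
}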
diff --git a/plugins/audio_webrtc/audio_webrtc.h b/plugins/audio_webrtc/audio_webrtc.h
index 115a243..070d2fb 100755
--- a/plugins/audio_webrtc/audio_webrtc.h
+++ b/plugins/audio_webrtc/audio_webrtc.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/audio_webrtc/audio_webrtc_config.h b/plugins/audio_webrtc/audio_webrtc_config.h
index dc7c1fd..d441703 100755
--- a/plugins/audio_webrtc/audio_webrtc_config.h
+++ b/plugins/audio_webrtc/audio_webrtc_config.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -72,13 +72,13 @@
# define DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS extern "C" {
# define DOUBANGO_AUDIO_WEBRTC_END_DECLS }
#else
-# define DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS
+# define DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS
# define DOUBANGO_AUDIO_WEBRTC_END_DECLS
#endif
#ifdef _MSC_VER
#if HAVE_FFMPEG // FFMPeg warnings (treated as errors)
-# pragma warning (disable:4244)
+# pragma warning (disable:4244)
#endif
# define inline __inline
# define _CRT_SECURE_NO_WARNINGS
@@ -96,7 +96,7 @@
#endif
#if HAVE_CONFIG_H
- #include "../config.h"
+#include "../config.h"
#endif
#if DOUBANGO_AUDIO_WEBRTC_UNDER_WINDOWS
@@ -113,22 +113,30 @@
#define ANDROID_DEBUG_TAG "plugin_audio_webrtc" // DDMS log tag when using Eclipse
static void DOUBANGO_AUDIO_WEBRTC_DEBUG_ANY(int level, const char* fmt, ...)
{
- char* message = tsk_null;
- va_list ap;
- va_start(ap, fmt);
- tsk_sprintf_2(&message, fmt, &ap);
-
- if(message){
- switch(level){
- case DEBUG_LEVEL_INFO: __android_log_write(ANDROID_LOG_INFO, ANDROID_DEBUG_TAG, message); break;
- case DEBUG_LEVEL_WARN: __android_log_write(ANDROID_LOG_WARN, ANDROID_DEBUG_TAG, message); break;
- case DEBUG_LEVEL_ERROR: __android_log_write(ANDROID_LOG_ERROR, ANDROID_DEBUG_TAG, message); break;
- case DEBUG_LEVEL_FATAL: __android_log_write(ANDROID_LOG_FATAL, ANDROID_DEBUG_TAG, message); break;
- }
- TSK_FREE(message);
- }
-
- va_end(ap);
+ char* message = tsk_null;
+ va_list ap;
+ va_start(ap, fmt);
+ tsk_sprintf_2(&message, fmt, &ap);
+
+ if(message) {
+ switch(level) {
+ case DEBUG_LEVEL_INFO:
+ __android_log_write(ANDROID_LOG_INFO, ANDROID_DEBUG_TAG, message);
+ break;
+ case DEBUG_LEVEL_WARN:
+ __android_log_write(ANDROID_LOG_WARN, ANDROID_DEBUG_TAG, message);
+ break;
+ case DEBUG_LEVEL_ERROR:
+ __android_log_write(ANDROID_LOG_ERROR, ANDROID_DEBUG_TAG, message);
+ break;
+ case DEBUG_LEVEL_FATAL:
+ __android_log_write(ANDROID_LOG_FATAL, ANDROID_DEBUG_TAG, message);
+ break;
+ }
+ TSK_FREE(message);
+ }
+
+ va_end(ap);
}
#define DOUBANGO_AUDIO_WEBRTC_DEBUG_INFO(FMT, ...) DOUBANGO_AUDIO_WEBRTC_DEBUG_ANY(DEBUG_LEVEL_INFO, FMT, ##__VA_ARGS__)
#define DOUBANGO_AUDIO_WEBRTC_DEBUG_WARN(FMT, ...) DOUBANGO_AUDIO_WEBRTC_DEBUG_ANY(DEBUG_LEVEL_WARN, FMT, ##__VA_ARGS__)
diff --git a/plugins/audio_webrtc/audio_webrtc_consumer.cxx b/plugins/audio_webrtc/audio_webrtc_consumer.cxx
index e55097b..8762a2d 100755
--- a/plugins/audio_webrtc/audio_webrtc_consumer.cxx
+++ b/plugins/audio_webrtc/audio_webrtc_consumer.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -24,153 +24,152 @@
#include "tsk_memory.h"
#include "tsk_debug.h"
-typedef struct audio_consumer_webrtc_s
-{
- TDAV_DECLARE_CONSUMER_AUDIO;
- audio_webrtc_instance_handle_t* audioInstHandle;
- struct{
- void* ptr;
- bool isFull;
- int size;
- int index;
- } buffer;
+typedef struct audio_consumer_webrtc_s {
+ TDAV_DECLARE_CONSUMER_AUDIO;
+ audio_webrtc_instance_handle_t* audioInstHandle;
+ struct {
+ void* ptr;
+ bool isFull;
+ int size;
+ int index;
+ } buffer;
}
audio_consumer_webrtc_t;
int audio_consumer_webrtc_get_data_10ms(const audio_consumer_webrtc_t* _self, void* audioSamples, int nSamples, int nBytesPerSample, int nChannels, int samplesPerSec, uint32_t &nSamplesOut)
{
- nSamplesOut = 0;
- if(!_self || !audioSamples || !nSamples){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if((nSamples != (samplesPerSec / 100))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
- return -2;
- }
- if((nBytesPerSample != (TMEDIA_CONSUMER(_self)->audio.bits_per_sample >> 3))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
- return -3;
- }
- if((nChannels != TMEDIA_CONSUMER(_self)->audio.out.channels)){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not the expected number of channels", nChannels);
- return -4;
- }
-
- audio_consumer_webrtc_t* self = const_cast<audio_consumer_webrtc_t*>(_self);
-
- if(self->buffer.index == self->buffer.size){
- tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(self));
- self->buffer.index = 0;
- if((tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(self), self->buffer.ptr, self->buffer.size)) != self->buffer.size){
- nSamplesOut = 0;
- return 0;
- }
- }
-
- int nSamplesInBits = (nSamples * nBytesPerSample);
- if(_self->buffer.index + nSamplesInBits <= _self->buffer.size){
- memcpy(audioSamples, (((uint8_t*)self->buffer.ptr) + self->buffer.index), nSamplesInBits);
- }
- self->buffer.index += nSamplesInBits;
- TSK_CLAMP(0, self->buffer.index, self->buffer.size);
- nSamplesOut = nSamples;
-
- return 0;
+ nSamplesOut = 0;
+ if(!_self || !audioSamples || !nSamples) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if((nSamples != (samplesPerSec / 100))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
+ return -2;
+ }
+ if((nBytesPerSample != (TMEDIA_CONSUMER(_self)->audio.bits_per_sample >> 3))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
+ return -3;
+ }
+ if((nChannels != TMEDIA_CONSUMER(_self)->audio.out.channels)) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not the expected number of channels", nChannels);
+ return -4;
+ }
+
+ audio_consumer_webrtc_t* self = const_cast<audio_consumer_webrtc_t*>(_self);
+
+ if(self->buffer.index == self->buffer.size) {
+ tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(self));
+ self->buffer.index = 0;
+ if((tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(self), self->buffer.ptr, self->buffer.size)) != self->buffer.size) {
+ nSamplesOut = 0;
+ return 0;
+ }
+ }
+
+ int nSamplesInBits = (nSamples * nBytesPerSample);
+ if(_self->buffer.index + nSamplesInBits <= _self->buffer.size) {
+ memcpy(audioSamples, (((uint8_t*)self->buffer.ptr) + self->buffer.index), nSamplesInBits);
+ }
+ self->buffer.index += nSamplesInBits;
+ TSK_CLAMP(0, self->buffer.index, self->buffer.size);
+ nSamplesOut = nSamples;
+
+ return 0;
}
/* ============ Media Consumer Interface ================= */
static int audio_consumer_webrtc_set(tmedia_consumer_t* self, const tmedia_param_t* param)
{
- audio_consumer_webrtc_t* webrtc = (audio_consumer_webrtc_t*)self;
- int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+ audio_consumer_webrtc_t* webrtc = (audio_consumer_webrtc_t*)self;
+ int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
- if(ret == 0){
- if(tsk_striequals(param->key, "volume")){
-
- }
- }
+ if(ret == 0) {
+ if(tsk_striequals(param->key, "volume")) {
- return ret;
+ }
+ }
+
+ return ret;
}
static int audio_consumer_webrtc_prepare(tmedia_consumer_t* _self, const tmedia_codec_t* codec)
{
- audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
- if(!self){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- // create audio instance
- if(!(self->audioInstHandle = audio_webrtc_instance_create(TMEDIA_CONSUMER(self)->session_id))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio instance handle");
- return -1;
- }
-
- // initialize input parameters from the codec information
- TMEDIA_CONSUMER(self)->audio.ptime = codec->plugin->audio.ptime;
- TMEDIA_CONSUMER(self)->audio.in.channels = codec->plugin->audio.channels;
- TMEDIA_CONSUMER(self)->audio.in.rate = codec->plugin->rate;
-
- // prepare playout device and update output parameters
- int ret = audio_webrtc_instance_prepare_consumer(self->audioInstHandle, &_self);
-
- // now that the producer is prepared we can initialize internal buffer using device caps
- if(ret == 0){
- // allocate buffer
- int xsize = ((TMEDIA_CONSUMER(self)->audio.ptime * TMEDIA_CONSUMER(self)->audio.out.rate) / 1000) * (TMEDIA_CONSUMER(self)->audio.bits_per_sample >> 3);
- if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
- self->buffer.size = 0;
- return -1;
- }
- memset(self->buffer.ptr, 0, xsize);
- self->buffer.size = xsize;
- self->buffer.index = 0;
- self->buffer.isFull = false;
- }
- return ret;
+ audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
+ if(!self) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ // create audio instance
+ if(!(self->audioInstHandle = audio_webrtc_instance_create(TMEDIA_CONSUMER(self)->session_id))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio instance handle");
+ return -1;
+ }
+
+ // initialize input parameters from the codec information
+ TMEDIA_CONSUMER(self)->audio.ptime = codec->plugin->audio.ptime;
+ TMEDIA_CONSUMER(self)->audio.in.channels = codec->plugin->audio.channels;
+ TMEDIA_CONSUMER(self)->audio.in.rate = codec->plugin->rate;
+
+ // prepare playout device and update output parameters
+ int ret = audio_webrtc_instance_prepare_consumer(self->audioInstHandle, &_self);
+
+    // now that the consumer is prepared we can initialize the internal buffer using device caps
+ if(ret == 0) {
+ // allocate buffer
+ int xsize = ((TMEDIA_CONSUMER(self)->audio.ptime * TMEDIA_CONSUMER(self)->audio.out.rate) / 1000) * (TMEDIA_CONSUMER(self)->audio.bits_per_sample >> 3);
+ if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
+ self->buffer.size = 0;
+ return -1;
+ }
+ memset(self->buffer.ptr, 0, xsize);
+ self->buffer.size = xsize;
+ self->buffer.index = 0;
+ self->buffer.isFull = false;
+ }
+ return ret;
}
static int audio_consumer_webrtc_start(tmedia_consumer_t* _self)
{
- audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
- if(!self){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
+ if(!self) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return audio_webrtc_instance_start_consumer(self->audioInstHandle);
+ return audio_webrtc_instance_start_consumer(self->audioInstHandle);
}
static int audio_consumer_webrtc_consume(tmedia_consumer_t* _self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
- if(!self || !buffer || !size){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("1Invalid parameter");
- return -1;
- }
- /* buffer is already decoded */
- return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
+ audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
+ if(!self || !buffer || !size) {
+        DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ /* buffer is already decoded */
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
}
static int audio_consumer_webrtc_pause(tmedia_consumer_t* self)
{
- return 0;
+ return 0;
}
static int audio_consumer_webrtc_stop(tmedia_consumer_t* _self)
{
- audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
- if(!self){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ audio_consumer_webrtc_t* self = (audio_consumer_webrtc_t*)_self;
+ if(!self) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return audio_webrtc_instance_stop_consumer(self->audioInstHandle);
+ return audio_webrtc_instance_stop_consumer(self->audioInstHandle);
}
@@ -180,54 +179,52 @@ static int audio_consumer_webrtc_stop(tmedia_consumer_t* _self)
/* constructor */
static tsk_object_t* audio_consumer_webrtc_ctor(tsk_object_t *_self, va_list * app)
{
- audio_consumer_webrtc_t *self = (audio_consumer_webrtc_t *)_self;
- if(self){
- /* init base */
- tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(self));
- /* init self */
-
- }
- return self;
+ audio_consumer_webrtc_t *self = (audio_consumer_webrtc_t *)_self;
+ if(self) {
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(self));
+ /* init self */
+
+ }
+ return self;
}
/* destructor */
static tsk_object_t* audio_consumer_webrtc_dtor(tsk_object_t *_self)
-{
- audio_consumer_webrtc_t *self = (audio_consumer_webrtc_t *)_self;
- if(self){
- /* stop */
- audio_consumer_webrtc_stop(TMEDIA_CONSUMER(self));
- /* deinit self */
- if(self->audioInstHandle){
- audio_webrtc_instance_destroy(&self->audioInstHandle);
- }
- TSK_FREE(self->buffer.ptr);
- /* deinit base */
- tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(self));
- }
-
- return self;
+{
+ audio_consumer_webrtc_t *self = (audio_consumer_webrtc_t *)_self;
+ if(self) {
+ /* stop */
+ audio_consumer_webrtc_stop(TMEDIA_CONSUMER(self));
+ /* deinit self */
+ if(self->audioInstHandle) {
+ audio_webrtc_instance_destroy(&self->audioInstHandle);
+ }
+ TSK_FREE(self->buffer.ptr);
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(self));
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t audio_consumer_webrtc_def_s =
-{
- sizeof(audio_consumer_webrtc_t),
- audio_consumer_webrtc_ctor,
- audio_consumer_webrtc_dtor,
- tdav_consumer_audio_cmp,
+static const tsk_object_def_t audio_consumer_webrtc_def_s = {
+ sizeof(audio_consumer_webrtc_t),
+ audio_consumer_webrtc_ctor,
+ audio_consumer_webrtc_dtor,
+ tdav_consumer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t audio_consumer_webrtc_plugin_def_s =
-{
- &audio_consumer_webrtc_def_s,
-
- tmedia_audio,
- "WebRTC audio consumer",
-
- audio_consumer_webrtc_set,
- audio_consumer_webrtc_prepare,
- audio_consumer_webrtc_start,
- audio_consumer_webrtc_consume,
- audio_consumer_webrtc_pause,
- audio_consumer_webrtc_stop
+static const tmedia_consumer_plugin_def_t audio_consumer_webrtc_plugin_def_s = {
+ &audio_consumer_webrtc_def_s,
+
+ tmedia_audio,
+ "WebRTC audio consumer",
+
+ audio_consumer_webrtc_set,
+ audio_consumer_webrtc_prepare,
+ audio_consumer_webrtc_start,
+ audio_consumer_webrtc_consume,
+ audio_consumer_webrtc_pause,
+ audio_consumer_webrtc_stop
};
const tmedia_consumer_plugin_def_t *audio_consumer_webrtc_plugin_def_t = &audio_consumer_webrtc_plugin_def_s;
diff --git a/plugins/audio_webrtc/audio_webrtc_consumer.h b/plugins/audio_webrtc/audio_webrtc_consumer.h
index 9dc7dd1..8874f6d 100755
--- a/plugins/audio_webrtc/audio_webrtc_consumer.h
+++ b/plugins/audio_webrtc/audio_webrtc_consumer.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -22,7 +22,7 @@
DOUBANGO_AUDIO_WEBRTC_BEGIN_DECLS
-extern const struct tmedia_consumer_plugin_def_s *audio_consumer_webrtc_plugin_def_t;
+extern const struct tmedia_consumer_plugin_def_s *audio_consumer_webrtc_plugin_def_t;
int audio_consumer_webrtc_get_data_10ms(const struct audio_consumer_webrtc_s* self, void* audioSamples, int nSamples, int nBytesPerSample, int nChannels, int samplesPerSec, uint32_t &nSamplesOut);
diff --git a/plugins/audio_webrtc/audio_webrtc_producer.cxx b/plugins/audio_webrtc/audio_webrtc_producer.cxx
index 02c5aeb..3d95d06 100755
--- a/plugins/audio_webrtc/audio_webrtc_producer.cxx
+++ b/plugins/audio_webrtc/audio_webrtc_producer.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -24,147 +24,146 @@
#include "tsk_memory.h"
#include "tsk_debug.h"
-typedef struct audio_producer_webrtc_s
-{
- TDAV_DECLARE_PRODUCER_AUDIO;
-
- bool isMuted;
- audio_webrtc_instance_handle_t* audioInstHandle;
- struct{
- void* ptr;
- int size;
- int index;
- } buffer;
+typedef struct audio_producer_webrtc_s {
+ TDAV_DECLARE_PRODUCER_AUDIO;
+
+ bool isMuted;
+ audio_webrtc_instance_handle_t* audioInstHandle;
+ struct {
+ void* ptr;
+ int size;
+ int index;
+ } buffer;
}
audio_producer_webrtc_t;
int audio_producer_webrtc_handle_data_10ms(const audio_producer_webrtc_t* _self, const void* audioSamples, int nSamples, int nBytesPerSample, int samplesPerSec, int nChannels)
{
- if(!_self || !audioSamples || !nSamples){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if((nSamples != (samplesPerSec / 100))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
- return -2;
- }
- if((nBytesPerSample != (TMEDIA_PRODUCER(_self)->audio.bits_per_sample >> 3))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
- return -3;
- }
- if((nChannels != TMEDIA_PRODUCER(_self)->audio.channels)){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not the expected number of channels", nChannels);
- return -4;
- }
-
- int nSamplesInBits = (nSamples * nBytesPerSample);
- if(_self->buffer.index + nSamplesInBits > _self->buffer.size){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Buffer overflow");
- return -5;
- }
-
- audio_producer_webrtc_t* self = const_cast<audio_producer_webrtc_t*>(_self);
-
- memcpy((((uint8_t*)self->buffer.ptr) + self->buffer.index), audioSamples, nSamplesInBits);
- self->buffer.index += nSamplesInBits;
-
- if(self->buffer.index == self->buffer.size){
- self->buffer.index = 0;
- if(TMEDIA_PRODUCER(self)->enc_cb.callback){
- if(self->isMuted){
- memset(self->buffer.ptr, 0, self->buffer.size);
- }
- TMEDIA_PRODUCER(self)->enc_cb.callback(TMEDIA_PRODUCER(self)->enc_cb.callback_data, self->buffer.ptr, self->buffer.size);
- }
- }
-
- return 0;
+ if(!_self || !audioSamples || !nSamples) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if((nSamples != (samplesPerSec / 100))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Not producing 10ms samples (nSamples=%d, samplesPerSec=%d)", nSamples, samplesPerSec);
+ return -2;
+ }
+ if((nBytesPerSample != (TMEDIA_PRODUCER(_self)->audio.bits_per_sample >> 3))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not valid bytes/samples", nBytesPerSample);
+ return -3;
+ }
+ if((nChannels != TMEDIA_PRODUCER(_self)->audio.channels)) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("%d not the expected number of channels", nChannels);
+ return -4;
+ }
+
+ int nSamplesInBits = (nSamples * nBytesPerSample);
+ if(_self->buffer.index + nSamplesInBits > _self->buffer.size) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Buffer overflow");
+ return -5;
+ }
+
+ audio_producer_webrtc_t* self = const_cast<audio_producer_webrtc_t*>(_self);
+
+ memcpy((((uint8_t*)self->buffer.ptr) + self->buffer.index), audioSamples, nSamplesInBits);
+ self->buffer.index += nSamplesInBits;
+
+ if(self->buffer.index == self->buffer.size) {
+ self->buffer.index = 0;
+ if(TMEDIA_PRODUCER(self)->enc_cb.callback) {
+ if(self->isMuted) {
+ memset(self->buffer.ptr, 0, self->buffer.size);
+ }
+ TMEDIA_PRODUCER(self)->enc_cb.callback(TMEDIA_PRODUCER(self)->enc_cb.callback_data, self->buffer.ptr, self->buffer.size);
+ }
+ }
+
+ return 0;
}
/* ============ Media Producer Interface ================= */
static int audio_producer_webrtc_set(tmedia_producer_t* _self, const tmedia_param_t* param)
-{
- audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
- if(param->plugin_type == tmedia_ppt_producer){
- if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "mute")){
- self->isMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
- return 0;
- }
- }
- }
- return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
+{
+ audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
+ if(param->plugin_type == tmedia_ppt_producer) {
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "mute")) {
+ self->isMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+ return 0;
+ }
+ }
+ }
+ return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
}
static int audio_producer_webrtc_prepare(tmedia_producer_t* _self, const tmedia_codec_t* codec)
{
- audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
- if(!self || !codec){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- // create audio instance
- if(!(self->audioInstHandle = audio_webrtc_instance_create(TMEDIA_PRODUCER(self)->session_id))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio instance handle");
- return -2;
- }
-
- // check that ptime is mutiple of 10
- if((codec->plugin->audio.ptime % 10)){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("ptime=%d not multiple of 10", codec->plugin->audio.ptime);
- return -3;
- }
-
- // init input parameters from the codec
- TMEDIA_PRODUCER(self)->audio.channels = codec->plugin->audio.channels;
- TMEDIA_PRODUCER(self)->audio.rate = codec->plugin->rate;
- TMEDIA_PRODUCER(self)->audio.ptime = codec->plugin->audio.ptime;
-
- // prepare playout device and update output parameters
- int ret;
- ret = audio_webrtc_instance_prepare_producer(self->audioInstHandle, &_self);
-
- // now that the producer is prepared we can initialize internal buffer using device caps
- if(ret == 0){
- // allocate buffer
- int xsize = ((TMEDIA_PRODUCER(self)->audio.ptime * TMEDIA_PRODUCER(self)->audio.rate) / 1000) * (TMEDIA_PRODUCER(self)->audio.bits_per_sample >> 3);
- if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
- self->buffer.size = 0;
- return -1;
- }
- self->buffer.size = xsize;
- self->buffer.index = 0;
- }
- return ret;
+ audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
+ if(!self || !codec) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ // create audio instance
+ if(!(self->audioInstHandle = audio_webrtc_instance_create(TMEDIA_PRODUCER(self)->session_id))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to create audio instance handle");
+ return -2;
+ }
+
+ // check that ptime is a multiple of 10
+ if((codec->plugin->audio.ptime % 10)) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("ptime=%d not multiple of 10", codec->plugin->audio.ptime);
+ return -3;
+ }
+
+ // init input parameters from the codec
+ TMEDIA_PRODUCER(self)->audio.channels = codec->plugin->audio.channels;
+ TMEDIA_PRODUCER(self)->audio.rate = codec->plugin->rate;
+ TMEDIA_PRODUCER(self)->audio.ptime = codec->plugin->audio.ptime;
+
+ // prepare recording device and update producer parameters
+ int ret;
+ ret = audio_webrtc_instance_prepare_producer(self->audioInstHandle, &_self);
+
+ // now that the producer is prepared we can initialize internal buffer using device caps
+ if(ret == 0) {
+ // allocate buffer
+ int xsize = ((TMEDIA_PRODUCER(self)->audio.ptime * TMEDIA_PRODUCER(self)->audio.rate) / 1000) * (TMEDIA_PRODUCER(self)->audio.bits_per_sample >> 3);
+ if(!(self->buffer.ptr = tsk_realloc(self->buffer.ptr, xsize))) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Failed to allocate buffer with size = %d", xsize);
+ self->buffer.size = 0;
+ return -1;
+ }
+ self->buffer.size = xsize;
+ self->buffer.index = 0;
+ }
+ return ret;
}
static int audio_producer_webrtc_start(tmedia_producer_t* _self)
{
- audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
- if(!self){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
+ if(!self) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return audio_webrtc_instance_start_producer(self->audioInstHandle);
+ return audio_webrtc_instance_start_producer(self->audioInstHandle);
}
static int audio_producer_webrtc_pause(tmedia_producer_t* self)
{
- return 0;
+ return 0;
}
static int audio_producer_webrtc_stop(tmedia_producer_t* _self)
{
- audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
- if(!self){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ audio_producer_webrtc_t* self = (audio_producer_webrtc_t*)_self;
+ if(!self) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return audio_webrtc_instance_stop_producer(self->audioInstHandle);
+ return audio_webrtc_instance_stop_producer(self->audioInstHandle);
}
@@ -174,54 +173,52 @@ static int audio_producer_webrtc_stop(tmedia_producer_t* _self)
/* constructor */
static tsk_object_t* audio_producer_webrtc_ctor(tsk_object_t *_self, va_list * app)
{
- audio_producer_webrtc_t *self = (audio_producer_webrtc_t *)_self;
- if(self){
- /* init base */
- tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(self));
- /* init self */
-
- }
- return self;
+ audio_producer_webrtc_t *self = (audio_producer_webrtc_t *)_self;
+ if(self) {
+ /* init base */
+ tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(self));
+ /* init self */
+
+ }
+ return self;
}
/* destructor */
static tsk_object_t* audio_producer_webrtc_dtor(tsk_object_t *_self)
-{
- audio_producer_webrtc_t *self = (audio_producer_webrtc_t *)_self;
- if(self){
- /* stop */
- audio_producer_webrtc_stop(TMEDIA_PRODUCER(self));
- /* deinit self */
- if(self->audioInstHandle){
- audio_webrtc_instance_destroy(&self->audioInstHandle);
- }
- TSK_FREE(self->buffer.ptr);
-
- /* deinit base */
- tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(self));
- }
-
- return self;
+{
+ audio_producer_webrtc_t *self = (audio_producer_webrtc_t *)_self;
+ if(self) {
+ /* stop */
+ audio_producer_webrtc_stop(TMEDIA_PRODUCER(self));
+ /* deinit self */
+ if(self->audioInstHandle) {
+ audio_webrtc_instance_destroy(&self->audioInstHandle);
+ }
+ TSK_FREE(self->buffer.ptr);
+
+ /* deinit base */
+ tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(self));
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t audio_producer_webrtc_def_s =
-{
- sizeof(audio_producer_webrtc_t),
- audio_producer_webrtc_ctor,
- audio_producer_webrtc_dtor,
- tdav_producer_audio_cmp,
+static const tsk_object_def_t audio_producer_webrtc_def_s = {
+ sizeof(audio_producer_webrtc_t),
+ audio_producer_webrtc_ctor,
+ audio_producer_webrtc_dtor,
+ tdav_producer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t audio_producer_webrtc_plugin_def_s =
-{
- &audio_producer_webrtc_def_s,
-
- tmedia_audio,
- "WebRTC audio producer",
-
- audio_producer_webrtc_set,
- audio_producer_webrtc_prepare,
- audio_producer_webrtc_start,
- audio_producer_webrtc_pause,
- audio_producer_webrtc_stop
+static const tmedia_producer_plugin_def_t audio_producer_webrtc_plugin_def_s = {
+ &audio_producer_webrtc_def_s,
+
+ tmedia_audio,
+ "WebRTC audio producer",
+
+ audio_producer_webrtc_set,
+ audio_producer_webrtc_prepare,
+ audio_producer_webrtc_start,
+ audio_producer_webrtc_pause,
+ audio_producer_webrtc_stop
};
const tmedia_producer_plugin_def_t *audio_producer_webrtc_plugin_def_t = &audio_producer_webrtc_plugin_def_s;
\ No newline at end of file
diff --git a/plugins/audio_webrtc/audio_webrtc_producer.h b/plugins/audio_webrtc/audio_webrtc_producer.h
index 49adf0d..0e1defd 100755
--- a/plugins/audio_webrtc/audio_webrtc_producer.h
+++ b/plugins/audio_webrtc/audio_webrtc_producer.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/audio_webrtc/audio_webrtc_transport.cxx b/plugins/audio_webrtc/audio_webrtc_transport.cxx
index 470e4e7..a9b50ed 100755
--- a/plugins/audio_webrtc/audio_webrtc_transport.cxx
+++ b/plugins/audio_webrtc/audio_webrtc_transport.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -33,15 +33,15 @@ AudioTransportImpl::AudioTransportImpl(AudioDeviceModule* audioDevice) :
_microphoneBoost(false),
_microphoneAGC(false),
_loopBackMeasurements(false),
- _consumer(tsk_null),
- _producer(tsk_null)
+ _consumer(tsk_null),
+ _producer(tsk_null)
{
-
+
}
AudioTransportImpl::~AudioTransportImpl()
{
-
+
}
void AudioTransportImpl::SetFullDuplex(bool enable)
@@ -60,11 +60,11 @@ WebRtc_Word32 AudioTransportImpl::RecordedDataIsAvailable(
const WebRtc_UWord32 currentMicLevel,
WebRtc_UWord32& newMicLevel)
{
- if(!_producer){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No wrapped producer");
- return 0;
- }
- return audio_producer_webrtc_handle_data_10ms(_producer, audioSamples, nSamples, nBytesPerSample, samplesPerSec, nChannels);
+ if(!_producer) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No wrapped producer");
+ return 0;
+ }
+ return audio_producer_webrtc_handle_data_10ms(_producer, audioSamples, nSamples, nBytesPerSample, samplesPerSec, nChannels);
}
@@ -76,9 +76,9 @@ WebRtc_Word32 AudioTransportImpl::NeedMorePlayData(
void* audioSamples,
WebRtc_UWord32& nSamplesOut)
{
- if(!_consumer){
- DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No wrapped consumer");
- return 0;
- }
- return audio_consumer_webrtc_get_data_10ms(_consumer, audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, nSamplesOut);
+ if(!_consumer) {
+ DOUBANGO_AUDIO_WEBRTC_DEBUG_ERROR("No wrapped consumer");
+ return 0;
+ }
+ return audio_consumer_webrtc_get_data_10ms(_consumer, audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, nSamplesOut);
}
\ No newline at end of file
diff --git a/plugins/audio_webrtc/audio_webrtc_transport.h b/plugins/audio_webrtc/audio_webrtc_transport.h
index 6d98ab5..07e5ac8 100755
--- a/plugins/audio_webrtc/audio_webrtc_transport.h
+++ b/plugins/audio_webrtc/audio_webrtc_transport.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -26,15 +26,15 @@ class AudioTransportImpl: public webrtc::AudioTransport
{
public:
virtual WebRtc_Word32
- RecordedDataIsAvailable(const void* audioSamples,
- const WebRtc_UWord32 nSamples,
- const WebRtc_UWord8 nBytesPerSample,
- const WebRtc_UWord8 nChannels,
- const WebRtc_UWord32 samplesPerSec,
- const WebRtc_UWord32 totalDelayMS,
- const WebRtc_Word32 clockDrift,
- const WebRtc_UWord32 currentMicLevel,
- WebRtc_UWord32& newMicLevel);
+ RecordedDataIsAvailable(const void* audioSamples,
+ const WebRtc_UWord32 nSamples,
+ const WebRtc_UWord8 nBytesPerSample,
+ const WebRtc_UWord8 nChannels,
+ const WebRtc_UWord32 samplesPerSec,
+ const WebRtc_UWord32 totalDelayMS,
+ const WebRtc_Word32 clockDrift,
+ const WebRtc_UWord32 currentMicLevel,
+ WebRtc_UWord32& newMicLevel);
virtual WebRtc_Word32 NeedMorePlayData(const WebRtc_UWord32 nSamples,
const WebRtc_UWord8 nBytesPerSample,
@@ -43,63 +43,54 @@ public:
void* audioSamples,
WebRtc_UWord32& nSamplesOut);
- AudioTransportImpl(webrtc::AudioDeviceModule* audioDevice);
+ AudioTransportImpl(webrtc::AudioDeviceModule* audioDevice);
~AudioTransportImpl();
public:
void SetFullDuplex(bool enable);
- void SetSpeakerVolume(bool enable)
- {
+ void SetSpeakerVolume(bool enable) {
_speakerVolume = enable;
}
;
- void SetSpeakerMute(bool enable)
- {
+ void SetSpeakerMute(bool enable) {
_speakerMute = enable;
}
;
- void SetMicrophoneMute(bool enable)
- {
+ void SetMicrophoneMute(bool enable) {
_microphoneMute = enable;
}
;
- void SetMicrophoneVolume(bool enable)
- {
+ void SetMicrophoneVolume(bool enable) {
_microphoneVolume = enable;
}
;
- void SetMicrophoneBoost(bool enable)
- {
+ void SetMicrophoneBoost(bool enable) {
_microphoneBoost = enable;
}
;
- void SetLoopbackMeasurements(bool enable)
- {
+ void SetLoopbackMeasurements(bool enable) {
_loopBackMeasurements = enable;
}
;
- void SetMicrophoneAGC(bool enable)
- {
+ void SetMicrophoneAGC(bool enable) {
_microphoneAGC = enable;
}
;
- void SetConsumer(const struct audio_consumer_webrtc_s* consumer)
- {
+ void SetConsumer(const struct audio_consumer_webrtc_s* consumer) {
_consumer = consumer;
}
;
- void SetProducer(const struct audio_producer_webrtc_s* producer)
- {
+ void SetProducer(const struct audio_producer_webrtc_s* producer) {
_producer = producer;
}
;
private:
webrtc::AudioDeviceModule* _audioDevice;
- const struct audio_consumer_webrtc_s* _consumer; // mut be const and must not take reference
- const struct audio_producer_webrtc_s* _producer; // mut be const and must not take reference
+ const struct audio_consumer_webrtc_s* _consumer; // must be const and must not take a reference
+ const struct audio_producer_webrtc_s* _producer; // must be const and must not take a reference
bool _fullDuplex;
bool _speakerVolume;
diff --git a/plugins/audio_webrtc/dllmain.cxx b/plugins/audio_webrtc/dllmain.cxx
index 8a319bc..06fc5ff 100755
--- a/plugins/audio_webrtc/dllmain.cxx
+++ b/plugins/audio_webrtc/dllmain.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -24,16 +24,15 @@
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
- )
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- case DLL_THREAD_ATTACH:
- case DLL_THREAD_DETACH:
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ case DLL_THREAD_ATTACH:
+ case DLL_THREAD_DETACH:
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
diff --git a/plugins/pluginCUDA/dllmain_cuda.cxx b/plugins/pluginCUDA/dllmain_cuda.cxx
index 57c3ffd..a3d6967 100755
--- a/plugins/pluginCUDA/dllmain_cuda.cxx
+++ b/plugins/pluginCUDA/dllmain_cuda.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -54,84 +54,78 @@ PLUGIN_CUDA_END_DECLS /* END */
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
- )
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- break;
- case DLL_THREAD_ATTACH:
- break;
- case DLL_THREAD_DETACH:
- break;
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
-typedef enum PLUGIN_INDEX_E
-{
+typedef enum PLUGIN_INDEX_E {
#if PLUGIN_CUDA_H264_ENABLE
- PLUGIN_INDEX_CODEC_H264_MAIN,
- PLUGIN_INDEX_CODEC_H264_BASE,
+ PLUGIN_INDEX_CODEC_H264_MAIN,
+ PLUGIN_INDEX_CODEC_H264_BASE,
#endif
-
- PLUGIN_INDEX_COUNT
+
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- return CudaUtils::IsH264Supported() ? PLUGIN_INDEX_COUNT : 0;
+ return CudaUtils::IsH264Supported() ? PLUGIN_INDEX_COUNT : 0;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
#if PLUGIN_CUDA_H264_ENABLE
- switch(index){
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return CudaUtils::IsH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return CudaUtils::IsH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
+ }
+ }
#endif
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
#if PLUGIN_CUDA_H264_ENABLE
- switch(index){
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return CudaUtils::IsH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return CudaUtils::IsH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+ }
+ }
#endif
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
#if PLUGIN_CUDA_H264_ENABLE
- switch(index){
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- {
- return CudaUtils::IsH264Supported() ? cuda_codec_h264_main_plugin_def_t : tsk_null;
- }
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return CudaUtils::IsH264Supported() ? cuda_codec_h264_base_plugin_def_t : tsk_null;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_CODEC_H264_MAIN: {
+ return CudaUtils::IsH264Supported() ? cuda_codec_h264_main_plugin_def_t : tsk_null;
+ }
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return CudaUtils::IsH264Supported() ? cuda_codec_h264_base_plugin_def_t : tsk_null;
+ }
+ }
#endif
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
}
diff --git a/plugins/pluginCUDA/plugin_cuda_codec_h264.cxx b/plugins/pluginCUDA/plugin_cuda_codec_h264.cxx
index b2c8e2e..4bc1d53 100755
--- a/plugins/pluginCUDA/plugin_cuda_codec_h264.cxx
+++ b/plugins/pluginCUDA/plugin_cuda_codec_h264.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -43,52 +43,51 @@
#include <cuda.h>
#include <Windows.h>
-typedef struct cuda_codec_h264_s
-{
- TDAV_DECLARE_CODEC_H264_COMMON;
-
- // Encoder
- struct{
- NVEncoder pInst;
- NVEncoderParams ctxParams;
- NVVE_CallbackParams clbParams;
- void* pBufferPtr;
- tsk_size_t nBufferSize;
- int64_t frame_count;
- tsk_bool_t force_idr;
- int32_t quality; // [1-31]
- int rotation;
- int neg_width;
- int neg_height;
- int neg_fps;
- int max_bitrate_bps;
- int32_t max_bw_kpbs;
- tsk_bool_t passthrough; // whether to bypass encoding
- } encoder;
-
- // decoder
- struct{
- CUvideodecoder pInst;
- CUVIDDECODECREATEINFO cuInfo;
- CUvideoparser cuParser;
- CUVIDPARSERPARAMS cuPaserParams;
- CUdevice cuDevice;
- IDirect3D9 *pD3D9;
- IDirect3DDevice9 *pD3D9Device;
- CUcontext cuContext;
- struct {
- void *pcuPtr; // MUST bee freed using cuMemFreeHost()
- tsk_size_t nSize;
- tsk_size_t nPitch;
- tsk_bool_t bAvail;
- } cuBuffer;
- void* accumulator;
- tsk_size_t accumulator_pos;
- tsk_size_t accumulator_size;
- uint16_t last_seq;
- tsk_bool_t passthrough; // whether to bypass decoding
- tsk_mutex_handle_t *phMutex;
- } decoder;
+typedef struct cuda_codec_h264_s {
+ TDAV_DECLARE_CODEC_H264_COMMON;
+
+ // Encoder
+ struct {
+ NVEncoder pInst;
+ NVEncoderParams ctxParams;
+ NVVE_CallbackParams clbParams;
+ void* pBufferPtr;
+ tsk_size_t nBufferSize;
+ int64_t frame_count;
+ tsk_bool_t force_idr;
+ int32_t quality; // [1-31]
+ int rotation;
+ int neg_width;
+ int neg_height;
+ int neg_fps;
+ int max_bitrate_bps;
+ int32_t max_bw_kpbs;
+ tsk_bool_t passthrough; // whether to bypass encoding
+ } encoder;
+
+ // decoder
+ struct {
+ CUvideodecoder pInst;
+ CUVIDDECODECREATEINFO cuInfo;
+ CUvideoparser cuParser;
+ CUVIDPARSERPARAMS cuPaserParams;
+ CUdevice cuDevice;
+ IDirect3D9 *pD3D9;
+ IDirect3DDevice9 *pD3D9Device;
+ CUcontext cuContext;
+ struct {
+ void *pcuPtr; // MUST be freed using cuMemFreeHost()
+ tsk_size_t nSize;
+ tsk_size_t nPitch;
+ tsk_bool_t bAvail;
+ } cuBuffer;
+ void* accumulator;
+ tsk_size_t accumulator_pos;
+ tsk_size_t accumulator_size;
+ uint16_t last_seq;
+ tsk_bool_t passthrough; // whether to bypass decoding
+ tsk_mutex_handle_t *phMutex;
+ } decoder;
}
cuda_codec_h264_t;
@@ -120,405 +119,368 @@ static void CUDAAPI _NVCallback_HandleOnEndFrame(const NVVE_EndFrameInfo *pefi,
static int cuda_codec_h264_set(tmedia_codec_t* self, const tmedia_param_t* param)
{
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
- if(!self->opened){
- TSK_DEBUG_ERROR("Codec not opened");
- return -1;
- }
- if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "action")){
- tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
- switch(action){
- case tmedia_codec_action_encode_idr:
- {
- h264->encoder.force_idr = tsk_true;
- break;
- }
- case tmedia_codec_action_bw_down:
- {
- h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
- break;
- }
- case tmedia_codec_action_bw_up:
- {
- h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
- break;
- }
- }
- return 0;
- }
- else if(tsk_striequals(param->key, "bypass-encoding")){
- h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
- TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
- return 0;
- }
- else if(tsk_striequals(param->key, "bypass-decoding")){
- h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
- TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
- return 0;
- }
- else if(tsk_striequals(param->key, "rotation")){
- int rotation = *((int32_t*)param->value);
- if(h264->encoder.rotation != rotation){
- if(self->opened){
- int ret;
- h264->encoder.rotation = rotation;
- if((ret = cuda_codec_h264_close_encoder(h264))){
- return ret;
- }
- if((ret = cuda_codec_h264_open_encoder(h264))){
- return ret;
- }
- }
- }
- return 0;
- }
- }
- return -1;
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+ if(!self->opened) {
+ TSK_DEBUG_ERROR("Codec not opened");
+ return -1;
+ }
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "action")) {
+ tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+ switch(action) {
+ case tmedia_codec_action_encode_idr: {
+ h264->encoder.force_idr = tsk_true;
+ break;
+ }
+ case tmedia_codec_action_bw_down: {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
+ break;
+ }
+ case tmedia_codec_action_bw_up: {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
+ break;
+ }
+ }
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-encoding")) {
+ h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-decoding")) {
+ h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "rotation")) {
+ int rotation = *((int32_t*)param->value);
+ if(h264->encoder.rotation != rotation) {
+ if(self->opened) {
+ int ret;
+ h264->encoder.rotation = rotation;
+ if((ret = cuda_codec_h264_close_encoder(h264))) {
+ return ret;
+ }
+ if((ret = cuda_codec_h264_open_encoder(h264))) {
+ return ret;
+ }
+ }
+ }
+ return 0;
+ }
+ }
+ return -1;
}
static int cuda_codec_h264_open(tmedia_codec_t* self)
{
- int ret;
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
-
- if(!h264){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- /* the caller (base class) already checked that the codec is not opened */
-
- // Encoder
- if((ret = cuda_codec_h264_open_encoder(h264))){
- return ret;
- }
-
- // Decoder
- if((ret = cuda_codec_h264_open_decoder(h264))){
- return ret;
- }
-
- return 0;
+ int ret;
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+
+ if(!h264) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ /* the caller (base class) already checked that the codec is not opened */
+
+ // Encoder
+ if((ret = cuda_codec_h264_open_encoder(h264))) {
+ return ret;
+ }
+
+ // Decoder
+ if((ret = cuda_codec_h264_open_decoder(h264))) {
+ return ret;
+ }
+
+ return 0;
}
static int cuda_codec_h264_close(tmedia_codec_t* self)
{
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
- if(!h264){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!h264) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- /* the caller (base class) alreasy checked that the codec is opened */
+ /* the caller (base class) already checked that the codec is opened */
- // Encoder
- cuda_codec_h264_close_encoder(h264);
+ // Encoder
+ cuda_codec_h264_close_encoder(h264);
- // Decoder
- cuda_codec_h264_close_decoder(h264);
+ // Decoder
+ cuda_codec_h264_close_decoder(h264);
- return 0;
+ return 0;
}
static tsk_size_t cuda_codec_h264_encode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size)
{
- int ret = 0;
- NVVE_EncodeFrameParams efparams;
- tsk_bool_t send_idr, send_hdr;
- unsigned long flags = 0;
-
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
-
- if(!self || !in_data || !in_size)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
-
- if(h264->encoder.passthrough) {
- tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
- return 0;
- }
-
- if((h264->encoder.ctxParams.iOutputSize[1] * h264->encoder.ctxParams.iOutputSize[0] * 3) >> 1 != in_size)
- {
- /* guard */
- TSK_DEBUG_ERROR("Invalid size");
- return 0;
- }
-
- if(!self->opened || !h264->encoder.pInst /*|| !h264->encoder.pInst->IsReady()*/)
- {
- TSK_DEBUG_ERROR("Encoder not opened or not ready");
- return 0;
- }
-
- if(h264->encoder.passthrough)
- {
- tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), (const uint8_t*)in_data, in_size);
- return 0;
- }
-
- HRESULT hr = S_OK;
-
- efparams.Width = h264->encoder.ctxParams.iOutputSize[0];
- efparams.Height = h264->encoder.ctxParams.iOutputSize[1];
- efparams.Pitch = (h264->encoder.ctxParams.nDeviceMemPitch ? h264->encoder.ctxParams.nDeviceMemPitch : h264->encoder.ctxParams.iOutputSize[0]);
- efparams.PictureStruc = (NVVE_PicStruct)h264->encoder.ctxParams.iPictureType;
+ int ret = 0;
+ NVVE_EncodeFrameParams efparams;
+ tsk_bool_t send_idr, send_hdr;
+ unsigned long flags = 0;
+
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self || !in_data || !in_size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(h264->encoder.passthrough) {
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
+ return 0;
+ }
+
+ if((h264->encoder.ctxParams.iOutputSize[1] * h264->encoder.ctxParams.iOutputSize[0] * 3) >> 1 != in_size) {
+ /* guard */
+ TSK_DEBUG_ERROR("Invalid size");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst /*|| !h264->encoder.pInst->IsReady()*/) {
+ TSK_DEBUG_ERROR("Encoder not opened or not ready");
+ return 0;
+ }
+
+ if(h264->encoder.passthrough) {
+ tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), (const uint8_t*)in_data, in_size);
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+
+ efparams.Width = h264->encoder.ctxParams.iOutputSize[0];
+ efparams.Height = h264->encoder.ctxParams.iOutputSize[1];
+ efparams.Pitch = (h264->encoder.ctxParams.nDeviceMemPitch ? h264->encoder.ctxParams.nDeviceMemPitch : h264->encoder.ctxParams.iOutputSize[0]);
+ efparams.PictureStruc = (NVVE_PicStruct)h264->encoder.ctxParams.iPictureType;
efparams.SurfFmt = (NVVE_SurfaceFormat)h264->encoder.ctxParams.iSurfaceFormat;
- efparams.progressiveFrame = (h264->encoder.ctxParams.iSurfaceFormat == 3) ? 1 : 0;
+ efparams.progressiveFrame = (h264->encoder.ctxParams.iSurfaceFormat == 3) ? 1 : 0;
efparams.repeatFirstField = 0;
- efparams.topfieldfirst = (h264->encoder.ctxParams.iSurfaceFormat == 1) ? 1 : 0;
- efparams.picBuf = (unsigned char *)in_data;
- efparams.bLast = 0;
-
- // send IDR for:
- // - the first frame
- // - remote peer requested an IDR
- // - every second within the first 4seconds
- send_idr = (
- h264->encoder.frame_count++ == 0
- || h264 ->encoder.force_idr
- || ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
- );
-
- if(send_idr)
- {
- flags |= 0x04; // FORCE IDR
- }
-
- // send SPS and PPS headers for:
- // - IDR frames (not required but it's the easiest way to deal with pkt loss)
- // - every 5 seconds after the first 4seconds
- send_hdr = (
- send_idr
- || ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
- );
- if(send_hdr)
- {
- if(h264->encoder.ctxParams.iDisableSPSPPS)
- {
- unsigned char SPSPPSBuff[1024];
- int SPSPPSBuffSize = sizeof(SPSPPSBuff);
- hr = NVGetSPSPPS(h264->encoder.pInst, SPSPPSBuff, SPSPPSBuffSize, &SPSPPSBuffSize);
- if(SUCCEEDED(hr))
- {
- int size = 0;
- while(size < SPSPPSBuffSize - 2)
- {
- int16_t next_size = ((int16_t)SPSPPSBuff[size])<<1 | ((int16_t)SPSPPSBuff[size + 1]);
- tdav_codec_h264_rtp_encap(common, &SPSPPSBuff[size + 2], next_size);
- size += next_size + 2;
- }
- }
- else
- {
- TSK_DEBUG_ERROR("NVGetSPSPPS failed with error code = %08x", hr)
- }
- }
- }
-
- // Encode data
- CHECK_HR(hr = NVEncodeFrame(h264->encoder.pInst, &efparams, flags, NULL));
-
- // reset
- h264->encoder.force_idr = tsk_false;
+ efparams.topfieldfirst = (h264->encoder.ctxParams.iSurfaceFormat == 1) ? 1 : 0;
+ efparams.picBuf = (unsigned char *)in_data;
+ efparams.bLast = 0;
+
+ // send IDR for:
+ // - the first frame
+ // - remote peer requested an IDR
+ // - every second within the first 4 seconds
+ send_idr = (
+ h264->encoder.frame_count++ == 0
+ || h264->encoder.force_idr
+ || ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
+ );
+
+ if(send_idr) {
+ flags |= 0x04; // FORCE IDR
+ }
+
+ // send SPS and PPS headers for:
+ // - IDR frames (not required but it's the easiest way to deal with pkt loss)
+ // - every 5 seconds after the first 4 seconds
+ send_hdr = (
+ send_idr
+ || ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
+ );
+ if(send_hdr) {
+ if(h264->encoder.ctxParams.iDisableSPSPPS) {
+ unsigned char SPSPPSBuff[1024];
+ int SPSPPSBuffSize = sizeof(SPSPPSBuff);
+ hr = NVGetSPSPPS(h264->encoder.pInst, SPSPPSBuff, SPSPPSBuffSize, &SPSPPSBuffSize);
+ if(SUCCEEDED(hr)) {
+ int size = 0;
+ while(size < SPSPPSBuffSize - 2) {
+ int16_t next_size = ((int16_t)SPSPPSBuff[size])<<1 | ((int16_t)SPSPPSBuff[size + 1]);
+ tdav_codec_h264_rtp_encap(common, &SPSPPSBuff[size + 2], next_size);
+ size += next_size + 2;
+ }
+ }
+ else {
+ TSK_DEBUG_ERROR("NVGetSPSPPS failed with error code = %08x", hr);
+ }
+ }
+ }
+
+ // Encode data
+ CHECK_HR(hr = NVEncodeFrame(h264->encoder.pInst, &efparams, flags, NULL));
+
+ // reset
+ h264->encoder.force_idr = tsk_false;
bail:
- return 0;
+ return 0;
}
static tsk_size_t cuda_codec_h264_decode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size, const tsk_object_t* proto_hdr)
{
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
- const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
-
- const uint8_t* pay_ptr = tsk_null;
- tsk_size_t pay_size = 0;
- int ret;
- tsk_bool_t append_scp;
- tsk_bool_t sps_or_pps;
- tsk_size_t retsize = 0, size_to_copy = 0;
- static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
- static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
-
- if(!h264 || !in_data || !in_size || !out_data)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
-
- if(!self->opened || !h264->encoder.pInst)
- {
- TSK_DEBUG_ERROR("Decoder not opened or not ready");
- return 0;
- }
-
- HRESULT hr = S_OK;
-
- /* Packet lost? */
- if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq)
- {
- TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
- }
- h264->decoder.last_seq = rtp_hdr->seq_num;
-
-
- /* 5.3. NAL Unit Octet Usage
- +---------------+
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)self;
+ const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
+
+ const uint8_t* pay_ptr = tsk_null;
+ tsk_size_t pay_size = 0;
+ int ret;
+ tsk_bool_t append_scp;
+ tsk_bool_t sps_or_pps;
+ tsk_size_t retsize = 0, size_to_copy = 0;
+ static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
+ static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
+
+ if(!h264 || !in_data || !in_size || !out_data) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst) {
+ TSK_DEBUG_ERROR("Decoder not opened or not ready");
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+
+ /* Packet lost? */
+ if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq) {
+ TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
+ }
+ h264->decoder.last_seq = rtp_hdr->seq_num;
+
+
+ /* 5.3. NAL Unit Octet Usage
+ +---------------+
|0|1|2|3|4|5|6|7|
+-+-+-+-+-+-+-+-+
|F|NRI| Type |
+---------------+
- */
- if(*((uint8_t*)in_data) & 0x80)
- {
- TSK_DEBUG_WARN("F=1");
- /* reset accumulator */
- h264->decoder.accumulator = 0;
- return 0;
- }
-
- /* get payload */
- if((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp)) || !pay_ptr || !pay_size)
- {
- TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
- return 0;
- }
- //append_scp = tsk_true;
- size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
- // whether it's SPS or PPS (append_scp is false for subsequent FUA chuncks)
- sps_or_pps = append_scp && pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
-
- // start-accumulator
- if(!h264->decoder.accumulator)
- {
- if(size_to_copy > xmax_size)
- {
- TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", size_to_copy, xmax_size);
- return 0;
- }
- if(!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t))))
- {
- TSK_DEBUG_ERROR("Failed to allocated new buffer");
- return 0;
- }
- h264->decoder.accumulator_size = size_to_copy;
- }
- if((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size)
- {
- TSK_DEBUG_ERROR("BufferOverflow");
- h264->decoder.accumulator_pos = 0;
- return 0;
- }
- if((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size)
- {
- if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy))))
- {
- TSK_DEBUG_ERROR("Failed to reallocated new buffer");
- h264->decoder.accumulator_pos = 0;
- h264->decoder.accumulator_size = 0;
- return 0;
- }
- h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
- }
-
- if(append_scp)
- {
- memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
- h264->decoder.accumulator_pos += start_code_prefix_size;
- }
- memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
- h264->decoder.accumulator_pos += pay_size;
- // end-accumulator
-
-
- if(sps_or_pps)
- {
- // http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
- // SPS and PPS should be bundled with IDR
- TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
- }
- else if(rtp_hdr->marker)
- {
- if(h264->decoder.passthrough)
- {
- if(*out_max_size < h264->decoder.accumulator_pos)
- {
- if((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos)))
- {
- *out_max_size = h264->decoder.accumulator_pos;
- }
- else
- {
- *out_max_size = 0;
- return 0;
- }
- }
- memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
- retsize = h264->decoder.accumulator_pos;
- }
- else
- {
- // !h264->decoder.passthrough
- CUVIDSOURCEDATAPACKET pkt;
- CUresult cuResult;
- pkt.flags = 0;
- pkt.payload_size = (unsigned long) h264->decoder.accumulator_pos;
- pkt.payload = (unsigned char *)h264->decoder.accumulator;
- pkt.timestamp = 0;
-
- // reset accumulator
- h264->decoder.accumulator_pos = 0;
- cuResult = cuvidParseVideoData(h264->decoder.cuParser, &pkt);
- if(cuResult != CUDA_SUCCESS)
- {
- TSK_DEBUG_ERROR("cuvidParseVideoData() failed with error code = %d", (int)cuResult);
- CHECK_HR(hr = E_FAIL);
- }
-
- if(h264->decoder.cuBuffer.bAvail)
- {
- h264->decoder.cuBuffer.bAvail = tsk_false;
- if((retsize = _cuda_codec_h264_pict_layout(h264, out_data, out_max_size)) == 0)
- {
- TSK_DEBUG_ERROR("_cuda_codec_h264_pict_layout failed");
- CHECK_HR(hr = E_FAIL);
- }
- }
- }// else(!h264->decoder.passthrough)
- } // else if(rtp_hdr->marker)
+ */
+ if(*((uint8_t*)in_data) & 0x80) {
+ TSK_DEBUG_WARN("F=1");
+ /* reset accumulator */
+ h264->decoder.accumulator = 0;
+ return 0;
+ }
+
+ /* get payload */
+ if((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp)) || !pay_ptr || !pay_size) {
+ TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
+ return 0;
+ }
+ //append_scp = tsk_true;
+ size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
+ // whether it's SPS or PPS (append_scp is false for subsequent FU-A chunks)
+ sps_or_pps = append_scp && pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
+
+ // start-accumulator
+ if(!h264->decoder.accumulator) {
+ if(size_to_copy > xmax_size) {
+ TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", size_to_copy, xmax_size);
+ return 0;
+ }
+ if(!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t)))) {
+ TSK_DEBUG_ERROR("Failed to allocate new buffer");
+ return 0;
+ }
+ h264->decoder.accumulator_size = size_to_copy;
+ }
+ if((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size) {
+ TSK_DEBUG_ERROR("BufferOverflow");
+ h264->decoder.accumulator_pos = 0;
+ return 0;
+ }
+ if((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size) {
+ if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy)))) {
+ TSK_DEBUG_ERROR("Failed to reallocate new buffer");
+ h264->decoder.accumulator_pos = 0;
+ h264->decoder.accumulator_size = 0;
+ return 0;
+ }
+ h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
+ }
+
+ if(append_scp) {
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
+ h264->decoder.accumulator_pos += start_code_prefix_size;
+ }
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
+ h264->decoder.accumulator_pos += pay_size;
+ // end-accumulator
+
+
+ if(sps_or_pps) {
+ // http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
+ // SPS and PPS should be bundled with IDR
+ TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
+ }
+ else if(rtp_hdr->marker) {
+ if(h264->decoder.passthrough) {
+ if(*out_max_size < h264->decoder.accumulator_pos) {
+ if((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos))) {
+ *out_max_size = h264->decoder.accumulator_pos;
+ }
+ else {
+ *out_max_size = 0;
+ return 0;
+ }
+ }
+ memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
+ retsize = h264->decoder.accumulator_pos;
+ }
+ else {
+ // !h264->decoder.passthrough
+ CUVIDSOURCEDATAPACKET pkt;
+ CUresult cuResult;
+ pkt.flags = 0;
+ pkt.payload_size = (unsigned long) h264->decoder.accumulator_pos;
+ pkt.payload = (unsigned char *)h264->decoder.accumulator;
+ pkt.timestamp = 0;
+
+ // reset accumulator
+ h264->decoder.accumulator_pos = 0;
+ cuResult = cuvidParseVideoData(h264->decoder.cuParser, &pkt);
+ if(cuResult != CUDA_SUCCESS) {
+ TSK_DEBUG_ERROR("cuvidParseVideoData() failed with error code = %d", (int)cuResult);
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(h264->decoder.cuBuffer.bAvail) {
+ h264->decoder.cuBuffer.bAvail = tsk_false;
+ if((retsize = _cuda_codec_h264_pict_layout(h264, out_data, out_max_size)) == 0) {
+ TSK_DEBUG_ERROR("_cuda_codec_h264_pict_layout failed");
+ CHECK_HR(hr = E_FAIL);
+ }
+ }
+ }// else(!h264->decoder.passthrough)
+ } // else if(rtp_hdr->marker)
bail:
- if(FAILED(hr))
- {
- TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
- if(TMEDIA_CODEC_VIDEO(self)->in.callback)
- {
- TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
- TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
- TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
- }
- }
- return retsize;
+ if(FAILED(hr)) {
+ TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
+ if(TMEDIA_CODEC_VIDEO(self)->in.callback) {
+ TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
+ TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+ TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+ }
+ }
+ return retsize;
}
static tsk_bool_t cuda_codec_h264_sdp_att_match(const tmedia_codec_t* self, const char* att_name, const char* att_value)
{
- return tdav_codec_h264_common_sdp_att_match((tdav_codec_h264_common_t*)self, att_name, att_value);
+ return tdav_codec_h264_common_sdp_att_match((tdav_codec_h264_common_t*)self, att_name, att_value);
}
static char* cuda_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char* att_name)
{
- char* att = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
- if(att && tsk_striequals(att_name, "fmtp")) {
- tsk_strcat(&att, "; impl=CUDA");
- }
- return att;
+ char* att = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
+ if(att && tsk_striequals(att_name, "fmtp")) {
+ tsk_strcat(&att, "; impl=CUDA");
+ }
+ return att;
}
@@ -529,63 +491,61 @@ static char* cuda_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char*
/* constructor */
static tsk_object_t* cuda_codec_h264_base_ctor(tsk_object_t * self, va_list * app)
{
- cuda_codec_h264_t *h264 = (cuda_codec_h264_t*)self;
- if(h264){
- /* init base: called by tmedia_codec_create() */
- /* init self */
- if(cuda_codec_h264_init(h264, profile_idc_baseline) != 0){
- return tsk_null;
- }
- }
- return self;
+ cuda_codec_h264_t *h264 = (cuda_codec_h264_t*)self;
+ if(h264) {
+ /* init base: called by tmedia_codec_create() */
+ /* init self */
+ if(cuda_codec_h264_init(h264, profile_idc_baseline) != 0) {
+ return tsk_null;
+ }
+ }
+ return self;
}
/* destructor */
static tsk_object_t* cuda_codec_h264_base_dtor(tsk_object_t * self)
-{
- cuda_codec_h264_t *h264 = (cuda_codec_h264_t*)self;
- if(h264){
- /* deinit base */
- tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
- /* deinit self */
- cuda_codec_h264_deinit(h264);
- }
-
- return self;
+{
+ cuda_codec_h264_t *h264 = (cuda_codec_h264_t*)self;
+ if(h264) {
+ /* deinit base */
+ tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+ /* deinit self */
+ cuda_codec_h264_deinit(h264);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t cuda_codec_h264_base_def_s =
-{
- sizeof(cuda_codec_h264_t),
- cuda_codec_h264_base_ctor,
- cuda_codec_h264_base_dtor,
- tmedia_codec_cmp,
+static const tsk_object_def_t cuda_codec_h264_base_def_s = {
+ sizeof(cuda_codec_h264_t),
+ cuda_codec_h264_base_ctor,
+ cuda_codec_h264_base_dtor,
+ tmedia_codec_cmp,
};
/* plugin definition*/
-static const tmedia_codec_plugin_def_t cuda_codec_h264_base_plugin_def_s =
-{
- &cuda_codec_h264_base_def_s,
-
- tmedia_video,
- tmedia_codec_id_h264_bp,
- "H264",
- "H264 Base Profile (NVIDIA CUDA)",
- TMEDIA_CODEC_FORMAT_H264_BP,
- tsk_true,
- 90000, // rate
-
- /* audio */
- { 0 },
-
- /* video (width, height, fps) */
- {176, 144, 0}, // fps is @deprecated
-
- cuda_codec_h264_set,
- cuda_codec_h264_open,
- cuda_codec_h264_close,
- cuda_codec_h264_encode,
- cuda_codec_h264_decode,
- cuda_codec_h264_sdp_att_match,
- cuda_codec_h264_sdp_att_get
+static const tmedia_codec_plugin_def_t cuda_codec_h264_base_plugin_def_s = {
+ &cuda_codec_h264_base_def_s,
+
+ tmedia_video,
+ tmedia_codec_id_h264_bp,
+ "H264",
+ "H264 Base Profile (NVIDIA CUDA)",
+ TMEDIA_CODEC_FORMAT_H264_BP,
+ tsk_true,
+ 90000, // rate
+
+ /* audio */
+ { 0 },
+
+ /* video (width, height, fps) */
+ {176, 144, 0}, // fps is @deprecated
+
+ cuda_codec_h264_set,
+ cuda_codec_h264_open,
+ cuda_codec_h264_close,
+ cuda_codec_h264_encode,
+ cuda_codec_h264_decode,
+ cuda_codec_h264_sdp_att_match,
+ cuda_codec_h264_sdp_att_get
};
const tmedia_codec_plugin_def_t *cuda_codec_h264_base_plugin_def_t = &cuda_codec_h264_base_plugin_def_s;
@@ -594,64 +554,62 @@ const tmedia_codec_plugin_def_t *cuda_codec_h264_base_plugin_def_t = &cuda_codec
/* constructor */
static tsk_object_t* cuda_codec_h264_main_ctor(tsk_object_t * self, va_list * app)
{
- cuda_codec_h264_t *h264 = (cuda_codec_h264_t*)self;
- if(h264){
- /* init base: called by tmedia_codec_create() */
- /* init self */
- if(cuda_codec_h264_init(h264, profile_idc_main) != 0){
- return tsk_null;
- }
- }
- return self;
+ cuda_codec_h264_t *h264 = (cuda_codec_h264_t*)self;
+ if(h264) {
+ /* init base: called by tmedia_codec_create() */
+ /* init self */
+ if(cuda_codec_h264_init(h264, profile_idc_main) != 0) {
+ return tsk_null;
+ }
+ }
+ return self;
}
/* destructor */
static tsk_object_t* cuda_codec_h264_main_dtor(tsk_object_t * self)
-{
- cuda_codec_h264_t *h264 = (cuda_codec_h264_t*)self;
- if(h264){
- /* deinit base */
- tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
- /* deinit self */
- cuda_codec_h264_deinit(h264);
-
- }
-
- return self;
+{
+ cuda_codec_h264_t *h264 = (cuda_codec_h264_t*)self;
+ if(h264) {
+ /* deinit base */
+ tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+ /* deinit self */
+ cuda_codec_h264_deinit(h264);
+
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t cuda_codec_h264_main_def_s =
-{
- sizeof(cuda_codec_h264_t),
- cuda_codec_h264_main_ctor,
- cuda_codec_h264_main_dtor,
- tmedia_codec_cmp,
+static const tsk_object_def_t cuda_codec_h264_main_def_s = {
+ sizeof(cuda_codec_h264_t),
+ cuda_codec_h264_main_ctor,
+ cuda_codec_h264_main_dtor,
+ tmedia_codec_cmp,
};
/* plugin definition*/
-static const tmedia_codec_plugin_def_t cuda_codec_h264_main_plugin_def_s =
-{
- &cuda_codec_h264_main_def_s,
-
- tmedia_video,
- tmedia_codec_id_h264_mp,
- "H264",
- "H264 Main Profile (NVIDIA CUDA)",
- TMEDIA_CODEC_FORMAT_H264_MP,
- tsk_true,
- 90000, // rate
-
- /* audio */
- { 0 },
-
- /* video (width, height, fps)*/
- {176, 144, 0},// fps is @deprecated
-
- cuda_codec_h264_set,
- cuda_codec_h264_open,
- cuda_codec_h264_close,
- cuda_codec_h264_encode,
- cuda_codec_h264_decode,
- cuda_codec_h264_sdp_att_match,
- cuda_codec_h264_sdp_att_get
+static const tmedia_codec_plugin_def_t cuda_codec_h264_main_plugin_def_s = {
+ &cuda_codec_h264_main_def_s,
+
+ tmedia_video,
+ tmedia_codec_id_h264_mp,
+ "H264",
+ "H264 Main Profile (NVIDIA CUDA)",
+ TMEDIA_CODEC_FORMAT_H264_MP,
+ tsk_true,
+ 90000, // rate
+
+ /* audio */
+ { 0 },
+
+ /* video (width, height, fps)*/
+ {176, 144, 0},// fps is @deprecated
+
+ cuda_codec_h264_set,
+ cuda_codec_h264_open,
+ cuda_codec_h264_close,
+ cuda_codec_h264_encode,
+ cuda_codec_h264_decode,
+ cuda_codec_h264_sdp_att_match,
+ cuda_codec_h264_sdp_att_get
};
const tmedia_codec_plugin_def_t *cuda_codec_h264_main_plugin_def_t = &cuda_codec_h264_main_plugin_def_s;
@@ -663,272 +621,333 @@ const tmedia_codec_plugin_def_t *cuda_codec_h264_main_plugin_def_t = &cuda_codec
int cuda_codec_h264_open_encoder(cuda_codec_h264_t* self)
{
- HRESULT hr = S_OK;
- int32_t max_bw_kpbs;
- int bestGPU = 0, gpuPerf = 0;
- static int low_latency = 1;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
-
- if(self->encoder.pInst)
- {
- TSK_DEBUG_ERROR("Encoder already initialized");
+ HRESULT hr = S_OK;
+ int32_t max_bw_kpbs;
+ int bestGPU = 0, gpuPerf = 0;
+ static int low_latency = 1;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(self->encoder.pInst) {
+ TSK_DEBUG_ERROR("Encoder already initialized");
#if defined(E_ILLEGAL_METHOD_CALL)
- CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+ CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
#else
- CHECK_HR(hr = 0x8000000EL);
+ CHECK_HR(hr = 0x8000000EL);
#endif
- }
-
- memset(&self->encoder.clbParams, 0, sizeof(self->encoder.clbParams));
- memset(&self->encoder.ctxParams, 0, sizeof(self->encoder.ctxParams));
-
- // create encoder
- CHECK_HR(hr = NVCreateEncoder(&self->encoder.pInst));
- CHECK_HR(hr = NVSetCodec(self->encoder.pInst, NV_CODEC_TYPE_H264));
- CHECK_HR(hr = NVSetDefaultParam(self->encoder.pInst));
-
- CHECK_HR(hr = NVGetParamValue(self->encoder.pInst, NVVE_GET_GPU_COUNT, &self->encoder.ctxParams.GPU_count));
- {
- int temp = 0, deviceCount;
- for (deviceCount=0; deviceCount < self->encoder.ctxParams.GPU_count; deviceCount++)
- {
- NVVE_GPUAttributes GPUAttributes = {0};
-
- GPUAttributes.iGpuOrdinal = deviceCount;
- hr = NVGetParamValue(self->encoder.pInst, NVVE_GET_GPU_ATTRIBUTES, &GPUAttributes);
- if(FAILED(hr))
- {
- TSK_DEBUG_ERROR("NVGetParamValue(NVVE_GET_GPU_ATTRIBUTES) failed with error code = %08x", hr);
- continue;
- }
-
- temp = GPUAttributes.iClockRate * GPUAttributes.iMultiProcessorCount;
- temp = temp * CudaUtils::ConvertSMVer2Cores(GPUAttributes.iMajor, GPUAttributes.iMinor);
-
- if(temp > gpuPerf)
- {
- gpuPerf = temp;
- bestGPU = deviceCount;
- }
- }
- }
-
- self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
- self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
- self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
- max_bw_kpbs = TSK_CLAMP(
- 0,
- tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
- self->encoder.max_bw_kpbs
- );
- self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
-
- TSK_DEBUG_INFO("[H.264 CUDA Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
- self->encoder.neg_width,
- self->encoder.neg_height,
- self->encoder.neg_fps,
- self->encoder.max_bitrate_bps
- );
-
- self->encoder.ctxParams.iForcedGPU = bestGPU;
- self->encoder.ctxParams.iInputSize[0] = self->encoder.neg_width;
- self->encoder.ctxParams.iInputSize[1] = self->encoder.neg_height;
- self->encoder.ctxParams.iOutputSize[0] = self->encoder.neg_width;
- self->encoder.ctxParams.iOutputSize[1] = self->encoder.neg_height;
- self->encoder.ctxParams.GPUOffloadLevel= NVVE_GPU_OFFLOAD_ALL;
- self->encoder.ctxParams.iSurfaceFormat = (int)IYUV;
- self->encoder.ctxParams.iPictureType = (int)FRAME_PICTURE;
- self->encoder.ctxParams.Fieldmode = MODE_FRAME;
- self->encoder.ctxParams.Presets = (NVVE_PRESETS_TARGET)-1;//Should be iPod, Zune ...
- // self->encoder.ctxParams.iP_Interval = 1;
- self->encoder.ctxParams.iAspectRatio[0] = 1;
- self->encoder.ctxParams.iAspectRatio[1] = 1;
- self->encoder.ctxParams.iAspectRatio[2] = 0;
- self->encoder.ctxParams.iIDR_Period = (self->encoder.neg_fps * PLUGIN_CUDA_H264_GOP_SIZE_IN_SECONDS);
- self->encoder.ctxParams.iUseDeviceMem = 0;
- self->encoder.ctxParams.iDynamicGOP = 0;
- self->encoder.ctxParams.RCType = RC_CBR;
- self->encoder.ctxParams.iAvgBitrate = self->encoder.max_bitrate_bps;
- self->encoder.ctxParams.iPeakBitrate = self->encoder.max_bitrate_bps;
- self->encoder.ctxParams.iQP_Level_Intra = 25;
- self->encoder.ctxParams.iQP_Level_InterP = 28;
- self->encoder.ctxParams.iQP_Level_InterB = 31;
- self->encoder.ctxParams.iFrameRate[0] = self->encoder.neg_fps;
- self->encoder.ctxParams.iFrameRate[1] = 1;
- self->encoder.ctxParams.iDeblockMode = 1;
- self->encoder.ctxParams.iForceIntra = 0;
- self->encoder.ctxParams.iForceIDR = 0;
- self->encoder.ctxParams.iClearStat = 0;
- self->encoder.ctxParams.DIMode = DI_MEDIAN;
- self->encoder.ctxParams.iDisableSPSPPS = 1; // Do not include SPS/PPS frames
- self->encoder.ctxParams.iNaluFramingType = 0; // StartCodes
- self->encoder.ctxParams.iMultiGPU = 1;
- switch(common->profile)
- {
- case profile_idc_baseline:
- {
- self->encoder.ctxParams.iDisableCabac = 1;
- self->encoder.ctxParams.iProfileLevel = 0xff42; // 0xff -> autoselect level
- break;
- }
- case profile_idc_main:
- {
- self->encoder.ctxParams.iDisableCabac = 0;
- self->encoder.ctxParams.iProfileLevel = 0xff4d; // 0xff -> autoselect level
- break;
- }
- default:
- {
- CHECK_HR(hr = E_NOTIMPL);
- break;
- }
- }
-
- //
- // Allocate memory
- //
- self->encoder.nBufferSize = (self->encoder.ctxParams.iOutputSize[1] * self->encoder.ctxParams.iOutputSize[0] * 3) >> 4;
- if(!self->encoder.pBufferPtr && !(self->encoder.pBufferPtr = tsk_realloc(self->encoder.pBufferPtr, self->encoder.nBufferSize)))
- {
- self->encoder.nBufferSize = 0;
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
-
- //
- // Set parameters
- //
- hr = NVSetParamValue(self->encoder.pInst, NVVE_FORCE_GPU_SELECTION, &self->encoder.ctxParams.iForcedGPU);
- if(FAILED(hr))
- {
- TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_GPU_SELECTION) failed with error code = %08x", hr);
- }
- CHECK_HR(hr = NVSetParamValue(self->encoder.pInst, NVVE_DEVICE_MEMORY_INPUT, &(self->encoder.ctxParams.iUseDeviceMem)));
- hr = NVSetParamValue(self->encoder.pInst,NVVE_OUT_SIZE, &(self->encoder.ctxParams.iOutputSize)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_OUT_SIZE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_IN_SIZE, &(self->encoder.ctxParams.iInputSize)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_IN_SIZE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_MULTI_GPU, &(self->encoder.ctxParams.iMultiGPU)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_MULTI_GPU) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_ASPECT_RATIO, &(self->encoder.ctxParams.iAspectRatio));if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_ASPECT_RATIO) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_FIELD_ENC_MODE, &(self->encoder.ctxParams.Fieldmode)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_FIELD_ENC_MODE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_P_INTERVAL, &(self->encoder.ctxParams.iP_Interval)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_P_INTERVAL) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_IDR_PERIOD, &(self->encoder.ctxParams.iIDR_Period)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_IDR_PERIOD) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_DYNAMIC_GOP, &(self->encoder.ctxParams.iDynamicGOP)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_DYNAMIC_GOP) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_RC_TYPE, &(self->encoder.ctxParams.RCType)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_RC_TYPE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_AVG_BITRATE, &(self->encoder.ctxParams.iAvgBitrate)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_AVG_BITRATE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_PEAK_BITRATE, &(self->encoder.ctxParams.iPeakBitrate)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_PEAK_BITRATE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTRA, &(self->encoder.ctxParams.iQP_Level_Intra)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_OUT_SIZE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTER_P,&(self->encoder.ctxParams.iQP_Level_InterP)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_QP_LEVEL_INTER_P) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTER_B,&(self->encoder.ctxParams.iQP_Level_InterB)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_QP_LEVEL_INTER_B) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_FRAME_RATE, &(self->encoder.ctxParams.iFrameRate)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_FRAME_RATE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_DEBLOCK_MODE, &(self->encoder.ctxParams.iDeblockMode)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_DEBLOCK_MODE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_PROFILE_LEVEL, &(self->encoder.ctxParams.iProfileLevel)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_PROFILE_LEVEL) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_FORCE_INTRA, &(self->encoder.ctxParams.iForceIntra)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_INTRA) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_FORCE_IDR, &(self->encoder.ctxParams.iForceIDR)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_IDR) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_CLEAR_STAT, &(self->encoder.ctxParams.iClearStat)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_CLEAR_STAT) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_SET_DEINTERLACE,&(self->encoder.ctxParams.DIMode)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_SET_DEINTERLACE) failed with error code = %08x", hr); }
- if (self->encoder.ctxParams.Presets != -1)
- {
- hr = NVSetParamValue(self->encoder.pInst,NVVE_PRESETS, &(self->encoder.ctxParams.Presets)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_PRESETS) failed with error code = %08x", hr); }
- }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_DISABLE_CABAC, &(self->encoder.ctxParams.iDisableCabac)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_DISABLE_CABAC) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_CONFIGURE_NALU_FRAMING_TYPE, &(self->encoder.ctxParams.iNaluFramingType)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_CONFIGURE_NALU_FRAMING_TYPE) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_DISABLE_SPS_PPS,&(self->encoder.ctxParams.iDisableSPSPPS)); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_DISABLE_SPS_PPS) failed with error code = %08x", hr); }
- hr = NVSetParamValue(self->encoder.pInst,NVVE_LOW_LATENCY,&low_latency); if (hr!=S_OK) { TSK_DEBUG_WARN("NVSetParamValue(NVVE_LOW_LATENCY) failed with error code = %08x", hr); }
-
- self->encoder.clbParams.pfnacquirebitstream = _NVCallback_HandleAcquireBitStream;
- self->encoder.clbParams.pfnonbeginframe = _NVCallback_HandleOnBeginFrame;
- self->encoder.clbParams.pfnonendframe = _NVCallback_HandleOnEndFrame;
- self->encoder.clbParams.pfnreleasebitstream = _NVCallback_HandleReleaseBitStream;
- NVRegisterCB(self->encoder.pInst, self->encoder.clbParams, self);
-
-
- CHECK_HR(hr = NVCreateHWEncoder(self->encoder.pInst));
-
+ }
+
+ memset(&self->encoder.clbParams, 0, sizeof(self->encoder.clbParams));
+ memset(&self->encoder.ctxParams, 0, sizeof(self->encoder.ctxParams));
+
+ // create encoder
+ CHECK_HR(hr = NVCreateEncoder(&self->encoder.pInst));
+ CHECK_HR(hr = NVSetCodec(self->encoder.pInst, NV_CODEC_TYPE_H264));
+ CHECK_HR(hr = NVSetDefaultParam(self->encoder.pInst));
+
+ CHECK_HR(hr = NVGetParamValue(self->encoder.pInst, NVVE_GET_GPU_COUNT, &self->encoder.ctxParams.GPU_count));
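+    // Scan the available GPUs and keep the one with the highest estimated throughput
+    // (core clock x multiprocessor count x cores per SM); its ordinal is forced on the
+    // encoder below through NVVE_FORCE_GPU_SELECTION.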
+ {
+ int temp = 0, deviceCount;
+ for (deviceCount=0; deviceCount < self->encoder.ctxParams.GPU_count; deviceCount++) {
+ NVVE_GPUAttributes GPUAttributes = {0};
+
+ GPUAttributes.iGpuOrdinal = deviceCount;
+ hr = NVGetParamValue(self->encoder.pInst, NVVE_GET_GPU_ATTRIBUTES, &GPUAttributes);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("NVGetParamValue(NVVE_GET_GPU_ATTRIBUTES) failed with error code = %08x", hr);
+ continue;
+ }
+
+ temp = GPUAttributes.iClockRate * GPUAttributes.iMultiProcessorCount;
+ temp = temp * CudaUtils::ConvertSMVer2Cores(GPUAttributes.iMajor, GPUAttributes.iMinor);
+
+ if(temp > gpuPerf) {
+ gpuPerf = temp;
+ bestGPU = deviceCount;
+ }
+ }
+ }
+
+ self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
+ self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
+ self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
+ max_bw_kpbs = TSK_CLAMP(
+ 0,
+ tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
+ self->encoder.max_bw_kpbs
+ );
+ self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
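+    // Target bitrate: estimate from the negotiated size/fps, clamp to the configured
+    // maximum upload bandwidth, then convert from kbps to bps for the NVVE_AVG/PEAK_BITRATE params.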
+
+ TSK_DEBUG_INFO("[H.264 CUDA Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
+ self->encoder.neg_width,
+ self->encoder.neg_height,
+ self->encoder.neg_fps,
+ self->encoder.max_bitrate_bps
+ );
+
+ self->encoder.ctxParams.iForcedGPU = bestGPU;
+ self->encoder.ctxParams.iInputSize[0] = self->encoder.neg_width;
+ self->encoder.ctxParams.iInputSize[1] = self->encoder.neg_height;
+ self->encoder.ctxParams.iOutputSize[0] = self->encoder.neg_width;
+ self->encoder.ctxParams.iOutputSize[1] = self->encoder.neg_height;
+ self->encoder.ctxParams.GPUOffloadLevel= NVVE_GPU_OFFLOAD_ALL;
+ self->encoder.ctxParams.iSurfaceFormat = (int)IYUV;
+ self->encoder.ctxParams.iPictureType = (int)FRAME_PICTURE;
+ self->encoder.ctxParams.Fieldmode = MODE_FRAME;
+    self->encoder.ctxParams.Presets = (NVVE_PRESETS_TARGET)-1; // -1 = no preset (preset values target devices such as iPod, Zune, ...)
+ // self->encoder.ctxParams.iP_Interval = 1;
+ self->encoder.ctxParams.iAspectRatio[0] = 1;
+ self->encoder.ctxParams.iAspectRatio[1] = 1;
+ self->encoder.ctxParams.iAspectRatio[2] = 0;
+ self->encoder.ctxParams.iIDR_Period = (self->encoder.neg_fps * PLUGIN_CUDA_H264_GOP_SIZE_IN_SECONDS);
+ self->encoder.ctxParams.iUseDeviceMem = 0;
+ self->encoder.ctxParams.iDynamicGOP = 0;
+ self->encoder.ctxParams.RCType = RC_CBR;
+ self->encoder.ctxParams.iAvgBitrate = self->encoder.max_bitrate_bps;
+ self->encoder.ctxParams.iPeakBitrate = self->encoder.max_bitrate_bps;
+ self->encoder.ctxParams.iQP_Level_Intra = 25;
+ self->encoder.ctxParams.iQP_Level_InterP = 28;
+ self->encoder.ctxParams.iQP_Level_InterB = 31;
+ self->encoder.ctxParams.iFrameRate[0] = self->encoder.neg_fps;
+ self->encoder.ctxParams.iFrameRate[1] = 1;
+ self->encoder.ctxParams.iDeblockMode = 1;
+ self->encoder.ctxParams.iForceIntra = 0;
+ self->encoder.ctxParams.iForceIDR = 0;
+ self->encoder.ctxParams.iClearStat = 0;
+ self->encoder.ctxParams.DIMode = DI_MEDIAN;
+ self->encoder.ctxParams.iDisableSPSPPS = 1; // Do not include SPS/PPS frames
+ self->encoder.ctxParams.iNaluFramingType = 0; // StartCodes
+ self->encoder.ctxParams.iMultiGPU = 1;
+ switch(common->profile) {
+ case profile_idc_baseline: {
+ self->encoder.ctxParams.iDisableCabac = 1;
+ self->encoder.ctxParams.iProfileLevel = 0xff42; // 0xff -> autoselect level
+ break;
+ }
+ case profile_idc_main: {
+ self->encoder.ctxParams.iDisableCabac = 0;
+ self->encoder.ctxParams.iProfileLevel = 0xff4d; // 0xff -> autoselect level
+ break;
+ }
+ default: {
+ CHECK_HR(hr = E_NOTIMPL);
+ break;
+ }
+ }
+
+ //
+ // Allocate memory
+ //
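+    // Bitstream output buffer handed to the acquire-bitstream callback: sized at
+    // (width * height * 3) / 16 bytes, i.e. one eighth of a raw YUV 4:2:0 frame,
+    // presumably as an empirical upper bound on a single compressed frame.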
+ self->encoder.nBufferSize = (self->encoder.ctxParams.iOutputSize[1] * self->encoder.ctxParams.iOutputSize[0] * 3) >> 4;
+ if(!self->encoder.pBufferPtr && !(self->encoder.pBufferPtr = tsk_realloc(self->encoder.pBufferPtr, self->encoder.nBufferSize))) {
+ self->encoder.nBufferSize = 0;
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ //
+ // Set parameters
+ //
+ hr = NVSetParamValue(self->encoder.pInst, NVVE_FORCE_GPU_SELECTION, &self->encoder.ctxParams.iForcedGPU);
+ if(FAILED(hr)) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_GPU_SELECTION) failed with error code = %08x", hr);
+ }
+ CHECK_HR(hr = NVSetParamValue(self->encoder.pInst, NVVE_DEVICE_MEMORY_INPUT, &(self->encoder.ctxParams.iUseDeviceMem)));
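+    // NVVE_DEVICE_MEMORY_INPUT is mandatory (CHECK_HR jumps to bail on failure); the
+    // remaining parameters are best-effort and only logged as warnings when rejected.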
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_OUT_SIZE, &(self->encoder.ctxParams.iOutputSize));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_OUT_SIZE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_IN_SIZE, &(self->encoder.ctxParams.iInputSize));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_IN_SIZE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_MULTI_GPU, &(self->encoder.ctxParams.iMultiGPU));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_MULTI_GPU) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_ASPECT_RATIO, &(self->encoder.ctxParams.iAspectRatio));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_ASPECT_RATIO) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_FIELD_ENC_MODE, &(self->encoder.ctxParams.Fieldmode));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_FIELD_ENC_MODE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_P_INTERVAL, &(self->encoder.ctxParams.iP_Interval));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_P_INTERVAL) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_IDR_PERIOD, &(self->encoder.ctxParams.iIDR_Period));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_IDR_PERIOD) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_DYNAMIC_GOP, &(self->encoder.ctxParams.iDynamicGOP));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_DYNAMIC_GOP) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_RC_TYPE, &(self->encoder.ctxParams.RCType));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_RC_TYPE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_AVG_BITRATE, &(self->encoder.ctxParams.iAvgBitrate));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_AVG_BITRATE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_PEAK_BITRATE, &(self->encoder.ctxParams.iPeakBitrate));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_PEAK_BITRATE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTRA, &(self->encoder.ctxParams.iQP_Level_Intra));
+ if (hr!=S_OK) {
+        TSK_DEBUG_WARN("NVSetParamValue(NVVE_QP_LEVEL_INTRA) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTER_P,&(self->encoder.ctxParams.iQP_Level_InterP));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_QP_LEVEL_INTER_P) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_QP_LEVEL_INTER_B,&(self->encoder.ctxParams.iQP_Level_InterB));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_QP_LEVEL_INTER_B) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_FRAME_RATE, &(self->encoder.ctxParams.iFrameRate));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_FRAME_RATE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_DEBLOCK_MODE, &(self->encoder.ctxParams.iDeblockMode));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_DEBLOCK_MODE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_PROFILE_LEVEL, &(self->encoder.ctxParams.iProfileLevel));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_PROFILE_LEVEL) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_FORCE_INTRA, &(self->encoder.ctxParams.iForceIntra));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_INTRA) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_FORCE_IDR, &(self->encoder.ctxParams.iForceIDR));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_FORCE_IDR) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_CLEAR_STAT, &(self->encoder.ctxParams.iClearStat));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_CLEAR_STAT) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_SET_DEINTERLACE,&(self->encoder.ctxParams.DIMode));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_SET_DEINTERLACE) failed with error code = %08x", hr);
+ }
+ if (self->encoder.ctxParams.Presets != -1) {
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_PRESETS, &(self->encoder.ctxParams.Presets));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_PRESETS) failed with error code = %08x", hr);
+ }
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_DISABLE_CABAC, &(self->encoder.ctxParams.iDisableCabac));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_DISABLE_CABAC) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_CONFIGURE_NALU_FRAMING_TYPE, &(self->encoder.ctxParams.iNaluFramingType));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_CONFIGURE_NALU_FRAMING_TYPE) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_DISABLE_SPS_PPS,&(self->encoder.ctxParams.iDisableSPSPPS));
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_DISABLE_SPS_PPS) failed with error code = %08x", hr);
+ }
+ hr = NVSetParamValue(self->encoder.pInst,NVVE_LOW_LATENCY,&low_latency);
+ if (hr!=S_OK) {
+ TSK_DEBUG_WARN("NVSetParamValue(NVVE_LOW_LATENCY) failed with error code = %08x", hr);
+ }
+
+ self->encoder.clbParams.pfnacquirebitstream = _NVCallback_HandleAcquireBitStream;
+ self->encoder.clbParams.pfnonbeginframe = _NVCallback_HandleOnBeginFrame;
+ self->encoder.clbParams.pfnonendframe = _NVCallback_HandleOnEndFrame;
+ self->encoder.clbParams.pfnreleasebitstream = _NVCallback_HandleReleaseBitStream;
+ NVRegisterCB(self->encoder.pInst, self->encoder.clbParams, self);
+
+
+ CHECK_HR(hr = NVCreateHWEncoder(self->encoder.pInst));
+
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
int cuda_codec_h264_close_encoder(cuda_codec_h264_t* self)
{
- if(self)
- {
- if(self->encoder.pInst)
- {
- NVDestroyEncoder(self->encoder.pInst);
- self->encoder.pInst = NULL;
- }
- if(self->encoder.pBufferPtr)
- {
- TSK_FREE(self->encoder.pBufferPtr);
- self->encoder.nBufferSize = 0;
- }
- self->encoder.frame_count = 0;
- }
-
- return 0;
+ if(self) {
+ if(self->encoder.pInst) {
+ NVDestroyEncoder(self->encoder.pInst);
+ self->encoder.pInst = NULL;
+ }
+ if(self->encoder.pBufferPtr) {
+ TSK_FREE(self->encoder.pBufferPtr);
+ self->encoder.nBufferSize = 0;
+ }
+ self->encoder.frame_count = 0;
+ }
+
+ return 0;
}
int cuda_codec_h264_open_decoder(cuda_codec_h264_t* self)
{
- HRESULT hr = S_OK;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
- int i, adapterCount;
- CUresult cuResult;
- D3DPRESENT_PARAMETERS d3dpp;
-
- if(self->decoder.pInst || self->decoder.cuDevice || self->decoder.cuContext || self->decoder.pD3D9 || self->decoder.pD3D9Device)
- {
- TSK_DEBUG_ERROR("Decoder already initialized");
+ HRESULT hr = S_OK;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+ int i, adapterCount;
+ CUresult cuResult;
+ D3DPRESENT_PARAMETERS d3dpp;
+
+ if(self->decoder.pInst || self->decoder.cuDevice || self->decoder.cuContext || self->decoder.pD3D9 || self->decoder.pD3D9Device) {
+ TSK_DEBUG_ERROR("Decoder already initialized");
#if defined(E_ILLEGAL_METHOD_CALL)
- CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+ CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
#else
- CHECK_HR(hr = 0x8000000EL);
+ CHECK_HR(hr = 0x8000000EL);
#endif
- }
-
- TSK_DEBUG_INFO("[H.264 MF Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
- TMEDIA_CODEC_VIDEO(self)->in.width,
- TMEDIA_CODEC_VIDEO(self)->in.height,
- TMEDIA_CODEC_VIDEO(self)->in.fps
- );
-
- memset(&self->decoder.cuInfo, 0, sizeof(self->decoder.cuInfo));
- self->decoder.cuInfo.ulCreationFlags = cudaVideoCreate_PreferCUDA;
- self->decoder.cuInfo.CodecType = cudaVideoCodec_H264;
- self->decoder.cuInfo.ulWidth = TMEDIA_CODEC_VIDEO(self)->in.width;
- self->decoder.cuInfo.ulTargetWidth = TMEDIA_CODEC_VIDEO(self)->in.width;
- self->decoder.cuInfo.ulHeight = TMEDIA_CODEC_VIDEO(self)->in.height;
- self->decoder.cuInfo.ulTargetHeight = TMEDIA_CODEC_VIDEO(self)->in.height;
- self->decoder.cuInfo.ulNumDecodeSurfaces = PLUGIN_CUDA_H264_MAX_FRM_CNT;
- self->decoder.cuInfo.ulNumOutputSurfaces = 1;
- self->decoder.cuInfo.ChromaFormat = cudaVideoChromaFormat_420;
- self->decoder.cuInfo.OutputFormat = cudaVideoSurfaceFormat_NV12;
- self->decoder.cuInfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Adaptive;
-
- self->decoder.cuDevice = CudaUtils::GetMaxGflopsDeviceId();
+ }
+
+    TSK_DEBUG_INFO("[H.264 CUDA Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
+ TMEDIA_CODEC_VIDEO(self)->in.width,
+ TMEDIA_CODEC_VIDEO(self)->in.height,
+ TMEDIA_CODEC_VIDEO(self)->in.fps
+ );
+
+ memset(&self->decoder.cuInfo, 0, sizeof(self->decoder.cuInfo));
+ self->decoder.cuInfo.ulCreationFlags = cudaVideoCreate_PreferCUDA;
+ self->decoder.cuInfo.CodecType = cudaVideoCodec_H264;
+ self->decoder.cuInfo.ulWidth = TMEDIA_CODEC_VIDEO(self)->in.width;
+ self->decoder.cuInfo.ulTargetWidth = TMEDIA_CODEC_VIDEO(self)->in.width;
+ self->decoder.cuInfo.ulHeight = TMEDIA_CODEC_VIDEO(self)->in.height;
+ self->decoder.cuInfo.ulTargetHeight = TMEDIA_CODEC_VIDEO(self)->in.height;
+ self->decoder.cuInfo.ulNumDecodeSurfaces = PLUGIN_CUDA_H264_MAX_FRM_CNT;
+ self->decoder.cuInfo.ulNumOutputSurfaces = 1;
+ self->decoder.cuInfo.ChromaFormat = cudaVideoChromaFormat_420;
+ self->decoder.cuInfo.OutputFormat = cudaVideoSurfaceFormat_NV12;
+ self->decoder.cuInfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Adaptive;
+
+ self->decoder.cuDevice = CudaUtils::GetMaxGflopsDeviceId();
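+    // Initial device pick; cuD3D9CtxCreate() below overwrites cuDevice with the CUDA
+    // device that backs whichever D3D9 adapter accepts the context.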
#if _DEBUG || DEBUG
- {
- int major, minor;
- size_t totalGlobalMem;
- char deviceName[256];
- cuDeviceComputeCapability(&major, &minor, self->decoder.cuDevice);
- cuDeviceGetName(deviceName, sizeof(deviceName), self->decoder.cuDevice);
- TSK_DEBUG_INFO("[CUDA H.264 decoder] Using GPU Device %d: %s has SM %d.%d compute capability", self->decoder.cuDevice, deviceName, major, minor);
-
- /*cutilDrvSafeCallNoSync(*/cuDeviceTotalMem(&totalGlobalMem, self->decoder.cuDevice)/*)*/;
- TSK_DEBUG_INFO("[CUDA H.264 decoder] Total amount of global memory in GPU device: %4.4f MB", (float)totalGlobalMem/(1024*1024));
- }
+ {
+ int major, minor;
+ size_t totalGlobalMem;
+ char deviceName[256];
+ cuDeviceComputeCapability(&major, &minor, self->decoder.cuDevice);
+ cuDeviceGetName(deviceName, sizeof(deviceName), self->decoder.cuDevice);
+ TSK_DEBUG_INFO("[CUDA H.264 decoder] Using GPU Device %d: %s has SM %d.%d compute capability", self->decoder.cuDevice, deviceName, major, minor);
+
+ /*cutilDrvSafeCallNoSync(*/cuDeviceTotalMem(&totalGlobalMem, self->decoder.cuDevice)/*)*/;
+ TSK_DEBUG_INFO("[CUDA H.264 decoder] Total amount of global memory in GPU device: %4.4f MB", (float)totalGlobalMem/(1024*1024));
+ }
#endif
- // create Direct3D instance
- self->decoder.pD3D9 = Direct3DCreate9(D3D_SDK_VERSION);
- if(!self->decoder.pD3D9)
- {
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- adapterCount = self->decoder.pD3D9->GetAdapterCount();
- for(i = 0; i < adapterCount; ++i)
- {
- ZeroMemory(&d3dpp, sizeof(d3dpp));
+ // create Direct3D instance
+ self->decoder.pD3D9 = Direct3DCreate9(D3D_SDK_VERSION);
+ if(!self->decoder.pD3D9) {
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ adapterCount = self->decoder.pD3D9->GetAdapterCount();
+ for(i = 0; i < adapterCount; ++i) {
+ ZeroMemory(&d3dpp, sizeof(d3dpp));
d3dpp.Windowed = TRUE;
d3dpp.BackBufferFormat = D3DFMT_X8R8G8B8;
d3dpp.BackBufferWidth = self->decoder.cuInfo.ulTargetWidth;
@@ -938,401 +957,359 @@ int cuda_codec_h264_open_decoder(cuda_codec_h264_t* self)
d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
d3dpp.Flags = D3DPRESENTFLAG_VIDEO;
hr = self->decoder.pD3D9->CreateDevice(i,
- D3DDEVTYPE_HAL,
- GetDesktopWindow(),
- D3DCREATE_FPU_PRESERVE | D3DCREATE_MULTITHREADED | D3DCREATE_HARDWARE_VERTEXPROCESSING,
- &d3dpp,
- &self->decoder.pD3D9Device);
- if(hr == S_OK)
- {
+ D3DDEVTYPE_HAL,
+ GetDesktopWindow(),
+ D3DCREATE_FPU_PRESERVE | D3DCREATE_MULTITHREADED | D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &d3dpp,
+ &self->decoder.pD3D9Device);
+ if(hr == S_OK) {
cuResult = cuD3D9CtxCreate(&self->decoder.cuContext, &self->decoder.cuDevice, 0, self->decoder.pD3D9Device);
- if(cuResult == CUDA_SUCCESS)
- {
+ if(cuResult == CUDA_SUCCESS) {
break;
- }
- SafeRelease(&self->decoder.pD3D9Device);
- if(self->decoder.cuContext)
- {
- cuCtxDestroy(self->decoder.cuContext);
- self->decoder.cuContext = NULL;
- }
+ }
+ SafeRelease(&self->decoder.pD3D9Device);
+ if(self->decoder.cuContext) {
+ cuCtxDestroy(self->decoder.cuContext);
+ self->decoder.cuContext = NULL;
+ }
}
- }
+ }
- if(!self->decoder.pD3D9Device)
- {
- TSK_DEBUG_ERROR("Failed to create D3D9 device");
- CHECK_HR(hr = E_FAIL);
- }
+ if(!self->decoder.pD3D9Device) {
+ TSK_DEBUG_ERROR("Failed to create D3D9 device");
+ CHECK_HR(hr = E_FAIL);
+ }
- memset(&self->decoder.cuPaserParams, 0, sizeof(self->decoder.cuPaserParams));
- self->decoder.cuPaserParams.CodecType = cudaVideoCodec_H264;
+ memset(&self->decoder.cuPaserParams, 0, sizeof(self->decoder.cuPaserParams));
+ self->decoder.cuPaserParams.CodecType = cudaVideoCodec_H264;
self->decoder.cuPaserParams.ulMaxNumDecodeSurfaces = PLUGIN_CUDA_H264_MAX_FRM_CNT;
self->decoder.cuPaserParams.pUserData = self;
self->decoder.cuPaserParams.pfnSequenceCallback = _NVCallback_HandleVideoSequence;
self->decoder.cuPaserParams.pfnDecodePicture = _NVCallback_HandlePictureDecode;
self->decoder.cuPaserParams.pfnDisplayPicture = _NVCallback_HandlePictureDisplay;
cuResult = cuvidCreateVideoParser(&self->decoder.cuParser, &self->decoder.cuPaserParams);
- if(cuResult != CUDA_SUCCESS)
- {
- TSK_DEBUG_ERROR("cuvidCreateVideoParser(0) failed with error code = %d", (int)cuResult);
- CHECK_HR(hr = E_FAIL);
- }
-
- cuResult = cuvidCreateDecoder(&self->decoder.pInst, &self->decoder.cuInfo);
- if(CUDA_SUCCESS != cuResult)
- {
- TSK_DEBUG_ERROR("cuvidCreateDecoder failed with error code=%d", (int)cuResult);
- CHECK_HR(hr = E_FAIL);
- }
-
- if(!self->decoder.phMutex && !(self->decoder.phMutex = tsk_mutex_create()))
- {
- TSK_DEBUG_ERROR("Failed to create mutex");
- CHECK_HR(hr = E_FAIL);
- }
-
+ if(cuResult != CUDA_SUCCESS) {
+ TSK_DEBUG_ERROR("cuvidCreateVideoParser(0) failed with error code = %d", (int)cuResult);
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ cuResult = cuvidCreateDecoder(&self->decoder.pInst, &self->decoder.cuInfo);
+ if(CUDA_SUCCESS != cuResult) {
+ TSK_DEBUG_ERROR("cuvidCreateDecoder failed with error code=%d", (int)cuResult);
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(!self->decoder.phMutex && !(self->decoder.phMutex = tsk_mutex_create())) {
+ TSK_DEBUG_ERROR("Failed to create mutex");
+ CHECK_HR(hr = E_FAIL);
+ }
+
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
int cuda_codec_h264_close_decoder(cuda_codec_h264_t* self)
{
- if(self)
- {
- if(self->decoder.pInst)
- {
- cuvidDestroyDecoder(self->decoder.pInst);
- self->decoder.pInst = NULL;
- }
- if(self->decoder.cuContext)
- {
- cuCtxDestroy(self->decoder.cuContext);
- self->decoder.cuContext = NULL;
- }
- SafeRelease(&self->decoder.pD3D9Device);
- SafeRelease(&self->decoder.pD3D9);
- if(self->decoder.cuParser)
- {
- cuvidDestroyVideoParser(self->decoder.cuParser);
- self->decoder.cuParser = NULL;
- }
- {/* cuBuffer.XXX */
- if(self->decoder.cuBuffer.pcuPtr)
- {
- cuMemFreeHost(self->decoder.cuBuffer.pcuPtr);
- self->decoder.cuBuffer.pcuPtr = NULL;
- }
- self->decoder.cuBuffer.nSize = self->decoder.cuBuffer.nPitch = 0;
- self->decoder.cuBuffer.bAvail = tsk_false;
- }
-
- if(self->decoder.phMutex)
- {
- tsk_mutex_destroy(&self->decoder.phMutex);
- }
-
- TSK_FREE(self->decoder.accumulator);
- self->decoder.accumulator_pos = 0;
- }
-
- return 0;
+ if(self) {
+ if(self->decoder.pInst) {
+ cuvidDestroyDecoder(self->decoder.pInst);
+ self->decoder.pInst = NULL;
+ }
+ if(self->decoder.cuContext) {
+ cuCtxDestroy(self->decoder.cuContext);
+ self->decoder.cuContext = NULL;
+ }
+ SafeRelease(&self->decoder.pD3D9Device);
+ SafeRelease(&self->decoder.pD3D9);
+ if(self->decoder.cuParser) {
+ cuvidDestroyVideoParser(self->decoder.cuParser);
+ self->decoder.cuParser = NULL;
+ }
+ {/* cuBuffer.XXX */
+ if(self->decoder.cuBuffer.pcuPtr) {
+ cuMemFreeHost(self->decoder.cuBuffer.pcuPtr);
+ self->decoder.cuBuffer.pcuPtr = NULL;
+ }
+ self->decoder.cuBuffer.nSize = self->decoder.cuBuffer.nPitch = 0;
+ self->decoder.cuBuffer.bAvail = tsk_false;
+ }
+
+ if(self->decoder.phMutex) {
+ tsk_mutex_destroy(&self->decoder.phMutex);
+ }
+
+ TSK_FREE(self->decoder.accumulator);
+ self->decoder.accumulator_pos = 0;
+ }
+
+ return 0;
}
int cuda_codec_h264_init(cuda_codec_h264_t* self, profile_idc_t profile)
{
- int ret = 0;
- level_idc_t level;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
-
- if(!self)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- CudaUtils::Startup();
-
- if((ret = tdav_codec_h264_common_init(common)))
- {
- TSK_DEBUG_ERROR("cuda_codec_h264_common_init() faile with error code=%d", ret);
- return ret;
- }
-
- if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level)))
- {
- TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
- return ret;
- }
-
- (self)->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
- common->pack_mode = H264_PACKETIZATION_MODE;
- common->profile = profile;
- common->level = level;
- TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
- TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
-
- TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12; // decoder
- TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_yuv420p; // encoder
-
- self->encoder.quality = 1;
-
- return ret;
+ int ret = 0;
+ level_idc_t level;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ CudaUtils::Startup();
+
+ if((ret = tdav_codec_h264_common_init(common))) {
+        TSK_DEBUG_ERROR("tdav_codec_h264_common_init() failed with error code=%d", ret);
+ return ret;
+ }
+
+ if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level))) {
+ TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
+ return ret;
+ }
+
+ (self)->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
+ common->pack_mode = H264_PACKETIZATION_MODE;
+ common->profile = profile;
+ common->level = level;
+ TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
+ TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
+
+ TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12; // decoder
+ TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_yuv420p; // encoder
+
+ self->encoder.quality = 1;
+
+ return ret;
}
int cuda_codec_h264_deinit(cuda_codec_h264_t* self)
{
- if(!self)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- cuda_codec_h264_close((tmedia_codec_t*)self);
+ cuda_codec_h264_close((tmedia_codec_t*)self);
- return 0;
+ return 0;
}
static inline tsk_size_t _cuda_codec_h264_pict_layout(cuda_codec_h264_t* self, void**output, tsk_size_t *output_size)
{
- if(self && self->decoder.cuBuffer.pcuPtr && self->decoder.cuBuffer.nSize)
- {
- const unsigned int w = TMEDIA_CODEC_VIDEO(self)->in.width;
- const unsigned int w_div_2 = (w >> 1);
- const unsigned int h = TMEDIA_CODEC_VIDEO(self)->in.height;
- const unsigned int h_div_2 = (h >> 1);
- const unsigned int pitch = self->decoder.cuBuffer.nPitch;
- const unsigned int pitch_div_2 = (pitch >> 1);
- const tsk_size_t xsize = (w * h * 3) >> 1;
- // resize if too short
- if(*output_size < xsize)
- {
- if((*output = tsk_realloc(*output, xsize)))
- {
- *output_size = xsize;
- }
- else
- {
- *output_size = 0;
- return 0;
- }
- }
-
-
- register unsigned int y;
+ if(self && self->decoder.cuBuffer.pcuPtr && self->decoder.cuBuffer.nSize) {
+ const unsigned int w = TMEDIA_CODEC_VIDEO(self)->in.width;
+ const unsigned int w_div_2 = (w >> 1);
+ const unsigned int h = TMEDIA_CODEC_VIDEO(self)->in.height;
+ const unsigned int h_div_2 = (h >> 1);
+ const unsigned int pitch = self->decoder.cuBuffer.nPitch;
+ const unsigned int pitch_div_2 = (pitch >> 1);
+ const tsk_size_t xsize = (w * h * 3) >> 1;
+ // resize if too short
+ if(*output_size < xsize) {
+ if((*output = tsk_realloc(*output, xsize))) {
+ *output_size = xsize;
+ }
+ else {
+ *output_size = 0;
+ return 0;
+ }
+ }
+
+
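+        // The decoded frame held in cuBuffer is NV12 with the device pitch: h rows of luma
+        // followed by h/2 rows of interleaved UV, each row nPitch bytes wide. The loop below
+        // strips the pitch and packs the picture into a contiguous w*h*3/2 NV12 buffer
+        // (matching in.chroma = tmedia_chroma_nv12).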
+ register unsigned int y;
const unsigned char *p = (const unsigned char *)self->decoder.cuBuffer.pcuPtr, *q = p + (h * pitch);
register unsigned char *i = (unsigned char *)*output, *j = i + (h * w);
- for (y = 0; y < h; y++)
- {
- // luma
+ for (y = 0; y < h; y++) {
+ // luma
memcpy(i, p, w);
- i += w;
- p += pitch;
-
- // chroma
- memcpy(j, &q[(y&1) ? w_div_2 : 0], w_div_2);
- j += w_div_2;
- if(y&1)
- {
- q += pitch;
- }
+ i += w;
+ p += pitch;
+
+ // chroma
+ memcpy(j, &q[(y&1) ? w_div_2 : 0], w_div_2);
+ j += w_div_2;
+ if(y&1) {
+ q += pitch;
+ }
}
-
- return xsize;
- }
- return 0;
+
+ return xsize;
+ }
+ return 0;
}
static int CUDAAPI _NVCallback_HandleVideoSequence(void *pvUserData, CUVIDEOFORMAT *pFormat)
{
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pvUserData;
- CUresult cuResult;
-
- if(!h264 || !pFormat)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;//error
- }
-
- tsk_mutex_lock(h264->decoder.phMutex);
-
- int ret = 1;
- // http://corecodec.com/products/coreavc/guide
- // CROP 1088 to 1080
- // H.264 encoded video size is always a multiple of 16, and sequences that are 1080 pixels high are encoded as 1088 padded at the bottom.
- // Also H.264 specifications provides a set of cropping parameters to signal that parts of the encoded picture are not important and should not be displayed.
- // Some H.264 encoders fail to specify cropping parameters when encoding 1080 video.
- int newWidth = pFormat->coded_width;//pFormat->display_area.right - pFormat->display_area.left;
- int newHeight = pFormat->coded_height;//pFormat->display_area.bottom - pFormat->display_area.top;
-
- if(newWidth != TMEDIA_CODEC_VIDEO(h264)->in.width || pFormat->coded_height != newHeight)
- {
- TSK_DEBUG_INFO("[H.264 CUDA decoder] display area = left:%d, right:%d, bottom:%d, top:%d",
- pFormat->display_area.left,
- pFormat->display_area.right,
- pFormat->display_area.bottom,
- pFormat->display_area.top
- );
-
- h264->decoder.cuInfo.ulWidth = newWidth;
- h264->decoder.cuInfo.ulTargetWidth = newWidth;
- h264->decoder.cuInfo.ulHeight = newHeight;
- h264->decoder.cuInfo.ulTargetHeight = newHeight;
-
- CUresult cuResult = cuCtxPushCurrent(h264->decoder.cuContext);
- if(cuResult != CUDA_SUCCESS)
- {
- TSK_DEBUG_ERROR("cuCtxPushCurrent failed with error code=%d", (int)cuResult);
- ret = 0; //error
- goto bail;
- }
-
- if(h264->decoder.pInst)
- {
- cuvidDestroyDecoder(h264->decoder.pInst);
- h264->decoder.pInst = NULL;
- }
- cuResult = cuvidCreateDecoder(&h264->decoder.pInst, &h264->decoder.cuInfo);
- if(CUDA_SUCCESS != cuResult)
- {
- TSK_DEBUG_ERROR("cuvidCreateDecoder failed with error code=%d", (int)cuResult);
- ret = 0; //error
- goto bail;
- }
- else
- {
- TMEDIA_CODEC_VIDEO(h264)->in.width = /*pFormat->coded_width*/newWidth;
- TMEDIA_CODEC_VIDEO(h264)->in.height = /*pFormat->coded_height*/newHeight;
- ret = 1; //success
- }
- }
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pvUserData;
+ CUresult cuResult;
+
+ if(!h264 || !pFormat) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;//error
+ }
+
+ tsk_mutex_lock(h264->decoder.phMutex);
+
+ int ret = 1;
+ // http://corecodec.com/products/coreavc/guide
+ // CROP 1088 to 1080
+    // The H.264 coded picture size is always a multiple of 16, so sequences that are 1080 pixels high are encoded as 1088 with padding at the bottom.
+    // The H.264 specification also provides cropping parameters to signal that parts of the coded picture are not important and should not be displayed.
+    // Some H.264 encoders fail to specify these cropping parameters when encoding 1080-line video.
+ int newWidth = pFormat->coded_width;//pFormat->display_area.right - pFormat->display_area.left;
+ int newHeight = pFormat->coded_height;//pFormat->display_area.bottom - pFormat->display_area.top;
+
+    if(newWidth != TMEDIA_CODEC_VIDEO(h264)->in.width || newHeight != TMEDIA_CODEC_VIDEO(h264)->in.height) {
+ TSK_DEBUG_INFO("[H.264 CUDA decoder] display area = left:%d, right:%d, bottom:%d, top:%d",
+ pFormat->display_area.left,
+ pFormat->display_area.right,
+ pFormat->display_area.bottom,
+ pFormat->display_area.top
+ );
+
+ h264->decoder.cuInfo.ulWidth = newWidth;
+ h264->decoder.cuInfo.ulTargetWidth = newWidth;
+ h264->decoder.cuInfo.ulHeight = newHeight;
+ h264->decoder.cuInfo.ulTargetHeight = newHeight;
+
+ CUresult cuResult = cuCtxPushCurrent(h264->decoder.cuContext);
+ if(cuResult != CUDA_SUCCESS) {
+ TSK_DEBUG_ERROR("cuCtxPushCurrent failed with error code=%d", (int)cuResult);
+ ret = 0; //error
+ goto bail;
+ }
+
+ if(h264->decoder.pInst) {
+ cuvidDestroyDecoder(h264->decoder.pInst);
+ h264->decoder.pInst = NULL;
+ }
+ cuResult = cuvidCreateDecoder(&h264->decoder.pInst, &h264->decoder.cuInfo);
+ if(CUDA_SUCCESS != cuResult) {
+ TSK_DEBUG_ERROR("cuvidCreateDecoder failed with error code=%d", (int)cuResult);
+ ret = 0; //error
+ goto bail;
+ }
+ else {
+ TMEDIA_CODEC_VIDEO(h264)->in.width = /*pFormat->coded_width*/newWidth;
+ TMEDIA_CODEC_VIDEO(h264)->in.height = /*pFormat->coded_height*/newHeight;
+ ret = 1; //success
+ }
+ }
bail:
- cuResult = cuCtxPopCurrent(NULL);
- tsk_mutex_unlock(h264->decoder.phMutex);
- return ret;//success
+ cuResult = cuCtxPopCurrent(NULL);
+ tsk_mutex_unlock(h264->decoder.phMutex);
+ return ret;//success
}
static int CUDAAPI _NVCallback_HandlePictureDecode(void *pvUserData, CUVIDPICPARAMS *pPicParams)
{
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pvUserData;
- if(!h264 || !pPicParams)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;//error
- }
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pvUserData;
+ if(!h264 || !pPicParams) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;//error
+ }
- tsk_mutex_lock(h264->decoder.phMutex);
- CUresult cuResult = cuvidDecodePicture(h264->decoder.pInst, pPicParams);
- tsk_mutex_unlock(h264->decoder.phMutex);
+ tsk_mutex_lock(h264->decoder.phMutex);
+ CUresult cuResult = cuvidDecodePicture(h264->decoder.pInst, pPicParams);
+ tsk_mutex_unlock(h264->decoder.phMutex);
- if(cuResult != CUDA_SUCCESS)
- {
- TSK_DEBUG_ERROR("cuvidDecodePicture failed with error code= %d", cuResult);
- return 0;//error
+ if(cuResult != CUDA_SUCCESS) {
+ TSK_DEBUG_ERROR("cuvidDecodePicture failed with error code= %d", cuResult);
+ return 0;//error
}
- return 1;//success
+ return 1;//success
}
static int CUDAAPI _NVCallback_HandlePictureDisplay(void *pvUserData, CUVIDPARSERDISPINFO *pPicParams)
{
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pvUserData;
- CUVIDPROCPARAMS vpp = {0};
- CUdeviceptr devPtr;
- CUresult cuResult;
- tsk_size_t nv12_size;
- tsk_bool_t mapped = tsk_false;
- int ret = 1;//success
-
- if(!h264 || !pPicParams)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;//error
- }
-
- cuResult = cuCtxPushCurrent(h264->decoder.cuContext);
- if(cuResult != CUDA_SUCCESS)
- {
- TSK_DEBUG_ERROR("cuCtxPushCurrent failed with error code = %d", (int)cuResult);
- ret = 0;//error
- goto bail;
- }
-
- vpp.progressive_frame = pPicParams->progressive_frame;
- vpp.top_field_first = pPicParams->top_field_first;
- cuResult = cuvidMapVideoFrame(h264->decoder.pInst, pPicParams->picture_index, &devPtr, &h264->decoder.cuBuffer.nPitch, &vpp);
-
- if(cuResult != CUDA_SUCCESS)
- {
- TSK_DEBUG_ERROR("cuvidMapVideoFrame failed with error code = %d", (int)cuResult);
- ret = 0;//error
- goto bail;
- }
- mapped = tsk_true;
- nv12_size = ((h264->decoder.cuBuffer.nPitch * TMEDIA_CODEC_VIDEO(h264)->in.height) * 3) >> 1;
- if ((!h264->decoder.cuBuffer.pcuPtr) || (nv12_size > h264->decoder.cuBuffer.nSize))
- {
- h264->decoder.cuBuffer.nSize = 0;
- if (h264->decoder.cuBuffer.pcuPtr)
- {
- cuResult = cuMemFreeHost(h264->decoder.cuBuffer.pcuPtr);
- h264->decoder.cuBuffer.pcuPtr = NULL;
- }
- cuResult = cuMemAllocHost((void**)&h264->decoder.cuBuffer.pcuPtr, nv12_size);
- if (cuResult != CUDA_SUCCESS)
- {
- TSK_DEBUG_ERROR("cuMemAllocHost failed to allocate %d bytes (error code=%d)", nv12_size, (int)cuResult);
- h264->decoder.cuBuffer.pcuPtr = tsk_null;
- h264->decoder.cuBuffer.nSize = 0;
- ret = 0;//error
- }
- else
- {
- h264->decoder.cuBuffer.nSize = nv12_size;
- }
- }
- if(h264->decoder.cuBuffer.pcuPtr)
- {
- cuResult = cuMemcpyDtoH(h264->decoder.cuBuffer.pcuPtr, devPtr, nv12_size);
- }
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pvUserData;
+ CUVIDPROCPARAMS vpp = {0};
+ CUdeviceptr devPtr;
+ CUresult cuResult;
+ tsk_size_t nv12_size;
+ tsk_bool_t mapped = tsk_false;
+ int ret = 1;//success
+
+ if(!h264 || !pPicParams) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;//error
+ }
+
+ cuResult = cuCtxPushCurrent(h264->decoder.cuContext);
+ if(cuResult != CUDA_SUCCESS) {
+ TSK_DEBUG_ERROR("cuCtxPushCurrent failed with error code = %d", (int)cuResult);
+ ret = 0;//error
+ goto bail;
+ }
+
+ vpp.progressive_frame = pPicParams->progressive_frame;
+ vpp.top_field_first = pPicParams->top_field_first;
+ cuResult = cuvidMapVideoFrame(h264->decoder.pInst, pPicParams->picture_index, &devPtr, &h264->decoder.cuBuffer.nPitch, &vpp);
+
+ if(cuResult != CUDA_SUCCESS) {
+ TSK_DEBUG_ERROR("cuvidMapVideoFrame failed with error code = %d", (int)cuResult);
+ ret = 0;//error
+ goto bail;
+ }
+ mapped = tsk_true;
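+    // Host-side copy of the mapped NV12 frame: nPitch * height * 3/2 bytes. The pinned
+    // buffer is (re)allocated with cuMemAllocHost whenever the mapped frame outgrows it.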
+ nv12_size = ((h264->decoder.cuBuffer.nPitch * TMEDIA_CODEC_VIDEO(h264)->in.height) * 3) >> 1;
+ if ((!h264->decoder.cuBuffer.pcuPtr) || (nv12_size > h264->decoder.cuBuffer.nSize)) {
+ h264->decoder.cuBuffer.nSize = 0;
+ if (h264->decoder.cuBuffer.pcuPtr) {
+ cuResult = cuMemFreeHost(h264->decoder.cuBuffer.pcuPtr);
+ h264->decoder.cuBuffer.pcuPtr = NULL;
+ }
+ cuResult = cuMemAllocHost((void**)&h264->decoder.cuBuffer.pcuPtr, nv12_size);
+ if (cuResult != CUDA_SUCCESS) {
+ TSK_DEBUG_ERROR("cuMemAllocHost failed to allocate %d bytes (error code=%d)", nv12_size, (int)cuResult);
+ h264->decoder.cuBuffer.pcuPtr = tsk_null;
+ h264->decoder.cuBuffer.nSize = 0;
+ ret = 0;//error
+ }
+ else {
+ h264->decoder.cuBuffer.nSize = nv12_size;
+ }
+ }
+ if(h264->decoder.cuBuffer.pcuPtr) {
+ cuResult = cuMemcpyDtoH(h264->decoder.cuBuffer.pcuPtr, devPtr, nv12_size);
+ }
bail:
- if(mapped)
- {
- cuResult = cuvidUnmapVideoFrame(h264->decoder.pInst, devPtr);
- }
- cuResult = cuCtxPopCurrent(NULL);
-
- h264->decoder.cuBuffer.bAvail = (ret == 1);
- return ret;
+ if(mapped) {
+ cuResult = cuvidUnmapVideoFrame(h264->decoder.pInst, devPtr);
+ }
+ cuResult = cuCtxPopCurrent(NULL);
+
+ h264->decoder.cuBuffer.bAvail = (ret == 1);
+ return ret;
}
static unsigned char* CUDAAPI _NVCallback_HandleAcquireBitStream(int *pBufferSize, void *pUserdata)
{
- cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pUserdata;
- if(!h264 || !pBufferSize)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return tsk_null;
- }
-
- *pBufferSize = (int)h264->encoder.nBufferSize;
- return (unsigned char*)h264->encoder.pBufferPtr;
+ cuda_codec_h264_t* h264 = (cuda_codec_h264_t*)pUserdata;
+ if(!h264 || !pBufferSize) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return tsk_null;
+ }
+
+ *pBufferSize = (int)h264->encoder.nBufferSize;
+ return (unsigned char*)h264->encoder.pBufferPtr;
}
static void CUDAAPI _NVCallback_HandleReleaseBitStream(int nBytesInBuffer, unsigned char *cb, void *pUserdata)
{
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)pUserdata;
- if(!common || !cb || !nBytesInBuffer)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return;
- }
- tdav_codec_h264_rtp_encap(common, (const uint8_t*)cb, (tsk_size_t)nBytesInBuffer);
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)pUserdata;
+ if(!common || !cb || !nBytesInBuffer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return;
+ }
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)cb, (tsk_size_t)nBytesInBuffer);
}
static void CUDAAPI _NVCallback_HandleOnBeginFrame(const NVVE_BeginFrameInfo *pbfi, void *pUserdata)
diff --git a/plugins/pluginCUDA/plugin_cuda_config.h b/plugins/pluginCUDA/plugin_cuda_config.h
index 4fceebb..3431e0c 100755
--- a/plugins/pluginCUDA/plugin_cuda_config.h
+++ b/plugins/pluginCUDA/plugin_cuda_config.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -48,12 +48,12 @@
# define PLUGIN_CUDA_UNDER_X86 1
#endif
-// Guards against C++ name mangling
+// Guards against C++ name mangling
#ifdef __cplusplus
# define PLUGIN_CUDA_BEGIN_DECLS extern "C" {
# define PLUGIN_CUDA_END_DECLS }
#else
-# define PLUGIN_CUDA_BEGIN_DECLS
+# define PLUGIN_CUDA_BEGIN_DECLS
# define PLUGIN_CUDA_END_DECLS
#endif
@@ -69,7 +69,7 @@
#endif
#if HAVE_CONFIG_H
- #include <config.h>
+#include <config.h>
#endif
#endif // PLUGIN_CUDA_CONFIG_H
diff --git a/plugins/pluginCUDA/plugin_cuda_tdav.cxx b/plugins/pluginCUDA/plugin_cuda_tdav.cxx
index 2d16b72..44a25d5 100755
--- a/plugins/pluginCUDA/plugin_cuda_tdav.cxx
+++ b/plugins/pluginCUDA/plugin_cuda_tdav.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/pluginCUDA/plugin_cuda_utils.cxx b/plugins/pluginCUDA/plugin_cuda_utils.cxx
index 94c7baf..d5d2aad 100755
--- a/plugins/pluginCUDA/plugin_cuda_utils.cxx
+++ b/plugins/pluginCUDA/plugin_cuda_utils.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,83 +31,74 @@ int CudaUtils::g_nCores = 0;
HRESULT CudaUtils::Startup()
{
- if(!g_bStarted)
- {
- CUresult cuResult = CUDA_SUCCESS;
- HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
- if(SUCCEEDED(hr) || hr == 0x80010106) // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
- {
- if((cuResult = cuInit(0)) != CUDA_SUCCESS)
- {
- TSK_DEBUG_ERROR("cuInit() failed with error code = %08x", cuResult);
- hr = E_FAIL;
- }
- else
- {
- hr = S_OK;
- }
- }
- g_bStarted = true;
- return hr;
- }
- return S_OK;
+ if(!g_bStarted) {
+ CUresult cuResult = CUDA_SUCCESS;
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+ if(SUCCEEDED(hr) || hr == 0x80010106) { // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
+ if((cuResult = cuInit(0)) != CUDA_SUCCESS) {
+ TSK_DEBUG_ERROR("cuInit() failed with error code = %08x", cuResult);
+ hr = E_FAIL;
+ }
+ else {
+ hr = S_OK;
+ }
+ }
+ g_bStarted = true;
+ return hr;
+ }
+ return S_OK;
}
HRESULT CudaUtils::Shutdown()
{
- // cuDeinit();
- return S_OK;
+ // cuDeinit();
+ return S_OK;
}
bool CudaUtils::IsH264Supported()
{
- if(g_bH264Checked)
- {
- return g_bH264Supported;
- }
-
- HRESULT hr = S_OK;
-
- CHECK_HR(hr = Startup());
-
- g_bH264Checked = true;
-
- NVEncoder pEncoder = NULL;
-
- CHECK_HR(hr = NVGetHWEncodeCaps());
- CHECK_HR(hr = NVCreateEncoder(&pEncoder));
- // Both Base and Main profiles *must* be supported
- CHECK_HR(hr = NVIsSupportedCodecProfile(pEncoder, NV_CODEC_TYPE_H264, NVVE_H264_PROFILE_BASELINE));
- CHECK_HR(hr = NVIsSupportedCodecProfile(pEncoder, NV_CODEC_TYPE_H264, NVVE_H264_PROFILE_MAIN));
-
- g_bH264Supported = true;
-
+ if(g_bH264Checked) {
+ return g_bH264Supported;
+ }
+
+ HRESULT hr = S_OK;
+
+ CHECK_HR(hr = Startup());
+
+ g_bH264Checked = true;
+
+ NVEncoder pEncoder = NULL;
+
+ CHECK_HR(hr = NVGetHWEncodeCaps());
+ CHECK_HR(hr = NVCreateEncoder(&pEncoder));
+ // Both Base and Main profiles *must* be supported
+ CHECK_HR(hr = NVIsSupportedCodecProfile(pEncoder, NV_CODEC_TYPE_H264, NVVE_H264_PROFILE_BASELINE));
+ CHECK_HR(hr = NVIsSupportedCodecProfile(pEncoder, NV_CODEC_TYPE_H264, NVVE_H264_PROFILE_MAIN));
+
+ g_bH264Supported = true;
+
bail:
- if(pEncoder)
- {
- NVDestroyEncoder(pEncoder);
- pEncoder = NULL;
- }
+ if(pEncoder) {
+ NVDestroyEncoder(pEncoder);
+ pEncoder = NULL;
+ }
- return g_bH264Supported;
+ return g_bH264Supported;
}
int CudaUtils::ConvertSMVer2Cores(int nMajor, int nMinor)
{
- if(g_nCores != 0)
- {
- return g_nCores;
- }
-
- // Defines for GPU Architecture types (using the SM version to determine the # of cores per SM
- typedef struct
- {
+ if(g_nCores != 0) {
+ return g_nCores;
+ }
+
+ // Defines for GPU Architecture types (using the SM version to determine the # of cores per SM
+ typedef struct {
int SM; // 0xMm (hexidecimal notation), M = SM Major version, and m = SM minor version
int Cores;
} sSMtoCores;
- sSMtoCores nGpuArchCoresPerSM[] =
- {
+ sSMtoCores nGpuArchCoresPerSM[] = {
{ 0x10, 8 }, // Tesla Generation (SM 1.0) G80 class
{ 0x11, 8 }, // Tesla Generation (SM 1.1) G8x class
{ 0x12, 8 }, // Tesla Generation (SM 1.2) G9x class
@@ -120,12 +111,10 @@ int CudaUtils::ConvertSMVer2Cores(int nMajor, int nMinor)
int index = 0;
- while (nGpuArchCoresPerSM[index].SM != -1)
- {
- if (nGpuArchCoresPerSM[index].SM == ((nMajor << 4) + nMinor))
- {
+ while (nGpuArchCoresPerSM[index].SM != -1) {
+ if (nGpuArchCoresPerSM[index].SM == ((nMajor << 4) + nMinor)) {
g_nCores = nGpuArchCoresPerSM[index].Cores;
- break;
+ break;
}
index++;
@@ -135,34 +124,32 @@ int CudaUtils::ConvertSMVer2Cores(int nMajor, int nMinor)
TSK_DEBUG_INFO("MapSMtoCores for SM %d.%d is undefined. Default to use %d Cores/SM", nMajor, nMinor, nGpuArchCoresPerSM[7].Cores);
g_nCores = nGpuArchCoresPerSM[7].Cores;
- return g_nCores;
+ return g_nCores;
}
int CudaUtils::GetMaxGflopsDeviceId()
{
- int device_count = 0;
- cudaGetDeviceCount( &device_count );
-
- cudaDeviceProp device_properties;
- int max_gflops_device = 0;
- int max_gflops = 0;
-
- int current_device = 0;
- cudaGetDeviceProperties( &device_properties, current_device );
- max_gflops = device_properties.multiProcessorCount * device_properties.clockRate;
- ++current_device;
-
- while( current_device < device_count )
- {
- cudaGetDeviceProperties( &device_properties, current_device );
- int gflops = device_properties.multiProcessorCount * device_properties.clockRate;
- if( gflops > max_gflops )
- {
- max_gflops = gflops;
- max_gflops_device = current_device;
- }
- ++current_device;
- }
-
- return max_gflops_device;
+ int device_count = 0;
+ cudaGetDeviceCount( &device_count );
+
+ cudaDeviceProp device_properties;
+ int max_gflops_device = 0;
+ int max_gflops = 0;
+
+ int current_device = 0;
+ cudaGetDeviceProperties( &device_properties, current_device );
+ max_gflops = device_properties.multiProcessorCount * device_properties.clockRate;
+ ++current_device;
+
+ while( current_device < device_count ) {
+ cudaGetDeviceProperties( &device_properties, current_device );
+ int gflops = device_properties.multiProcessorCount * device_properties.clockRate;
+ if( gflops > max_gflops ) {
+ max_gflops = gflops;
+ max_gflops_device = current_device;
+ }
+ ++current_device;
+ }
+
+ return max_gflops_device;
}
\ No newline at end of file
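For context, GetMaxGflopsDeviceId() above keeps a simple heuristic: score every CUDA device by multiProcessorCount * clockRate and return the index of the best one. Below is a minimal standalone sketch of the same heuristic, assuming only the CUDA runtime API; the helper name PickMaxGflopsDevice is illustrative and not part of the plugin.

// Standalone sketch of the "max GFLOPS device" heuristic used above:
// score each CUDA device by multiProcessorCount * clockRate, keep the best.
// Assumes the CUDA runtime API (cuda_runtime.h); error handling is minimal.
#include <cuda_runtime.h>
#include <cstdio>

static int PickMaxGflopsDevice()
{
    int count = 0;
    if (cudaGetDeviceCount(&count) != cudaSuccess || count <= 0) {
        return -1; // no usable CUDA device
    }
    int best = 0;
    long long bestScore = -1;
    for (int dev = 0; dev < count; ++dev) {
        cudaDeviceProp prop;
        if (cudaGetDeviceProperties(&prop, dev) != cudaSuccess) {
            continue;
        }
        const long long score =
            static_cast<long long>(prop.multiProcessorCount) * prop.clockRate;
        if (score > bestScore) {
            bestScore = score;
            best = dev;
        }
    }
    return best;
}

int main()
{
    std::printf("best CUDA device: %d\n", PickMaxGflopsDevice());
    return 0;
}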
diff --git a/plugins/pluginCUDA/plugin_cuda_utils.h b/plugins/pluginCUDA/plugin_cuda_utils.h
index 4829275..6b91fac 100755
--- a/plugins/pluginCUDA/plugin_cuda_utils.h
+++ b/plugins/pluginCUDA/plugin_cuda_utils.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -35,22 +35,22 @@
(*ppT)->Release(); \
*ppT = NULL; \
} \
-}
+}
class CudaUtils
{
public:
- static HRESULT Startup();
- static HRESULT Shutdown();
- static bool IsH264Supported();
- static int ConvertSMVer2Cores(int nMajor, int nMinor);
- static int GetMaxGflopsDeviceId();
+ static HRESULT Startup();
+ static HRESULT Shutdown();
+ static bool IsH264Supported();
+ static int ConvertSMVer2Cores(int nMajor, int nMinor);
+ static int GetMaxGflopsDeviceId();
private:
- static bool g_bStarted;
- static bool g_bH264Checked;
- static bool g_bH264Supported;
- static int g_nCores;
+ static bool g_bStarted;
+ static bool g_bH264Checked;
+ static bool g_bH264Supported;
+ static int g_nCores;
};
#endif/* PLUGIN_CUDA_UTILS_H */
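The header above exposes CudaUtils as a small static facade. A hedged usage sketch follows, assuming this header and the usual Windows HRESULT macros are in scope; the wrapper function is illustrative only.

// Illustrative call sequence for the CudaUtils facade declared above.
// Startup() initialises COM and the CUDA driver API; IsH264Supported()
// probes the NVCUVENC encoder for Baseline and Main profile support.
#include "plugin_cuda_utils.h"

bool ProbeCudaH264()
{
    if (FAILED(CudaUtils::Startup())) {
        return false;
    }
    const bool ok = CudaUtils::IsH264Supported();
    // ConvertSMVer2Cores() and GetMaxGflopsDeviceId() can be queried at any
    // point after Startup(); Shutdown() is currently a no-op but is kept for
    // symmetry with Startup().
    CudaUtils::Shutdown();
    return ok;
}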
diff --git a/plugins/pluginDirectShow/dllmain_dshow.cxx b/plugins/pluginDirectShow/dllmain_dshow.cxx
index 2b1eda8..b8a18d7 100755
--- a/plugins/pluginDirectShow/dllmain_dshow.cxx
+++ b/plugins/pluginDirectShow/dllmain_dshow.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -42,13 +42,14 @@ extern const tmedia_consumer_plugin_def_t *plugin_video_dshow_consumer_plugin_de
extern const tmedia_producer_plugin_def_t *plugin_video_dshow_producer_plugin_def_t;
extern const tmedia_producer_plugin_def_t *plugin_screencast_dshow_producer_plugin_def_t;
-CFactoryTemplate g_Templates[]=
-{ { L""
-, NULL
-, NULL
-, NULL
-, NULL
-}
+CFactoryTemplate g_Templates[]= {
+ {
+ L""
+ , NULL
+ , NULL
+ , NULL
+ , NULL
+ }
};
int g_cTemplates = sizeof(g_Templates)/sizeof(g_Templates[0]);
@@ -56,101 +57,90 @@ int g_cTemplates = sizeof(g_Templates)/sizeof(g_Templates[0]);
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
- )
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- case DLL_THREAD_ATTACH:
- case DLL_THREAD_DETACH:
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ case DLL_THREAD_ATTACH:
+ case DLL_THREAD_DETACH:
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
#endif
-typedef enum PLUGIN_INDEX_E
-{
- PLUGIN_INDEX_VIDEO_CONSUMER,
- PLUGIN_INDEX_VIDEO_PRODUCER,
+typedef enum PLUGIN_INDEX_E {
+ PLUGIN_INDEX_VIDEO_CONSUMER,
+ PLUGIN_INDEX_VIDEO_PRODUCER,
#if 0
- PLUGIN_INDEX_SCREENCAST_PRODUCER,
+ PLUGIN_INDEX_SCREENCAST_PRODUCER,
#endif
- PLUGIN_INDEX_COUNT
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- return PLUGIN_INDEX_COUNT;
+ return PLUGIN_INDEX_COUNT;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- return IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
- case PLUGIN_INDEX_VIDEO_PRODUCER:
+ switch(index) {
+ case PLUGIN_INDEX_VIDEO_CONSUMER:
+ return IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
+ case PLUGIN_INDEX_VIDEO_PRODUCER:
#if ENABLE_SCREENCAST
- case PLUGIN_INDEX_SCREENCAST_PRODUCER:
+ case PLUGIN_INDEX_SCREENCAST_PRODUCER:
#endif
- return tsk_plugin_def_type_producer;
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
- }
- }
+ return tsk_plugin_def_type_producer;
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+ }
+ }
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- {
- return IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
- }
- case PLUGIN_INDEX_VIDEO_PRODUCER:
- {
- return tsk_plugin_def_media_type_video;
- }
+ switch(index) {
+ case PLUGIN_INDEX_VIDEO_CONSUMER: {
+ return IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+ }
+ case PLUGIN_INDEX_VIDEO_PRODUCER: {
+ return tsk_plugin_def_media_type_video;
+ }
#if ENABLE_SCREENCAST
- case PLUGIN_INDEX_SCREENCAST_PRODUCER:
- {
- return tsk_plugin_def_media_type_screencast;
- }
+ case PLUGIN_INDEX_SCREENCAST_PRODUCER: {
+ return tsk_plugin_def_media_type_screencast;
+ }
#endif
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
- }
- }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+ }
+ }
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
- switch(index){
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- {
- return IsD3D9Supported() ? plugin_video_dshow_consumer_plugin_def_t : tsk_null;
- }
- case PLUGIN_INDEX_VIDEO_PRODUCER:
- {
- return plugin_video_dshow_producer_plugin_def_t;
- }
+ switch(index) {
+ case PLUGIN_INDEX_VIDEO_CONSUMER: {
+ return IsD3D9Supported() ? plugin_video_dshow_consumer_plugin_def_t : tsk_null;
+ }
+ case PLUGIN_INDEX_VIDEO_PRODUCER: {
+ return plugin_video_dshow_producer_plugin_def_t;
+ }
#if ENABLE_SCREENCAST
- case PLUGIN_INDEX_SCREENCAST_PRODUCER:
- {
- return plugin_screencast_dshow_producer_plugin_def_t;
- }
+ case PLUGIN_INDEX_SCREENCAST_PRODUCER: {
+ return plugin_screencast_dshow_producer_plugin_def_t;
+ }
#endif
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
- }
- }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+ }
+ }
}
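dllmain_dshow.cxx implements the Doubango plugin contract: the host asks for a definition count and then queries the type, media type, and definition pointer at each index. The loop below sketches how a host might walk that contract; only the __plugin_* entry points and the tsk_plugin_def_* types come from the source, while the surrounding code and the assumed tinySAK header are illustrative.

// Illustrative host-side walk of the plugin contract implemented above.
// Assumes the tinySAK plugin header defining the tsk_plugin_def_* types;
// the actual registration call into the media layer is omitted.
#include "tsk_plugin.h"

extern int __plugin_get_def_count();
extern tsk_plugin_def_type_t __plugin_get_def_type_at(int index);
extern tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index);
extern tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);

static void EnumeratePluginDefs()
{
    const int count = __plugin_get_def_count();
    for (int i = 0; i < count; ++i) {
        if (__plugin_get_def_type_at(i) == tsk_plugin_def_type_none) {
            continue; // e.g. the consumer slot when D3D9 is unavailable
        }
        const tsk_plugin_def_media_type_t media = __plugin_get_def_media_type_at(i);
        const tsk_plugin_def_ptr_const_t def = __plugin_get_def_at(i);
        if (def && media != tsk_plugin_def_media_type_none) {
            // hand 'def' to the tmedia plugin manager here
        }
    }
}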
diff --git a/plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h b/plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h
index 1817fb5..c2d8455 100755
--- a/plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h
+++ b/plugins/pluginDirectShow/internals/DSBaseCaptureGraph.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2014 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -36,29 +36,29 @@ class DSBaseCaptureGraph
{
public:
#if defined(_WIN32_WCE)
- DSBaseCaptureGraph(DSISampleGrabberCB* callback, HRESULT *hr) {}
+ DSBaseCaptureGraph(DSISampleGrabberCB* callback, HRESULT *hr) {}
#else
- DSBaseCaptureGraph(ISampleGrabberCB* callback, HRESULT *hr) {}
+ DSBaseCaptureGraph(ISampleGrabberCB* callback, HRESULT *hr) {}
#endif
- virtual ~DSBaseCaptureGraph() {}
+ virtual ~DSBaseCaptureGraph() {}
- virtual std::vector<DSCaptureFormat> *getFormats() = 0;
+ virtual std::vector<DSCaptureFormat> *getFormats() = 0;
- virtual HRESULT setSource(const std::string &devicePath) = 0;
- virtual HRESULT setParameters(DSCaptureFormat *format, int framerate) = 0;
+ virtual HRESULT setSource(const std::string &devicePath) = 0;
+ virtual HRESULT setParameters(DSCaptureFormat *format, int framerate) = 0;
- virtual HRESULT connect() = 0;
- virtual HRESULT disconnect() = 0;
+ virtual HRESULT connect() = 0;
+ virtual HRESULT disconnect() = 0;
- virtual HRESULT start() = 0;
- virtual HRESULT stop() = 0;
- virtual HRESULT pause() = 0;
- virtual bool isRunning() = 0;
- virtual bool isPaused() = 0;
+ virtual HRESULT start() = 0;
+ virtual HRESULT stop() = 0;
+ virtual HRESULT pause() = 0;
+ virtual bool isRunning() = 0;
+ virtual bool isPaused() = 0;
- virtual std::string getDeviceId() const = 0;
+ virtual std::string getDeviceId() const = 0;
- virtual HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType) = 0;
+ virtual HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType) = 0;
};
#endif /* PLUGIN_DSHOW_DSBASECAPTUREGRAPH_H */
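DSBaseCaptureGraph above defines the abstract life cycle every capture graph implements. The sketch below shows the intended call order (setSource, setParameters, connect, start, then stop and disconnect); it is illustrative only, assumes this header plus <vector> and <string> are included, and assumes frames arrive through the grabber callback passed at construction.

// Illustrative driver for the abstract capture-graph interface above.
// 'devicePath' is assumed to come from the caller; the fps value is arbitrary.
HRESULT RunCaptureOnce(DSBaseCaptureGraph* graph, const std::string& devicePath)
{
    HRESULT hr = graph->setSource(devicePath);
    if (FAILED(hr)) {
        return hr;
    }
    std::vector<DSCaptureFormat>* formats = graph->getFormats();
    if (!formats || formats->empty()) {
        return E_FAIL;
    }
    // Pick the first advertised format; real code scores formats first.
    hr = graph->setParameters(&(*formats)[0], 15 /* fps, illustrative */);
    if (FAILED(hr)) {
        return hr;
    }
    hr = graph->connect();
    if (SUCCEEDED(hr)) {
        hr = graph->start();
    }
    // ... frames are delivered through the sample-grabber callback ...
    graph->stop();
    graph->disconnect();
    return hr;
}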
diff --git a/plugins/pluginDirectShow/internals/DSBufferWriter.h b/plugins/pluginDirectShow/internals/DSBufferWriter.h
index dbe1484..70c1f23 100755
--- a/plugins/pluginDirectShow/internals/DSBufferWriter.h
+++ b/plugins/pluginDirectShow/internals/DSBufferWriter.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -30,19 +30,19 @@
#endif
// {27AD9929-E4E7-423b-8BDD-8AF5AC894DE0}
-TDSHOW_DEFINE_GUID(IID_DSBufferWriter,
+TDSHOW_DEFINE_GUID(IID_DSBufferWriter,
0x27ad9929, 0xe4e7, 0x423b, 0x8b, 0xdd, 0x8a, 0xf5, 0xac, 0x89, 0x4d, 0xe0);
*/
class DSBufferWriter
#ifndef _WIN32_WCE
- : public IUnknown
+ : public IUnknown
#endif
{
public:
- virtual void setBuffer (void* pBuffer, int size) = 0;
- virtual HRESULT setImageFormat(UINT width, UINT height/*, GUID subType, UINT fps*/) = 0;
+ virtual void setBuffer (void* pBuffer, int size) = 0;
+ virtual HRESULT setImageFormat(UINT width, UINT height/*, GUID subType, UINT fps*/) = 0;
};
#endif
diff --git a/plugins/pluginDirectShow/internals/DSCaptureFormat.cxx b/plugins/pluginDirectShow/internals/DSCaptureFormat.cxx
index 378a215..781101d 100755
--- a/plugins/pluginDirectShow/internals/DSCaptureFormat.cxx
+++ b/plugins/pluginDirectShow/internals/DSCaptureFormat.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -21,40 +21,60 @@
int DSCaptureFormat::getMatchScore(int w, int h)
{
- int factor;
+ int factor;
- if ((w == width) && (h = height)){
- factor = 100;
- }
- else if ((w > this->width) && (h > this->height)){
- factor = 0;
- }
- else{
- factor = (50 * w) / this->width + (50 * h) / this->height;
- }
+ if ((w == width) && (h = height)) {
+ factor = 100;
+ }
+ else if ((w > this->width) && (h > this->height)) {
+ factor = 0;
+ }
+ else {
+ factor = (50 * w) / this->width + (50 * h) / this->height;
+ }
- if (isRGB()){
- factor *= 2;
- }
+ if (isRGB()) {
+ factor *= 2;
+ }
- return factor;
+ return factor;
}
bool DSCaptureFormat::isRGB()
{
- // Order used is optimized for most used RGB types
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB32)) return true;
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB24)) return true;
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB565)) return true;
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB555)) return true;
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB8)) return true;
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB4)) return true;
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB1)) return true;
+ // Order used is optimized for most used RGB types
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB32)) {
+ return true;
+ }
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB24)) {
+ return true;
+ }
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB565)) {
+ return true;
+ }
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB555)) {
+ return true;
+ }
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB8)) {
+ return true;
+ }
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB4)) {
+ return true;
+ }
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_RGB1)) {
+ return true;
+ }
#ifndef _WIN32_WCE
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_ARGB32)) return true;
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_ARGB4444)) return true;
- if (IsEqualGUID(this->chroma, MEDIASUBTYPE_ARGB1555)) return true;
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_ARGB32)) {
+ return true;
+ }
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_ARGB4444)) {
+ return true;
+ }
+ if (IsEqualGUID(this->chroma, MEDIASUBTYPE_ARGB1555)) {
+ return true;
+ }
#endif
- return false;
+ return false;
}
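For a sense of the scoring above: an exact match scores 100, a request larger than the format in both dimensions scores 0, and anything else scores proportionally. For example, asking a 640x480 format for 320x240 gives (50*320)/640 + (50*240)/480 = 25 + 25 = 50, and the result is doubled when the format is RGB, so RGB chromas are preferred over non-RGB ones of comparable size.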
diff --git a/plugins/pluginDirectShow/internals/DSCaptureFormat.h b/plugins/pluginDirectShow/internals/DSCaptureFormat.h
index 5e1cabb..45497cb 100755
--- a/plugins/pluginDirectShow/internals/DSCaptureFormat.h
+++ b/plugins/pluginDirectShow/internals/DSCaptureFormat.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -25,22 +25,30 @@
class DSCaptureFormat
{
public:
- DSCaptureFormat(int w, int h, int f, GUID c) : width(w), height(h), fps(f), chroma(c) {};
- virtual ~DSCaptureFormat() {};
-
- int getWidth() { return this->width; };
- int getHeight() { return this->height; };
- int getFramerate() { return this->fps; };
- GUID getChroma() { return this->chroma; };
-
- int getMatchScore(int w, int h);
- bool isRGB();
+ DSCaptureFormat(int w, int h, int f, GUID c) : width(w), height(h), fps(f), chroma(c) {};
+ virtual ~DSCaptureFormat() {};
+
+ int getWidth() {
+ return this->width;
+ };
+ int getHeight() {
+ return this->height;
+ };
+ int getFramerate() {
+ return this->fps;
+ };
+ GUID getChroma() {
+ return this->chroma;
+ };
+
+ int getMatchScore(int w, int h);
+ bool isRGB();
private:
- int width;
- int height;
- int fps;
- GUID chroma;
+ int width;
+ int height;
+ int fps;
+ GUID chroma;
};
diff --git a/plugins/pluginDirectShow/internals/DSCaptureGraph.cxx b/plugins/pluginDirectShow/internals/DSCaptureGraph.cxx
index 3da6da1..1b46b72 100755
--- a/plugins/pluginDirectShow/internals/DSCaptureGraph.cxx
+++ b/plugins/pluginDirectShow/internals/DSCaptureGraph.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -27,149 +27,159 @@ using namespace std;
#ifdef _WIN32_WCE
DSCaptureGraph::DSCaptureGraph(DSISampleGrabberCB* callback, HRESULT *hr)
-: DSBaseCaptureGraph(callback, hr)
+ : DSBaseCaptureGraph(callback, hr)
#else
DSCaptureGraph::DSCaptureGraph(ISampleGrabberCB* callback, HRESULT *hr)
-: DSBaseCaptureGraph(callback, hr)
+ : DSBaseCaptureGraph(callback, hr)
#endif
{
- this->grabberCallback = callback;
+ this->grabberCallback = callback;
- this->captureFormat = NULL;
- this->captureGraphBuilder = NULL;
- this->graphBuilder = NULL;
+ this->captureFormat = NULL;
+ this->captureGraphBuilder = NULL;
+ this->graphBuilder = NULL;
- this->sourceFilter = NULL;
- this->sampleGrabberFilter = NULL;
+ this->sourceFilter = NULL;
+ this->sampleGrabberFilter = NULL;
#ifdef _WIN32_WCE
- this->colorConvertor565 = NULL;
+ this->colorConvertor565 = NULL;
#else
- this->frameRateFilter = NULL;
+ this->frameRateFilter = NULL;
#endif
- this->nullRendererFilter = NULL;
- this->grabberController = NULL;
- this->mediaController = NULL;
- this->mediaEventController = NULL;
+ this->nullRendererFilter = NULL;
+ this->grabberController = NULL;
+ this->mediaController = NULL;
+ this->mediaEventController = NULL;
- this->streamConfiguration = NULL;
+ this->streamConfiguration = NULL;
- this->running = FALSE;
- this->paused = FALSE;
- this->deviceId = "";
+ this->running = FALSE;
+ this->paused = FALSE;
+ this->deviceId = "";
- *hr = this->createCaptureGraph();
+ *hr = this->createCaptureGraph();
}
DSCaptureGraph::~DSCaptureGraph()
{
- SAFE_RELEASE(this->streamConfiguration);
+ SAFE_RELEASE(this->streamConfiguration);
- SAFE_RELEASE(this->mediaEventController);
- SAFE_RELEASE(this->mediaController);
- SAFE_RELEASE(this->grabberController);
+ SAFE_RELEASE(this->mediaEventController);
+ SAFE_RELEASE(this->mediaController);
+ SAFE_RELEASE(this->grabberController);
#if defined(_WIN32_WCE)
- SAFE_RELEASE(this->colorConvertor565);
+ SAFE_RELEASE(this->colorConvertor565);
#else
#endif
- SAFE_RELEASE(this->nullRendererFilter);
- SAFE_RELEASE(this->sampleGrabberFilter);
- SAFE_RELEASE(this->sourceFilter);
+ SAFE_RELEASE(this->nullRendererFilter);
+ SAFE_RELEASE(this->sampleGrabberFilter);
+ SAFE_RELEASE(this->sourceFilter);
- SAFE_RELEASE(this->graphBuilder);
- SAFE_RELEASE(this->captureGraphBuilder);
+ SAFE_RELEASE(this->graphBuilder);
+ SAFE_RELEASE(this->captureGraphBuilder);
}
HRESULT DSCaptureGraph::setSource(const std::string &devicePath)
{
- HRESULT hr = E_FAIL;
+ HRESULT hr = E_FAIL;
- if (this->sourceFilter){
- this->graphBuilder->RemoveFilter(this->sourceFilter);
- }
+ if (this->sourceFilter) {
+ this->graphBuilder->RemoveFilter(this->sourceFilter);
+ }
- SAFE_RELEASE(this->streamConfiguration);
- SAFE_RELEASE(this->sourceFilter);
+ SAFE_RELEASE(this->streamConfiguration);
+ SAFE_RELEASE(this->sourceFilter);
- // Create the filter
- this->deviceId = devicePath;
- hr = createSourceFilter(&this->deviceId, &this->sourceFilter);
+ // Create the filter
+ this->deviceId = devicePath;
+ hr = createSourceFilter(&this->deviceId, &this->sourceFilter);
- if (this->sourceFilter){
- // Gets the supported formats
- this->supportedFormats.clear();
- getSupportedFormats(this->sourceFilter, &this->supportedFormats);
+ if (this->sourceFilter) {
+ // Gets the supported formats
+ this->supportedFormats.clear();
+ getSupportedFormats(this->sourceFilter, &this->supportedFormats);
- // Query for video stream config
- hr = this->captureGraphBuilder->FindInterface(
- &PIN_CATEGORY_CAPTURE,
- &MEDIATYPE_Video,
- this->sourceFilter,
- IID_IAMStreamConfig,
- reinterpret_cast<void**>(&this->streamConfiguration));
+ // Query for video stream config
+ hr = this->captureGraphBuilder->FindInterface(
+ &PIN_CATEGORY_CAPTURE,
+ &MEDIATYPE_Video,
+ this->sourceFilter,
+ IID_IAMStreamConfig,
+ reinterpret_cast<void**>(&this->streamConfiguration));
- hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_WEBCAM);
- }
+ hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_WEBCAM);
+ }
- return hr;
+ return hr;
}
HRESULT DSCaptureGraph::setParameters(DSCaptureFormat *format, int framerate)
{
- HRESULT hr = E_FAIL;
- AM_MEDIA_TYPE *mediaType = NULL;
+ HRESULT hr = E_FAIL;
+ AM_MEDIA_TYPE *mediaType = NULL;
- if (!this->streamConfiguration) goto bail;
+ if (!this->streamConfiguration) {
+ goto bail;
+ }
- hr = this->streamConfiguration->GetFormat(&mediaType);
- if (FAILED(hr)) goto bail;
+ hr = this->streamConfiguration->GetFormat(&mediaType);
+ if (FAILED(hr)) {
+ goto bail;
+ }
- VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(mediaType->pbFormat);
- BITMAPINFOHEADER* bih = &vih->bmiHeader;
+ VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(mediaType->pbFormat);
+ BITMAPINFOHEADER* bih = &vih->bmiHeader;
- int w = format->getWidth();
- int h = format->getHeight();
+ int w = format->getWidth();
+ int h = format->getHeight();
- bool wN = (bih->biWidth<0);
- bool hN = (bih->biHeight<0);
+ bool wN = (bih->biWidth<0);
+ bool hN = (bih->biHeight<0);
- // DIBS are DWORD aligned
- int data_size = h * ((w * bih->biBitCount + 31) / 32) * 4;
+ // DIBS are DWORD aligned
+ int data_size = h * ((w * bih->biBitCount + 31) / 32) * 4;
- bih->biSize = sizeof(BITMAPINFOHEADER);
- bih->biWidth = w*(wN?-1:1);
- bih->biHeight = h*(hN?-1:1);
- bih->biSizeImage = data_size;
+ bih->biSize = sizeof(BITMAPINFOHEADER);
+ bih->biWidth = w*(wN?-1:1);
+ bih->biHeight = h*(hN?-1:1);
+ bih->biSizeImage = data_size;
- //vih->dwBitRate = framerate * data_size;
- //vih->AvgTimePerFrame = SECONDS_TO_100NS(framerate);
+ //vih->dwBitRate = framerate * data_size;
+ //vih->AvgTimePerFrame = SECONDS_TO_100NS(framerate);
- mediaType->cbFormat = sizeof(VIDEOINFOHEADER);
- //mediaType->lSampleSize = data_size;
- mediaType->subtype = format->getChroma();
+ mediaType->cbFormat = sizeof(VIDEOINFOHEADER);
+ //mediaType->lSampleSize = data_size;
+ mediaType->subtype = format->getChroma();
- hr = this->streamConfiguration->SetFormat(mediaType);
- if (FAILED(hr)) goto bail;
+ hr = this->streamConfiguration->SetFormat(mediaType);
+ if (FAILED(hr)) {
+ goto bail;
+ }
#if defined(_WIN32_WCE)
- hr = this->grabberController->SetFps((int) DS_SECONDS_FROM_100NS(vih->AvgTimePerFrame)/*format->getFramerate()*/, framerate);
- if (FAILED(hr)) goto bail;
- hr = this->grabberController->SetSize(w,h);
+ hr = this->grabberController->SetFps((int) DS_SECONDS_FROM_100NS(vih->AvgTimePerFrame)/*format->getFramerate()*/, framerate);
+ if (FAILED(hr)) {
+ goto bail;
+ }
+ hr = this->grabberController->SetSize(w,h);
#else
- // Set fps using tdshow filter
- hr = this->frameRateFilter->SetFps((int) ((float)vih->AvgTimePerFrame/10000.f)/*format->getFramerate()*/, framerate);
+ // Set fps using tdshow filter
+ hr = this->frameRateFilter->SetFps((int) ((float)vih->AvgTimePerFrame/10000.f)/*format->getFramerate()*/, framerate);
#endif
- if (FAILED(hr)) goto bail;
+ if (FAILED(hr)) {
+ goto bail;
+ }
- this->captureFormat = format;
+ this->captureFormat = format;
bail:
- DeleteMediaType(mediaType);
+ DeleteMediaType(mediaType);
- return hr;
+ return hr;
}
#if defined(_WIN32_WCE)
@@ -178,259 +188,314 @@ bail:
HRESULT DSCaptureGraph::connect()
{
- HRESULT hr;
+ HRESULT hr;
- if (!this->sourceFilter){
- TSK_DEBUG_ERROR("Invalid source filter");
- return E_FAIL;
- }
+ if (!this->sourceFilter) {
+ TSK_DEBUG_ERROR("Invalid source filter");
+ return E_FAIL;
+ }
- if (!this->captureFormat){
- TSK_DEBUG_ERROR("Invalid capture format");
- return E_FAIL;
- }
+ if (!this->captureFormat) {
+ TSK_DEBUG_ERROR("Invalid capture format");
+ return E_FAIL;
+ }
- if (!this->graphBuilder){
- TSK_DEBUG_ERROR("Invalid grash builder");
- return E_FAIL;
- }
+ if (!this->graphBuilder) {
+ TSK_DEBUG_ERROR("Invalid grash builder");
+ return E_FAIL;
+ }
- if (this->captureFormat->isRGB())
- {
+ if (this->captureFormat->isRGB()) {
#if defined(_WIN32_WCE)
- hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565) ; if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
- hr = ConnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter) ; if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
- hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+ hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565) ;
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter) ;
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
#else
- hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
- hr = ConnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
- hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+ hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
#endif
- }
- else
- {
+ }
+ else {
#if defined(_WIN32_WCE)
- hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565) ; if(FAILED(hr))return hr;
- hr = ConnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter) ; if(FAILED(hr))return hr;
- hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr))return hr;
+ hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565) ;
+ if(FAILED(hr)) {
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter) ;
+ if(FAILED(hr)) {
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
#else
- // No convertor needed
- // AVI Decompressor Filter is automatically by the Filter Graph Manager when needed
- hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
- hr = ConnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
- hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
+ // No convertor needed
+ // AVI Decompressor Filter is automatically by the Filter Graph Manager when needed
+ hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
#endif
- }
+ }
- return hr;
+ return hr;
}
HRESULT DSCaptureGraph::disconnect()
{
- HRESULT hr;
+ HRESULT hr;
- if (!this->sourceFilter) {
- return E_FAIL;
- }
+ if (!this->sourceFilter) {
+ return E_FAIL;
+ }
- if (!this->captureFormat) {
- return E_FAIL;
- }
+ if (!this->captureFormat) {
+ return E_FAIL;
+ }
- if (this->captureFormat->isRGB())
- {
+ if (this->captureFormat->isRGB()) {
#if defined(_WIN32_WCE)
- hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565);
- hr = DisconnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter);
- hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565);
+ hr = DisconnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
#else
- hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);
- hr = DisconnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);
- hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
#endif
- }
- else
- {
+ }
+ else {
#if defined(_WIN32_WCE)
- hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565); if(FAILED(hr))return hr;
- hr = DisconnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter); if(FAILED(hr))return hr;
- hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr))return hr;
+ hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorConvertor565);
+ if(FAILED(hr)) {
+ return hr;
+ }
+ hr = DisconnectFilters(this->graphBuilder, this->colorConvertor565, this->sampleGrabberFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
+ hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
#else
- hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);
- hr = DisconnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);
- hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->frameRateFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->frameRateFilter, this->sampleGrabberFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
#endif
- }
+ }
- return hr;
+ return hr;
}
HRESULT DSCaptureGraph::start()
{
- HRESULT hr;
-
- if (isRunning() && !isPaused()) {
- return S_OK;
- }
-
- //this->mediaController->Stop();
-
- hr = this->mediaController ? this->mediaController->Run() : E_POINTER;
- /*if (hr == S_FALSE)
- {
- cerr << "DSCaptureGraph::mediaController->Start() has failed with " << hr << ". Waiting for transition." << endl;
- FILTER_STATE pfs;
- hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
- hr = this->mediaController->Run();
- }*/
-
- if (!SUCCEEDED(hr))
- {
+ HRESULT hr;
+
+ if (isRunning() && !isPaused()) {
+ return S_OK;
+ }
+
+ //this->mediaController->Stop();
+
+ hr = this->mediaController ? this->mediaController->Run() : E_POINTER;
+ /*if (hr == S_FALSE)
+ {
+ cerr << "DSCaptureGraph::mediaController->Start() has failed with " << hr << ". Waiting for transition." << endl;
+ FILTER_STATE pfs;
+ hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
+ hr = this->mediaController->Run();
+ }*/
+
+ if (!SUCCEEDED(hr)) {
#if defined(_WIN32_WCE)
- MessageBox(NULL, _T("Starting DirectShow Graph Failed"), _T("Failure"), MB_OK);
- //assert(1==15);
+ MessageBox(NULL, _T("Starting DirectShow Graph Failed"), _T("Failure"), MB_OK);
+ //assert(1==15);
#endif
- TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Run() has failed with %ld", hr);
- return hr;
- }
- this->running = true;
- return hr;
+ TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Run() has failed with %ld", hr);
+ return hr;
+ }
+ this->running = true;
+ return hr;
}
HRESULT DSCaptureGraph::pause()
{
- HRESULT hr = S_OK;
- if (isRunning()) {
- hr = this->mediaController->Pause();
- if (SUCCEEDED(hr)) {
- this->paused = TRUE;
- }
- }
- return hr;
+ HRESULT hr = S_OK;
+ if (isRunning()) {
+ hr = this->mediaController->Pause();
+ if (SUCCEEDED(hr)) {
+ this->paused = TRUE;
+ }
+ }
+ return hr;
}
HRESULT DSCaptureGraph::stop()
{
- HRESULT hr;
+ HRESULT hr;
#if 0 // Must not
- hr = this->mediaController->Pause();
- if (hr == S_FALSE)
- {
- TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Pause() has failed with %ld. Waiting for transition.", hr);
- FILTER_STATE pfs;
- hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
- }
+ hr = this->mediaController->Pause();
+ if (hr == S_FALSE) {
+ TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Pause() has failed with %ld. Waiting for transition.", hr);
+ FILTER_STATE pfs;
+ hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
+ }
#endif
- hr = this->mediaController->Stop();
- if (!SUCCEEDED(hr))
- {
- TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Stop() has failed with %ld", hr);
- }
- this->running = false;
- this->paused = false;
- return hr;
+ hr = this->mediaController->Stop();
+ if (!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("DSCaptureGraph::mediaController->Stop() has failed with %ld", hr);
+ }
+ this->running = false;
+ this->paused = false;
+ return hr;
}
bool DSCaptureGraph::isRunning()
{
- return this->running;
+ return this->running;
}
bool DSCaptureGraph::isPaused()
{
- return this->paused;
+ return this->paused;
}
HRESULT DSCaptureGraph::getConnectedMediaType(AM_MEDIA_TYPE *mediaType)
{
#if defined(_WIN32_WCE)
- memmove(mediaType, &this->grabberController->GetMediaType(), sizeof(AM_MEDIA_TYPE));
- return S_OK;
+ memmove(mediaType, &this->grabberController->GetMediaType(), sizeof(AM_MEDIA_TYPE));
+ return S_OK;
#else
- return this->grabberController->GetConnectedMediaType(mediaType);
+ return this->grabberController->GetConnectedMediaType(mediaType);
#endif
}
HRESULT DSCaptureGraph::createCaptureGraph()
{
- HRESULT hr;
+ HRESULT hr;
#if defined(_WIN32_WCE)
- // Create capture graph builder
- CHECK_HR(hr = COCREATE(CLSID_CaptureGraphBuilder, IID_ICaptureGraphBuilder2, this->captureGraphBuilder));
- CHECK_HR(hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder));
- CHECK_HR(hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder));
-
- // Create filters
- LPUNKNOWN pUnk1 = NULL, pUnk2 = NULL;
- CHECK_HR(hr = COCREATE(CLSID_Colour, IID_IBaseFilter, this->colorConvertor565));
- this->sampleGrabberFilter = new DSSampleGrabber(FITLER_SAMPLE_GRABBER, pUnk1, &hr); CHECK_HR(hr);
- this->nullRendererFilter = new DSNullFilter(/*FILTER_NULL_RENDERER,*/ pUnk2, &hr); CHECK_HR(hr);
- this->grabberController = (DSSampleGrabber*)(this->sampleGrabberFilter); if (!this->grabberController) CHECK_HR(E_FAIL);
-
- // Add Filters
- CHECK_HR(hr = this->graphBuilder->AddFilter(this->colorConvertor565, FILTER_COLOR_CONVERTOR_565));
- CHECK_HR(hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER));
- CHECK_HR(hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER));
-
- // Find media control
- CHECK_HR(hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController));
-
- // Set callback
- CHECK_HR(hr = this->grabberController->SetCallback(this->grabberCallback));
+ // Create capture graph builder
+ CHECK_HR(hr = COCREATE(CLSID_CaptureGraphBuilder, IID_ICaptureGraphBuilder2, this->captureGraphBuilder));
+ CHECK_HR(hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder));
+ CHECK_HR(hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder));
+
+ // Create filters
+ LPUNKNOWN pUnk1 = NULL, pUnk2 = NULL;
+ CHECK_HR(hr = COCREATE(CLSID_Colour, IID_IBaseFilter, this->colorConvertor565));
+ this->sampleGrabberFilter = new DSSampleGrabber(FITLER_SAMPLE_GRABBER, pUnk1, &hr);
+ CHECK_HR(hr);
+ this->nullRendererFilter = new DSNullFilter(/*FILTER_NULL_RENDERER,*/ pUnk2, &hr);
+ CHECK_HR(hr);
+ this->grabberController = (DSSampleGrabber*)(this->sampleGrabberFilter);
+ if (!this->grabberController) {
+ CHECK_HR(E_FAIL);
+ }
+
+ // Add Filters
+ CHECK_HR(hr = this->graphBuilder->AddFilter(this->colorConvertor565, FILTER_COLOR_CONVERTOR_565));
+ CHECK_HR(hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER));
+ CHECK_HR(hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER));
+
+ // Find media control
+ CHECK_HR(hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController));
+
+ // Set callback
+ CHECK_HR(hr = this->grabberController->SetCallback(this->grabberCallback));
#else
- // Create capture graph builder
- CHECK_HR(hr = COCREATE(CLSID_CaptureGraphBuilder2, IID_ICaptureGraphBuilder2, this->captureGraphBuilder));
+ // Create capture graph builder
+ CHECK_HR(hr = COCREATE(CLSID_CaptureGraphBuilder2, IID_ICaptureGraphBuilder2, this->captureGraphBuilder));
- // Create the graph builder
- CHECK_HR(hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder));
+ // Create the graph builder
+ CHECK_HR(hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder));
- // Initialize the Capture Graph Builder.
- CHECK_HR(hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder));
+ // Initialize the Capture Graph Builder.
+ CHECK_HR(hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder));
- // Create the sample grabber filter
- CHECK_HR(hr = COCREATE(CLSID_SampleGrabber, IID_IBaseFilter, this->sampleGrabberFilter));
+ // Create the sample grabber filter
+ CHECK_HR(hr = COCREATE(CLSID_SampleGrabber, IID_IBaseFilter, this->sampleGrabberFilter));
- // Create tdshow filter
- LPUNKNOWN pUnk = NULL;
- this->frameRateFilter = new DSFrameRateFilter(FILTER_FRAMERATE, pUnk, &hr); CHECK_HR(hr);
- if (!this->frameRateFilter == NULL) CHECK_HR(E_FAIL);
+ // Create tdshow filter
+ LPUNKNOWN pUnk = NULL;
+ this->frameRateFilter = new DSFrameRateFilter(FILTER_FRAMERATE, pUnk, &hr);
+ CHECK_HR(hr);
+ if (!this->frameRateFilter == NULL) {
+ CHECK_HR(E_FAIL);
+ }
- // Create the NULL renderer
- CHECK_HR(hr = COCREATE(CLSID_NullRenderer, IID_IBaseFilter, this->nullRendererFilter));
+ // Create the NULL renderer
+ CHECK_HR(hr = COCREATE(CLSID_NullRenderer, IID_IBaseFilter, this->nullRendererFilter));
- // Add sample grabber to the graph
- CHECK_HR(hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER));
+ // Add sample grabber to the graph
+ CHECK_HR(hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER));
- // Add null renderer to the graph
- CHECK_HR(hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER));
+ // Add null renderer to the graph
+ CHECK_HR(hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER));
- // Add tdshow filter
- CHECK_HR(hr = this->graphBuilder->AddFilter(this->frameRateFilter, FILTER_FRAMERATE));
+ // Add tdshow filter
+ CHECK_HR(hr = this->graphBuilder->AddFilter(this->frameRateFilter, FILTER_FRAMERATE));
- // Find media control
- CHECK_HR(hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController));
+ // Find media control
+ CHECK_HR(hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController));
- // Create the sample grabber
- CHECK_HR(hr = QUERY(this->sampleGrabberFilter, IID_ISampleGrabber, this->grabberController));
+ // Create the sample grabber
+ CHECK_HR(hr = QUERY(this->sampleGrabberFilter, IID_ISampleGrabber, this->grabberController));
- // Set the sample grabber media type (RGB24)
- // TODO : CHECK
- AM_MEDIA_TYPE mt;
- ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
- mt.majortype = MEDIATYPE_Video;
- mt.subtype = MEDIASUBTYPE_RGB24;
- mt.formattype = FORMAT_VideoInfo;
+ // Set the sample grabber media type (RGB24)
+ // TODO : CHECK
+ AM_MEDIA_TYPE mt;
+ ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
+ mt.majortype = MEDIATYPE_Video;
+ mt.subtype = MEDIASUBTYPE_RGB24;
+ mt.formattype = FORMAT_VideoInfo;
- CHECK_HR(hr = this->grabberController->SetMediaType(&mt));
+ CHECK_HR(hr = this->grabberController->SetMediaType(&mt));
- // Set sample grabber media type
- this->grabberController->SetOneShot(FALSE);
- this->grabberController->SetBufferSamples(FALSE);
+ // Set sample grabber media type
+ this->grabberController->SetOneShot(FALSE);
+ this->grabberController->SetBufferSamples(FALSE);
- CHECK_HR(hr = this->grabberController->SetCallback(this->grabberCallback, 1));
+ CHECK_HR(hr = this->grabberController->SetCallback(this->grabberCallback, 1));
#endif
bail:
- return hr;
+ return hr;
}
\ No newline at end of file
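createCaptureGraph() above wires source -> frame-rate filter -> sample grabber -> NULL renderer and configures the grabber for RGB24 callbacks. The sketch below condenses the standard ISampleGrabber setup used in that function; it assumes the deprecated qedit.h interfaces are available and that 'cb' is an ISampleGrabberCB implementation supplied by the caller. Passing 1 to SetCallback selects BufferCB(), the method the plugin's grabber callback implements; 0 would select SampleCB() and hand over the IMediaSample instead.

// Minimal sketch of the ISampleGrabber configuration pattern used in
// DSCaptureGraph::createCaptureGraph(): request RGB24 video, disable
// one-shot/buffering, and deliver frames through callback method 1 (BufferCB).
// Assumes <dshow.h> and <qedit.h>; error handling reduced to early returns.
HRESULT ConfigureGrabber(IGraphBuilder* graph, ISampleGrabberCB* cb,
                         IBaseFilter** grabberFilterOut)
{
    IBaseFilter* filter = NULL;
    HRESULT hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
                                  IID_IBaseFilter, reinterpret_cast<void**>(&filter));
    if (FAILED(hr)) {
        return hr;
    }
    ISampleGrabber* grabber = NULL;
    hr = filter->QueryInterface(IID_ISampleGrabber, reinterpret_cast<void**>(&grabber));
    if (SUCCEEDED(hr)) {
        AM_MEDIA_TYPE mt;
        ZeroMemory(&mt, sizeof(mt));
        mt.majortype = MEDIATYPE_Video;
        mt.subtype = MEDIASUBTYPE_RGB24;
        mt.formattype = FORMAT_VideoInfo;
        hr = grabber->SetMediaType(&mt);
        if (SUCCEEDED(hr)) {
            grabber->SetOneShot(FALSE);
            grabber->SetBufferSamples(FALSE);
            hr = grabber->SetCallback(cb, 1); // 1 => BufferCB() receives the bytes
        }
        if (SUCCEEDED(hr)) {
            hr = graph->AddFilter(filter, L"SampleGrabber");
        }
        grabber->Release();
    }
    if (FAILED(hr)) {
        filter->Release();
        filter = NULL;
    }
    *grabberFilterOut = filter;
    return hr;
}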
diff --git a/plugins/pluginDirectShow/internals/DSCaptureGraph.h b/plugins/pluginDirectShow/internals/DSCaptureGraph.h
index 6bf8862..7c9b674 100755
--- a/plugins/pluginDirectShow/internals/DSCaptureGraph.h
+++ b/plugins/pluginDirectShow/internals/DSCaptureGraph.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -37,70 +37,74 @@ class DSCaptureGraph : public DSBaseCaptureGraph
{
public:
#ifdef _WIN32_WCE
- DSCaptureGraph(DSISampleGrabberCB* callback, HRESULT *hr);
+ DSCaptureGraph(DSISampleGrabberCB* callback, HRESULT *hr);
#else
- DSCaptureGraph(ISampleGrabberCB* callback, HRESULT *hr);
+ DSCaptureGraph(ISampleGrabberCB* callback, HRESULT *hr);
#endif
- virtual ~DSCaptureGraph();
+ virtual ~DSCaptureGraph();
- std::vector<DSCaptureFormat> *getFormats() { return &this->supportedFormats; };
+ std::vector<DSCaptureFormat> *getFormats() {
+ return &this->supportedFormats;
+ };
- HRESULT setSource(const std::string &devicePath);
- HRESULT setParameters(DSCaptureFormat *format, int framerate);
+ HRESULT setSource(const std::string &devicePath);
+ HRESULT setParameters(DSCaptureFormat *format, int framerate);
- HRESULT connect();
- HRESULT disconnect();
+ HRESULT connect();
+ HRESULT disconnect();
- HRESULT start();
- HRESULT stop();
- HRESULT pause();
- bool isRunning();
- bool isPaused();
+ HRESULT start();
+ HRESULT stop();
+ HRESULT pause();
+ bool isRunning();
+ bool isPaused();
- std::string getDeviceId() const { return this->deviceId; };
+ std::string getDeviceId() const {
+ return this->deviceId;
+ };
- HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
+ HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
private:
- HRESULT createCaptureGraph();
+ HRESULT createCaptureGraph();
private:
#ifdef _WIN32_WCE
- DSISampleGrabberCB *grabberCallback;
+ DSISampleGrabberCB *grabberCallback;
#else
- ISampleGrabberCB *grabberCallback;
+ ISampleGrabberCB *grabberCallback;
#endif
- ICaptureGraphBuilder2 *captureGraphBuilder;
- IGraphBuilder *graphBuilder;
+ ICaptureGraphBuilder2 *captureGraphBuilder;
+ IGraphBuilder *graphBuilder;
+
+ IBaseFilter *sourceFilter;
+ IBaseFilter *nullRendererFilter;
+ IBaseFilter *sampleGrabberFilter;
- IBaseFilter *sourceFilter;
- IBaseFilter *nullRendererFilter;
- IBaseFilter *sampleGrabberFilter;
-
#ifdef _WIN32_WCE
- IBaseFilter *colorConvertor565; //http://msdn.microsoft.com/en-us/library/aa926076.aspx
+ IBaseFilter *colorConvertor565; //http://msdn.microsoft.com/en-us/library/aa926076.aspx
#else
- DSFrameRateFilter *frameRateFilter;
+ DSFrameRateFilter *frameRateFilter;
#endif
#ifdef _WIN32_WCE
- DSSampleGrabber *grabberController;
+ DSSampleGrabber *grabberController;
#else
- ISampleGrabber *grabberController;
+ ISampleGrabber *grabberController;
#endif
- IMediaControl *mediaController;
- IMediaEventEx *mediaEventController;
+ IMediaControl *mediaController;
+ IMediaEventEx *mediaEventController;
- IAMStreamConfig *streamConfiguration;
+ IAMStreamConfig *streamConfiguration;
- std::vector<DSCaptureFormat> supportedFormats;
- DSCaptureFormat *captureFormat;
+ std::vector<DSCaptureFormat> supportedFormats;
+ DSCaptureFormat *captureFormat;
- bool running;
- bool paused;
- std::string deviceId;
+ bool running;
+ bool paused;
+ std::string deviceId;
};
#endif
diff --git a/plugins/pluginDirectShow/internals/DSCaptureUtils.cxx b/plugins/pluginDirectShow/internals/DSCaptureUtils.cxx
index d95f996..6c99476 100755
--- a/plugins/pluginDirectShow/internals/DSCaptureUtils.cxx
+++ b/plugins/pluginDirectShow/internals/DSCaptureUtils.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -38,340 +38,367 @@
HRESULT enumerateCaptureDevices(const std::string &prefix, std::vector<VideoGrabberName> *names)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
#ifdef _WIN32_WCE
- // FIXME: use FindNextDevice to query all devices
- HANDLE handle = NULL;
- DEVMGR_DEVICE_INFORMATION di;
-
- TCHAR pwzName[MAX_PATH]; memset(pwzName,NULL,MAX_PATH);
-
- GUID guidCamera = { 0xCB998A05, 0x122C, 0x4166, 0x84, 0x6A,
- 0x93, 0x3E, 0x4D, 0x7E, 0x3C, 0x86 }; // http://msdn.microsoft.com/en-us/library/aa918757.aspx
-
- di.dwSize = sizeof(di);
-
- for( int i=0; ; i++)
- {
- if(0 == i)
- { /* 1st time */
- handle = FindFirstDevice( DeviceSearchByGuid, &guidCamera, &di );
- if(!handle || !di.hDevice)
- {
- hr = ( HRESULT_FROM_WIN32( GetLastError() ));
- goto bail;
- }
- }
- else if(handle)
- { /* 2nd or 3rd time */
- BOOL ret = FindNextDevice(handle, &di);
- if(!ret || !di.hDevice)
- {
- /* No 2nd or 3rd camera ==> do not return error*/
- goto bail;
- }
- }
- else assert(0);
-
- StringCchCopy( pwzName, MAX_PATH, di.szDeviceName );
-
- /* from LPWSTR to LPSTR */
- char mbstr_name[MAX_PATH]; memset(mbstr_name,NULL,MAX_PATH);
- wcstombs(mbstr_name, pwzName, MAX_PATH);
-
- VideoGrabberName grabberName(std::string((const char*)mbstr_name), std::string((const char*)mbstr_name));
- names->push_back(grabberName);
- }
-
+ // FIXME: use FindNextDevice to query all devices
+ HANDLE handle = NULL;
+ DEVMGR_DEVICE_INFORMATION di;
+
+ TCHAR pwzName[MAX_PATH];
+ memset(pwzName,NULL,MAX_PATH);
+
+ GUID guidCamera = { 0xCB998A05, 0x122C, 0x4166, 0x84, 0x6A,
+ 0x93, 0x3E, 0x4D, 0x7E, 0x3C, 0x86
+ }; // http://msdn.microsoft.com/en-us/library/aa918757.aspx
+
+ di.dwSize = sizeof(di);
+
+ for( int i=0; ; i++) {
+ if(0 == i) {
+ /* 1st time */
+ handle = FindFirstDevice( DeviceSearchByGuid, &guidCamera, &di );
+ if(!handle || !di.hDevice) {
+ hr = ( HRESULT_FROM_WIN32( GetLastError() ));
+ goto bail;
+ }
+ }
+ else if(handle) {
+ /* 2nd or 3rd time */
+ BOOL ret = FindNextDevice(handle, &di);
+ if(!ret || !di.hDevice) {
+ /* No 2nd or 3rd camera ==> do not return error*/
+ goto bail;
+ }
+ }
+ else {
+ assert(0);
+ }
+
+ StringCchCopy( pwzName, MAX_PATH, di.szDeviceName );
+
+ /* from LPWSTR to LPSTR */
+ char mbstr_name[MAX_PATH];
+ memset(mbstr_name,NULL,MAX_PATH);
+ wcstombs(mbstr_name, pwzName, MAX_PATH);
+
+ VideoGrabberName grabberName(std::string((const char*)mbstr_name), std::string((const char*)mbstr_name));
+ names->push_back(grabberName);
+ }
+
bail:
- /* close */
- if(handle) FindClose( handle );
-
+ /* close */
+ if(handle) {
+ FindClose( handle );
+ }
+
#else
- ICreateDevEnum *deviceEnum;
- IEnumMoniker *enumerator;
- IMoniker *moniker;
-
- // Create the System Device Enumerator
- hr = COCREATE(CLSID_SystemDeviceEnum, IID_ICreateDevEnum, deviceEnum);
- if (FAILED(hr)) goto bail;
-
- // Ask for a device enumerator
- hr = deviceEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enumerator, INCLUDE_CATEGORY_FLAG);
- if (FAILED(hr)) goto bail;
-
- // hr = S_FALSE and enumerator is NULL if there is no device to enumerate
- if (!enumerator) goto bail;
-
- USES_CONVERSION;
-
- while (enumerator->Next(1, &moniker, NULL) == S_OK)
- {
- // Get the properties bag for each device
- IPropertyBag *propBag;
- hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, reinterpret_cast<void**>(&propBag));
- if (FAILED(hr))
- {
- SAFE_RELEASE(moniker);
- continue;
- }
-
- std::string name;
- std::string description;
-
- VARIANT varName;
- VariantInit(&varName);
- VARIANT varDescription;
- VariantInit(&varDescription);
-
- // Find the device path (uniqueness is guaranteed)
- hr = propBag->Read(L"DevicePath", &varName, 0);
- if (SUCCEEDED(hr))
- {
- if (prefix != "") name = prefix + ":";
- name = name + std::string(W2A(varName.bstrVal));
- }
-
- // Find friendly name or the description
- hr = propBag->Read(L"FriendlyName", &varDescription, 0);
- if (SUCCEEDED(hr))
- {
- description = std::string(W2A(varDescription.bstrVal));
- }
- else
- {
- hr = propBag->Read(L"Description", &varDescription, 0);
- if (SUCCEEDED(hr)) description = std::string(W2A(varDescription.bstrVal));
- }
-
- hr = VariantClear(&varName);
- hr = VariantClear(&varDescription);
-
- SAFE_RELEASE(propBag);
- SAFE_RELEASE(moniker);
-
- // Add it to the list
- if (name != "")
- {
- VideoGrabberName grabberName(name, description);
- names->push_back(grabberName);
- }
- }
+ ICreateDevEnum *deviceEnum;
+ IEnumMoniker *enumerator;
+ IMoniker *moniker;
+
+ // Create the System Device Enumerator
+ hr = COCREATE(CLSID_SystemDeviceEnum, IID_ICreateDevEnum, deviceEnum);
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ // Ask for a device enumerator
+ hr = deviceEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enumerator, INCLUDE_CATEGORY_FLAG);
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ // hr = S_FALSE and enumerator is NULL if there is no device to enumerate
+ if (!enumerator) {
+ goto bail;
+ }
+
+ USES_CONVERSION;
+
+ while (enumerator->Next(1, &moniker, NULL) == S_OK) {
+ // Get the properties bag for each device
+ IPropertyBag *propBag;
+ hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, reinterpret_cast<void**>(&propBag));
+ if (FAILED(hr)) {
+ SAFE_RELEASE(moniker);
+ continue;
+ }
+
+ std::string name;
+ std::string description;
+
+ VARIANT varName;
+ VariantInit(&varName);
+ VARIANT varDescription;
+ VariantInit(&varDescription);
+
+ // Find the device path (uniqueness is guaranteed)
+ hr = propBag->Read(L"DevicePath", &varName, 0);
+ if (SUCCEEDED(hr)) {
+ if (prefix != "") {
+ name = prefix + ":";
+ }
+ name = name + std::string(W2A(varName.bstrVal));
+ }
+
+ // Find friendly name or the description
+ hr = propBag->Read(L"FriendlyName", &varDescription, 0);
+ if (SUCCEEDED(hr)) {
+ description = std::string(W2A(varDescription.bstrVal));
+ }
+ else {
+ hr = propBag->Read(L"Description", &varDescription, 0);
+ if (SUCCEEDED(hr)) {
+ description = std::string(W2A(varDescription.bstrVal));
+ }
+ }
+
+ hr = VariantClear(&varName);
+ hr = VariantClear(&varDescription);
+
+ SAFE_RELEASE(propBag);
+ SAFE_RELEASE(moniker);
+
+ // Add it to the list
+ if (name != "") {
+ VideoGrabberName grabberName(name, description);
+ names->push_back(grabberName);
+ }
+ }
bail:
- SAFE_RELEASE(enumerator);
- SAFE_RELEASE(deviceEnum);
+ SAFE_RELEASE(enumerator);
+ SAFE_RELEASE(deviceEnum);
#endif
- return hr;
+ return hr;
}
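For reference, the enumeration flow above (CLSID_SystemDeviceEnum -> CreateClassEnumerator -> IPropertyBag reads) can be exercised on its own. The following is a minimal sketch, not part of the plugin: it assumes COM is already initialized and uses ATL smart pointers (CComPtr/CComVariant) instead of the SAFE_RELEASE/VariantClear bookkeeping, purely to show the same steps in condensed form.

#include <dshow.h>
#include <atlbase.h>   // CComPtr, CComVariant
#include <string>
#include <vector>

// Sketch: collect the friendly names of all video capture devices.
static HRESULT listVideoCaptureNames(std::vector<std::wstring>& names)
{
    CComPtr<ICreateDevEnum> devEnum;
    HRESULT hr = devEnum.CoCreateInstance(CLSID_SystemDeviceEnum);
    if (FAILED(hr)) {
        return hr;
    }
    CComPtr<IEnumMoniker> enumerator;
    hr = devEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enumerator, 0);
    if (hr != S_OK) {   // S_FALSE means the category is empty
        return hr;
    }
    CComPtr<IMoniker> moniker;
    while (enumerator->Next(1, &moniker, NULL) == S_OK) {
        CComPtr<IPropertyBag> propBag;
        if (SUCCEEDED(moniker->BindToStorage(0, 0, IID_IPropertyBag, reinterpret_cast<void**>(&propBag)))) {
            CComVariant var;
            if (SUCCEEDED(propBag->Read(L"FriendlyName", &var, 0)) && var.vt == VT_BSTR && var.bstrVal) {
                names.push_back(var.bstrVal);
            }
        }
        moniker.Release();   // must be NULL again before the next Next() call
    }
    return S_OK;
}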
HRESULT createSourceFilter(std::string *devicePath, IBaseFilter **sourceFilter)
{
- HRESULT hr;
-
- IEnumMoniker *enumerator = NULL;
- IMoniker *moniker = NULL;
- bool found = false;
+ HRESULT hr;
- // Set sourceFilter to null
- SAFE_RELEASE((*sourceFilter));
+ IEnumMoniker *enumerator = NULL;
+ IMoniker *moniker = NULL;
+ bool found = false;
+
+ // Set sourceFilter to null
+ SAFE_RELEASE((*sourceFilter));
#if defined( _WIN32_WCE)
- CPropertyBag pBag;
- HANDLE handle = NULL;
- DEVMGR_DEVICE_INFORMATION di;
- TCHAR pwzName[MAX_PATH];
- CComVariant varCamName;
- IPersistPropertyBag *propBag = NULL;
- GUID guidCamera = { 0xCB998A05, 0x122C, 0x4166, 0x84, 0x6A,
- 0x93, 0x3E, 0x4D, 0x7E, 0x3C, 0x86 }; // http://msdn.microsoft.com/en-us/library/aa918757.aspx
-
- di.dwSize = sizeof(di);
-
- for( int i=0; ; i++)
- {
- if(0 == i)
- { /* 1st time */
- handle = FindFirstDevice( DeviceSearchByGuid, &guidCamera, &di );
- if(!handle || !di.hDevice)
- {
- hr = ( HRESULT_FROM_WIN32( GetLastError() ));
- goto bail;
- }
- }
- else if(handle)
- { /* 2nd or 3rd time */
- BOOL ret = FindNextDevice(handle, &di);
- if(!ret || !di.hDevice)
- {
- /* No 2nd or 3rd camera ==> do not return error*/
- goto bail;
- }
- }
- else assert(0);
-
- StringCchCopy( pwzName, MAX_PATH, di.szDeviceName );
-
- /* from LPWSTR to LPSTR */
- char mbstr_name[MAX_PATH];
- memset(mbstr_name,NULL,MAX_PATH);
- wcstombs(mbstr_name, pwzName, MAX_PATH);
-
- if((std::string((const char*)mbstr_name) == (*devicePath)) || ("0" == (*devicePath)))
- {
- varCamName = pwzName;
- if( varCamName.vt != VT_BSTR )
- {
- hr = E_OUTOFMEMORY;
- goto bail;
- }
-
- // Create Source filter
- hr = COCREATE(CLSID_VideoCapture, IID_IBaseFilter, *sourceFilter);
- if(FAILED(hr)) goto bail;
-
- // Query PropertyBag
- hr = QUERY((*sourceFilter), IID_IPersistPropertyBag, propBag);
- if(FAILED(hr)) goto bail;
-
- hr = pBag.Write( L"VCapName", &varCamName );
- if(FAILED(hr)) goto bail;
-
- hr = propBag->Load( &pBag, NULL );
- if(FAILED(hr)) goto bail;
- }
- }
+ CPropertyBag pBag;
+ HANDLE handle = NULL;
+ DEVMGR_DEVICE_INFORMATION di;
+ TCHAR pwzName[MAX_PATH];
+ CComVariant varCamName;
+ IPersistPropertyBag *propBag = NULL;
+ GUID guidCamera = { 0xCB998A05, 0x122C, 0x4166, 0x84, 0x6A,
+ 0x93, 0x3E, 0x4D, 0x7E, 0x3C, 0x86
+ }; // http://msdn.microsoft.com/en-us/library/aa918757.aspx
+
+ di.dwSize = sizeof(di);
+
+ for( int i=0; ; i++) {
+ if(0 == i) {
+ /* 1st time */
+ handle = FindFirstDevice( DeviceSearchByGuid, &guidCamera, &di );
+ if(!handle || !di.hDevice) {
+ hr = ( HRESULT_FROM_WIN32( GetLastError() ));
+ goto bail;
+ }
+ }
+ else if(handle) {
+ /* 2nd or 3rd time */
+ BOOL ret = FindNextDevice(handle, &di);
+ if(!ret || !di.hDevice) {
+ /* No 2nd or 3rd camera ==> do not return error*/
+ goto bail;
+ }
+ }
+ else {
+ assert(0);
+ }
+
+ StringCchCopy( pwzName, MAX_PATH, di.szDeviceName );
+
+ /* from LPWSTR to LPSTR */
+ char mbstr_name[MAX_PATH];
+ memset(mbstr_name, 0, MAX_PATH);
+ wcstombs(mbstr_name, pwzName, MAX_PATH);
+
+ if((std::string((const char*)mbstr_name) == (*devicePath)) || ("0" == (*devicePath))) {
+ varCamName = pwzName;
+ if( varCamName.vt != VT_BSTR ) {
+ hr = E_OUTOFMEMORY;
+ goto bail;
+ }
+
+ // Create Source filter
+ hr = COCREATE(CLSID_VideoCapture, IID_IBaseFilter, *sourceFilter);
+ if(FAILED(hr)) {
+ goto bail;
+ }
+
+ // Query PropertyBag
+ hr = QUERY((*sourceFilter), IID_IPersistPropertyBag, propBag);
+ if(FAILED(hr)) {
+ goto bail;
+ }
+
+ hr = pBag.Write( L"VCapName", &varCamName );
+ if(FAILED(hr)) {
+ goto bail;
+ }
+
+ hr = propBag->Load( &pBag, NULL );
+ if(FAILED(hr)) {
+ goto bail;
+ }
+ }
+ }
#else
- ICreateDevEnum *deviceEnum = NULL;
- IPropertyBag *propBag = NULL;
-
- // Create the System Device Enumerator
- hr = COCREATE(CLSID_SystemDeviceEnum, IID_ICreateDevEnum, deviceEnum);
- if (FAILED(hr)){
- goto bail;
- }
-
- // Ask for a device enumerator
- hr = deviceEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enumerator, INCLUDE_CATEGORY_FLAG);
- if(FAILED(hr)){
- goto bail;
- }
-
- // hr = S_FALSE and enumerator is NULL if there is no device to enumerate
- if(!enumerator){
- goto bail;
- }
-
- USES_CONVERSION;
-
- while (!found && (enumerator->Next(1, &moniker, NULL) == S_OK)){
- // Get the properties bag for each device
- hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, reinterpret_cast<void**>(&propBag));
- if (FAILED(hr)){
- SAFE_RELEASE(moniker);
- continue;
- }
-
- std::string name;
-
- VARIANT varName;
- VariantInit(&varName);
-
- // Find the device path (uniqueness is guaranteed)
- hr = propBag->Read(L"DevicePath", &varName, 0);
- if (SUCCEEDED(hr)) name = std::string(W2A(varName.bstrVal));
-
- // Check for device path
- // "Null" means first found
- if ((name == (*devicePath)) ||
- ("Null" == (*devicePath)))
- {
- hr = moniker->BindToObject(0, 0, IID_IBaseFilter, reinterpret_cast<void**>(&(*sourceFilter)));
- if (SUCCEEDED(hr)){
- (*devicePath) = name;
- found = true;
- }
- }
-
- hr = VariantClear(&varName);
-
- SAFE_RELEASE(propBag);
- SAFE_RELEASE(moniker);
- }
+ ICreateDevEnum *deviceEnum = NULL;
+ IPropertyBag *propBag = NULL;
+
+ // Create the System Device Enumerator
+ hr = COCREATE(CLSID_SystemDeviceEnum, IID_ICreateDevEnum, deviceEnum);
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ // Ask for a device enumerator
+ hr = deviceEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enumerator, INCLUDE_CATEGORY_FLAG);
+ if(FAILED(hr)) {
+ goto bail;
+ }
+
+ // hr = S_FALSE and enumerator is NULL if there is no device to enumerate
+ if(!enumerator) {
+ goto bail;
+ }
+
+ USES_CONVERSION;
+
+ while (!found && (enumerator->Next(1, &moniker, NULL) == S_OK)) {
+ // Get the properties bag for each device
+ hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, reinterpret_cast<void**>(&propBag));
+ if (FAILED(hr)) {
+ SAFE_RELEASE(moniker);
+ continue;
+ }
+
+ std::string name;
+
+ VARIANT varName;
+ VariantInit(&varName);
+
+ // Find the device path (uniqueness is guaranteed)
+ hr = propBag->Read(L"DevicePath", &varName, 0);
+ if (SUCCEEDED(hr)) {
+ name = std::string(W2A(varName.bstrVal));
+ }
+
+ // Check for device path
+ // "Null" means first found
+ if ((name == (*devicePath)) ||
+ ("Null" == (*devicePath))) {
+ hr = moniker->BindToObject(0, 0, IID_IBaseFilter, reinterpret_cast<void**>(&(*sourceFilter)));
+ if (SUCCEEDED(hr)) {
+ (*devicePath) = name;
+ found = true;
+ }
+ }
+
+ hr = VariantClear(&varName);
+
+ SAFE_RELEASE(propBag);
+ SAFE_RELEASE(moniker);
+ }
#endif
bail:
#ifdef _WIN32_WCE
- if(handle) FindClose(handle);
+ if(handle) {
+ FindClose(handle);
+ }
#else
- SAFE_RELEASE(deviceEnum);
+ SAFE_RELEASE(deviceEnum);
#endif
- SAFE_RELEASE(moniker);
- SAFE_RELEASE(enumerator);
- SAFE_RELEASE(propBag);
+ SAFE_RELEASE(moniker);
+ SAFE_RELEASE(enumerator);
+ SAFE_RELEASE(propBag);
- return hr;
+ return hr;
}
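As a usage note, createSourceFilter() is normally paired with getSupportedFormats() below: bind the source filter for a given device path (or pass "Null" to take the first device found), then query its capabilities. A rough sketch, assuming COM is initialized and leaving the capture graph itself aside:

std::string devicePath = "Null";   // "Null" => first device found
IBaseFilter *source = NULL;
std::vector<DSCaptureFormat> formats;

HRESULT hr = createSourceFilter(&devicePath, &source);
if (SUCCEEDED(hr) && source) {
    // devicePath now holds the unique path of the bound device
    hr = getSupportedFormats(source, &formats);
}
SAFE_RELEASE(source);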
HRESULT getSupportedFormats(IBaseFilter *sourceFilter, std::vector<DSCaptureFormat> *formats)
{
- HRESULT hr = E_FAIL;
- IPin *pinOut = NULL;
- IAMStreamConfig *streamConfig = NULL;
- AM_MEDIA_TYPE *mediaType = NULL;
- int count, size;
-
- // Check source filter pointer
- if (!sourceFilter) goto bail;
-
- pinOut = GetPin(sourceFilter, PINDIR_OUTPUT);
- if(!pinOut) goto bail;
-
- // Retrieve the stream config interface
- hr = QUERY(pinOut, IID_IAMStreamConfig, streamConfig);
- if (FAILED(hr)) goto bail;
-
- // Get the number of capabilities
- hr = streamConfig->GetNumberOfCapabilities(&count, &size);
- if (FAILED(hr)) goto bail;
-
- hr = streamConfig->GetFormat(&mediaType);
- if (FAILED(hr)) goto bail;
-
- // Iterate through the formats
- for (int i = 0; i < count; i++){
- VIDEO_STREAM_CONFIG_CAPS streamConfigCaps;
-
- hr = streamConfig->GetStreamCaps(i, &mediaType, reinterpret_cast<BYTE*>(&streamConfigCaps));
-
- if (FAILED(hr)){
- TSK_DEBUG_ERROR("Failed to get Stream caps");
- break;
- }
-
- if (streamConfigCaps.guid == FORMAT_VideoInfo){
- VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(mediaType->pbFormat);
- BITMAPINFOHEADER* bih = &vih->bmiHeader;
-
- int width = abs(bih->biWidth);
- int height = abs(bih->biHeight);
- int fps = (int) ((float)(vih->AvgTimePerFrame)/10000.f);
- GUID chroma = mediaType->subtype;
-
- // Add format to the list
- DSCaptureFormat format(width, height, fps, chroma);
- formats->push_back(format);
- }
-
- DeleteMediaType(mediaType);
- }
+ HRESULT hr = E_FAIL;
+ IPin *pinOut = NULL;
+ IAMStreamConfig *streamConfig = NULL;
+ AM_MEDIA_TYPE *mediaType = NULL;
+ int count, size;
+
+ // Check source filter pointer
+ if (!sourceFilter) {
+ goto bail;
+ }
+
+ pinOut = GetPin(sourceFilter, PINDIR_OUTPUT);
+ if(!pinOut) {
+ goto bail;
+ }
+
+ // Retrieve the stream config interface
+ hr = QUERY(pinOut, IID_IAMStreamConfig, streamConfig);
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ // Get the number of capabilities
+ hr = streamConfig->GetNumberOfCapabilities(&count, &size);
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ hr = streamConfig->GetFormat(&mediaType);
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ // Iterate through the formats
+ for (int i = 0; i < count; i++) {
+ VIDEO_STREAM_CONFIG_CAPS streamConfigCaps;
+
+ hr = streamConfig->GetStreamCaps(i, &mediaType, reinterpret_cast<BYTE*>(&streamConfigCaps));
+
+ if (FAILED(hr)) {
+ TSK_DEBUG_ERROR("Failed to get Stream caps");
+ break;
+ }
+
+ if (streamConfigCaps.guid == FORMAT_VideoInfo) {
+ VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(mediaType->pbFormat);
+ BITMAPINFOHEADER* bih = &vih->bmiHeader;
+
+ int width = abs(bih->biWidth);
+ int height = abs(bih->biHeight);
+ int fps = (int) ((float)(vih->AvgTimePerFrame)/10000.f);
+ GUID chroma = mediaType->subtype;
+
+ // Add format to the list
+ DSCaptureFormat format(width, height, fps, chroma);
+ formats->push_back(format);
+ }
+
+ DeleteMediaType(mediaType);
+ }
bail:
- SAFE_RELEASE(streamConfig);
- SAFE_RELEASE(pinOut);
+ SAFE_RELEASE(streamConfig);
+ SAFE_RELEASE(pinOut);
- return hr;
+ return hr;
}
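getSupportedFormats() only reads the capabilities; applying one of them back to the device follows the same GetStreamCaps() walk but ends with IAMStreamConfig::SetFormat(). The helper below is a sketch and is not part of this codebase; it assumes the IAMStreamConfig pointer was obtained from the capture pin as above.

// Sketch: apply the first capability matching the requested frame size.
static HRESULT applyCaptureSize(IAMStreamConfig *streamConfig, int wantedWidth, int wantedHeight)
{
    int count = 0, size = 0;
    HRESULT hr = streamConfig->GetNumberOfCapabilities(&count, &size);
    if (FAILED(hr)) {
        return hr;
    }
    for (int i = 0; i < count; i++) {
        AM_MEDIA_TYPE *mediaType = NULL;
        VIDEO_STREAM_CONFIG_CAPS caps;
        hr = streamConfig->GetStreamCaps(i, &mediaType, reinterpret_cast<BYTE*>(&caps));
        if (FAILED(hr)) {
            return hr;
        }
        if (mediaType->formattype == FORMAT_VideoInfo && mediaType->pbFormat) {
            VIDEOINFOHEADER *vih = reinterpret_cast<VIDEOINFOHEADER*>(mediaType->pbFormat);
            if (abs(vih->bmiHeader.biWidth) == wantedWidth && abs(vih->bmiHeader.biHeight) == wantedHeight) {
                hr = streamConfig->SetFormat(mediaType);   // device switches to this media type
                DeleteMediaType(mediaType);
                return hr;
            }
        }
        DeleteMediaType(mediaType);
    }
    return E_FAIL;   // no capability with the requested size
}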
diff --git a/plugins/pluginDirectShow/internals/DSCaptureUtils.h b/plugins/pluginDirectShow/internals/DSCaptureUtils.h
index 7d99ed2..f7d6b60 100755
--- a/plugins/pluginDirectShow/internals/DSCaptureUtils.h
+++ b/plugins/pluginDirectShow/internals/DSCaptureUtils.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/pluginDirectShow/internals/DSDibHelper.cxx b/plugins/pluginDirectShow/internals/DSDibHelper.cxx
index b7f40d5..3b02ae2 100755
--- a/plugins/pluginDirectShow/internals/DSDibHelper.cxx
+++ b/plugins/pluginDirectShow/internals/DSDibHelper.cxx
@@ -22,11 +22,12 @@ HBITMAP CopyScreenToBitmap(LPRECT lpRect, BYTE *pData, BITMAPINFO *pHeader)
int xScrn, yScrn; // screen resolution
// check for an empty rectangle
- if (IsRectEmpty(lpRect))
- return NULL;
+ if (IsRectEmpty(lpRect)) {
+ return NULL;
+ }
// create a DC for the screen and create
- // a memory DC compatible to screen DC
+ // a memory DC compatible with the screen DC
hScrDC = CreateDC(TEXT("DISPLAY"), NULL, NULL, NULL);
hMemDC = CreateCompatibleDC(hScrDC);
@@ -41,14 +42,18 @@ HBITMAP CopyScreenToBitmap(LPRECT lpRect, BYTE *pData, BITMAPINFO *pHeader)
yScrn = GetDeviceCaps(hScrDC, VERTRES);
//make sure bitmap rectangle is visible
- if (nX < 0)
+ if (nX < 0) {
nX = 0;
- if (nY < 0)
+ }
+ if (nY < 0) {
nY = 0;
- if (nX2 > xScrn)
+ }
+ if (nX2 > xScrn) {
nX2 = xScrn;
- if (nY2 > yScrn)
+ }
+ if (nY2 > yScrn) {
nY2 = yScrn;
+ }
nWidth = nX2 - nX;
nHeight = nY2 - nY;
@@ -63,7 +68,7 @@ HBITMAP CopyScreenToBitmap(LPRECT lpRect, BYTE *pData, BITMAPINFO *pHeader)
BitBlt(hMemDC, 0, 0, nWidth, nHeight, hScrDC, nX, nY, SRCCOPY);
// select old bitmap back into memory DC and get handle to
- // bitmap of the screen
+ // bitmap of the screen
hBitmap = (HBITMAP) SelectObject(hMemDC, hOldBitmap);
// Copy the bitmap data into the provided BYTE buffer
diff --git a/plugins/pluginDirectShow/internals/DSDisplay.cxx b/plugins/pluginDirectShow/internals/DSDisplay.cxx
index 326b86c..5cc1b04 100755
--- a/plugins/pluginDirectShow/internals/DSDisplay.cxx
+++ b/plugins/pluginDirectShow/internals/DSDisplay.cxx
@@ -2,19 +2,19 @@
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@@ -36,12 +36,11 @@ using namespace std;
#define FSCREEN_MIN_IDEAL_WIDTH 352
#define FSCREEN_MIN_IDEAL_HEIGHT 288
-typedef struct tdshow_display_s
-{
- TSK_DECLARE_OBJECT;
+typedef struct tdshow_display_s {
+ TSK_DECLARE_OBJECT;
- HWND hwnd;
- DSDisplay* display;
+ HWND hwnd;
+ DSDisplay* display;
}
tdshow_display_t;
typedef tsk_list_t tdshow_displays_L_t;
@@ -53,504 +52,501 @@ static tdshow_displays_L_t* __directshow__Displays = tsk_null;
/*== Predicate function to find tdshow_display_t object by HWND. */
static int __pred_find_display_by_hwnd(const tsk_list_item_t *item, const void *hWnd)
{
- if(item && item->data){
- const tdshow_display_t *display = (const tdshow_display_t *)item->data;
- int ret = 0;
- tsk_subsat_int32_ptr(display->hwnd, *((HWND*)hWnd), &ret);
- return ret;
- }
- return -1;
+ if(item && item->data) {
+ const tdshow_display_t *display = (const tdshow_display_t *)item->data;
+ int ret = 0;
+ tsk_subsat_int32_ptr(display->hwnd, *((HWND*)hWnd), &ret);
+ return ret;
+ }
+ return -1;
}
// C Callback that dispatch event to the right display
static LRESULT CALLBACK __directshow__WndProcWindow(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- LRESULT result = FALSE;
- BOOL resultSet = FALSE;
-
- if(__directshow__Displays){
- tsk_list_lock(__directshow__Displays);
+ LRESULT result = FALSE;
+ BOOL resultSet = FALSE;
+
+ if(__directshow__Displays) {
+ tsk_list_lock(__directshow__Displays);
- const tdshow_display_t *display = (const tdshow_display_t *)tsk_list_find_object_by_pred(__directshow__Displays, __pred_find_display_by_hwnd, &hWnd);
- if((resultSet = (display && display->display))){
- result = display->display->handleEvents(hWnd, uMsg, wParam, lParam);
- }
+ const tdshow_display_t *display = (const tdshow_display_t *)tsk_list_find_object_by_pred(__directshow__Displays, __pred_find_display_by_hwnd, &hWnd);
+ if((resultSet = (display && display->display))) {
+ result = display->display->handleEvents(hWnd, uMsg, wParam, lParam);
+ }
- tsk_list_unlock(__directshow__Displays);
- }
+ tsk_list_unlock(__directshow__Displays);
+ }
- return resultSet ? result : DefWindowProc(hWnd, uMsg, wParam, lParam);
+ return resultSet ? result : DefWindowProc(hWnd, uMsg, wParam, lParam);
}
DSDisplay::DSDisplay(HRESULT *hr)
{
- this->window = NULL;
- this->parentWindowProc = NULL;
- this->hooked = false;
- this->fullscreen = false;
- this->bPluginFirefox = false;
- this->top = 0;
- this->left = 0;
- this->width = this->imgWidth = 176;
- this->height = this->imgHeight = 144;
- this->fps = 15;
-
- this->graph = new DSDisplayGraph(hr);
- if (FAILED(*hr)) return;
+ this->window = NULL;
+ this->parentWindowProc = NULL;
+ this->hooked = false;
+ this->fullscreen = false;
+ this->bPluginFirefox = false;
+ this->top = 0;
+ this->left = 0;
+ this->width = this->imgWidth = 176;
+ this->height = this->imgHeight = 144;
+ this->fps = 15;
+
+ this->graph = new DSDisplayGraph(hr);
+ if (FAILED(*hr)) {
+ return;
+ }
#if USE_OVERLAY
- this->overlay = new DSDisplayOverlay();
+ this->overlay = new DSDisplayOverlay();
#else
- this->overlay = NULL;
+ this->overlay = NULL;
#endif
-
- this->graph->getVideoWindow()->put_Visible(OAFALSE);
+
+ this->graph->getVideoWindow()->put_Visible(OAFALSE);
}
DSDisplay::~DSDisplay()
{
- this->unhook();
+ this->unhook();
- SAFE_DELETE_PTR(this->overlay);
- SAFE_DELETE_PTR(this->graph);
+ SAFE_DELETE_PTR(this->overlay);
+ SAFE_DELETE_PTR(this->graph);
}
void DSDisplay::start()
{
- if (!this->graph->isRunning()){
- this->hook();
- }
- if (!this->graph->isRunning() || this->graph->isPaused()){
- this->graph->start();
- }
- this->graph->getVideoWindow()->put_Visible(OATRUE);
+ if (!this->graph->isRunning()) {
+ this->hook();
+ }
+ if (!this->graph->isRunning() || this->graph->isPaused()) {
+ this->graph->start();
+ }
+ this->graph->getVideoWindow()->put_Visible(OATRUE);
}
void DSDisplay::pause()
{
- this->graph->pause();
+ this->graph->pause();
}
void DSDisplay::stop()
{
- if (this->graph->isRunning()){
- this->setFullscreen(false);
+ if (this->graph->isRunning()) {
+ this->setFullscreen(false);
- this->graph->stop();
- this->unhook();
- }
+ this->graph->stop();
+ this->unhook();
+ }
}
void DSDisplay::attach(INT64 parent)
{
- this->attach((void*)parent);
+ this->attach((void*)parent);
}
void DSDisplay::attach(void *parent)
{
- // Don't reattach if this is the same parent
- if (this->isAttached() && parent){
- HWND hwnd = reinterpret_cast<HWND>(parent);
- if (hwnd != this->window){
- this->detach();
- }
- }
-
- // Gets the handle of the parent
- this->window = reinterpret_cast<HWND>(parent);
- // Hook to the parent WindowProc
- this->hook();
+ // Don't reattach if this is the same parent
+ if (this->isAttached() && parent) {
+ HWND hwnd = reinterpret_cast<HWND>(parent);
+ if (hwnd != this->window) {
+ this->detach();
+ }
+ }
+
+ // Gets the handle of the parent
+ this->window = reinterpret_cast<HWND>(parent);
+ // Hook to the parent WindowProc
+ this->hook();
#if USE_OVERLAY
- // Allows the overlay to initialize
- this->overlay->attach(this->window, this->graph);
+ // Allows the overlay to initialize
+ this->overlay->attach(this->window, this->graph);
#endif
}
void DSDisplay::detach(void *parent)
{
- // The detach action is only valid and if this is the same parent
- if (parent){
- HWND hwnd = reinterpret_cast<HWND>(parent);
- if (hwnd == this->window){
- this->detach();
- }
- }
+ // The detach action is only valid if this is the same parent
+ if (parent) {
+ HWND hwnd = reinterpret_cast<HWND>(parent);
+ if (hwnd == this->window) {
+ this->detach();
+ }
+ }
}
void DSDisplay::detach()
{
- if (!this->isAttached()){
- return;
- }
+ if (!this->isAttached()) {
+ return;
+ }
#if USE_OVERLAY
- // Clean up overlay
- this->overlay->detach();
+ // Clean up overlay
+ this->overlay->detach();
#endif
- // Unhook from the parent WindowProc
- this->unhook();
+ // Unhook from the parent WindowProc
+ this->unhook();
- // Set the handle of the parent to NULL
- this->window = NULL;
+ // Set the handle of the parent to NULL
+ this->window = NULL;
}
bool DSDisplay::isAttached()
{
- return (this->window != NULL);
+ return (this->window != NULL);
}
int DSDisplay::getWidth()
{
- return this->width;
+ return this->width;
}
int DSDisplay::getHeight()
{
- return this->height;
+ return this->height;
}
void DSDisplay::setSize(int w, int h)
{
- //this->width = w;
- //this->height = h;
+ //this->width = w;
+ //this->height = h;
- if (!this->fullscreen){
- this->graph->setImageFormat(w, h);
- if(this->hooked){
+ if (!this->fullscreen) {
+ this->graph->setImageFormat(w, h);
+ if(this->hooked) {
#if 0
- #if defined(VMR9_WINDOWLESS)
- RECT rc;
- SetRect(&rc, 0, 0, w, h);
- this->graph->getWindowlessControl()->SetVideoPosition(&rc, &rc);
- #else
- this->graph->getVideoWindow()->SetWindowPosition(0, 0, this->width , this->height);
- #endif
+#if defined(VMR9_WINDOWLESS)
+ RECT rc;
+ SetRect(&rc, 0, 0, w, h);
+ this->graph->getWindowlessControl()->SetVideoPosition(&rc, &rc);
+#else
+ this->graph->getVideoWindow()->SetWindowPosition(0, 0, this->width , this->height);
#endif
- }
- }
+#endif
+ }
+ }
}
void DSDisplay::applyRatio(RECT rect)
{
- long w = rect.right - rect.left;
- long h = rect.bottom - rect.top;
- float ratio = ((float)this->imgWidth/(float)this->imgHeight);
- // (w/h)=ratio =>
- // 1) h=w/ratio
- // and
- // 2) w=h*ratio
- this->width = (int)(w/ratio) > h ? (int)(h * ratio) : w;
- this->height = (int)(this->width/ratio) > h ? h : (int)(this->width/ratio);
- this->left = ((w - this->width) >> 1);
- this->top = ((h - this->height) >> 1);
+ long w = rect.right - rect.left;
+ long h = rect.bottom - rect.top;
+ float ratio = ((float)this->imgWidth/(float)this->imgHeight);
+ // (w/h)=ratio =>
+ // 1) h=w/ratio
+ // and
+ // 2) w=h*ratio
+ this->width = (int)(w/ratio) > h ? (int)(h * ratio) : w;
+ this->height = (int)(this->width/ratio) > h ? h : (int)(this->width/ratio);
+ this->left = ((w - this->width) >> 1);
+ this->top = ((h - this->height) >> 1);
}
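To make the ratio math concrete: with a 640x480 parent rectangle and the default 176x144 image, ratio is about 1.222. Since 640/ratio (about 523) exceeds 480, the code clamps width to 480*ratio = 586 and height to 586/ratio = 479 (integer truncation), so left = (640 - 586)/2 = 27 and top = 0; the video fills the window vertically and is pillarboxed horizontally.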
bool DSDisplay::isFullscreen()
{
#if defined(VMR9_WINDOWLESS)
- // TODO
+ // TODO
#else
- long result;
- HRESULT hr = this->graph->getVideoWindow()->get_FullScreenMode(&result);
- if (SUCCEEDED(hr)){
- this->fullscreen = (result == OATRUE);
- }
- else{
- TSK_DEBUG_ERROR("get_FullScreenMode failed with %ld", hr);
- this->fullscreen = FALSE;
- }
+ long result;
+ HRESULT hr = this->graph->getVideoWindow()->get_FullScreenMode(&result);
+ if (SUCCEEDED(hr)) {
+ this->fullscreen = (result == OATRUE);
+ }
+ else {
+ TSK_DEBUG_ERROR("get_FullScreenMode failed with %ld", hr);
+ this->fullscreen = FALSE;
+ }
#endif
- return this->fullscreen;
+ return this->fullscreen;
}
void DSDisplay::setFullscreen(bool value)
{
- if(!this->canFullscreen()){
- TSK_DEBUG_WARN("Cannot fullscreen");
- return;
- }
+ if(!this->canFullscreen()) {
+ TSK_DEBUG_WARN("Cannot fullscreen");
+ return;
+ }
- HRESULT hr;
+ HRESULT hr;
#if defined(VMR9_WINDOWLESS)
- // TODO
+ // TODO
#else
- if (this->isFullscreen() == value){
- return;
- }
+ if (this->isFullscreen() == value) {
+ return;
+ }
- hr = this->graph->getVideoWindow()->put_FullScreenMode(value ? OATRUE : OAFALSE);
- if (SUCCEEDED(hr)){
- this->fullscreen = value;
+ hr = this->graph->getVideoWindow()->put_FullScreenMode(value ? OATRUE : OAFALSE);
+ if (SUCCEEDED(hr)) {
+ this->fullscreen = value;
#if USE_OVERLAY
- this->overlay->show(this->fullscreen ? (OVERLAY_TIMEOUT * this->graph->getDisplayFps()) : 0);
+ this->overlay->show(this->fullscreen ? (OVERLAY_TIMEOUT * this->graph->getDisplayFps()) : 0);
#endif
- }
- else{
- TSK_DEBUG_ERROR("put_FullScreenMode failed with %ld", hr);
- }
+ }
+ else {
+ TSK_DEBUG_ERROR("put_FullScreenMode failed with %ld", hr);
+ }
#endif
}
void DSDisplay::setPluginFirefox(bool value)
{
- bPluginFirefox = value;
+ bPluginFirefox = value;
}
bool DSDisplay::canFullscreen()
{
#if defined(VMR9_WINDOWLESS)
- // TODO
+ // TODO
#else
- if(this->graph){
- UINT image_w, image_h;
-
- if( this->graph->getImageFormat(image_w, image_h) ){
- //this->graph->getVideoWindow()->GetMinIdealImageSize(&ideal_w, &ideal_h);
- return (((long)image_w >= FSCREEN_MIN_IDEAL_WIDTH) && ((long)image_h >= FSCREEN_MIN_IDEAL_HEIGHT));
- }
- }
+ if(this->graph) {
+ UINT image_w, image_h;
+
+ if( this->graph->getImageFormat(image_w, image_h) ) {
+ //this->graph->getVideoWindow()->GetMinIdealImageSize(&ideal_w, &ideal_h);
+ return (((long)image_w >= FSCREEN_MIN_IDEAL_WIDTH) && ((long)image_h >= FSCREEN_MIN_IDEAL_HEIGHT));
+ }
+ }
#endif
- return false;
+ return false;
}
void DSDisplay::setFps(int fps_)
{
- this->fps = fps_;
- this->graph->setDisplayFps(fps_);
+ this->fps = fps_;
+ this->graph->setDisplayFps(fps_);
}
// w and h are the size of the buffer not the display
void DSDisplay::handleVideoFrame(const void* data, int w, int h)
{
- if (this->graph->isRunning()){
- // The graph will take care of changing the source filter if needed
- // in case of dimension change or anything else...
- this->graph->handleFrame(data, w, h);
- if(this->imgWidth != w || this->imgHeight != h){
- this->imgWidth = w;
- this->imgHeight = h;
- if(this->window){
- SendMessage(this->window, WM_SIZE, SIZE_RESTORED, MAKELPARAM(this->width , this->height));
- }
- }
+ if (this->graph->isRunning()) {
+ // The graph will take care of changing the source filter if needed
+ // in case of dimension change or anything else...
+ this->graph->handleFrame(data, w, h);
+ if(this->imgWidth != w || this->imgHeight != h) {
+ this->imgWidth = w;
+ this->imgHeight = h;
+ if(this->window) {
+ SendMessage(this->window, WM_SIZE, SIZE_RESTORED, MAKELPARAM(this->width , this->height));
+ }
+ }
#if USE_OVERLAY
- this->overlay->update();
+ this->overlay->update();
#endif
- }
+ }
}
LRESULT DSDisplay::handleEvents(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- switch(uMsg)
- {
- case WM_CREATE:
- case WM_SIZE:
- case WM_MOVE:
- {
- RECT rect = {0};
- GetWindowRect(hWnd, &rect);
- applyRatio(rect);
+ switch(uMsg) {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE: {
+ RECT rect = {0};
+ GetWindowRect(hWnd, &rect);
+ applyRatio(rect);
#if defined(VMR9_WINDOWLESS)
- this->graph->getWindowlessControl()->SetVideoPosition(&rect, &rect);
+ this->graph->getWindowlessControl()->SetVideoPosition(&rect, &rect);
#else
- this->graph->getVideoWindow()->SetWindowPosition(this->left, this->top, this->width , this->height);
+ this->graph->getVideoWindow()->SetWindowPosition(this->left, this->top, this->width , this->height);
#endif
- }
- break;
-
- case WM_LBUTTONDBLCLK:
- if(this->canFullscreen()){
- this->setFullscreen(true);
- }
- break;
-
- case WM_FULLSCREEN_SET:
- if(this->canFullscreen()){
- this->setFullscreen(!this->isFullscreen());
- }
- break;
-
- case WM_LBUTTONDOWN:
- case WM_RBUTTONDOWN:
- case WM_KEYDOWN:
- if(this->isFullscreen())
- {
+ }
+ break;
+
+ case WM_LBUTTONDBLCLK:
+ if(this->canFullscreen()) {
+ this->setFullscreen(true);
+ }
+ break;
+
+ case WM_FULLSCREEN_SET:
+ if(this->canFullscreen()) {
+ this->setFullscreen(!this->isFullscreen());
+ }
+ break;
+
+ case WM_LBUTTONDOWN:
+ case WM_RBUTTONDOWN:
+ case WM_KEYDOWN:
+ if(this->isFullscreen()) {
#if USE_OVERLAY
- // Re-Show overlay
- this->overlay->show(OVERLAY_TIMEOUT * this->graph->getDisplayFps());
+ // Re-Show overlay
+ this->overlay->show(OVERLAY_TIMEOUT * this->graph->getDisplayFps());
#endif
- }
- break;
-
- case WM_CHAR:
- case WM_KEYUP:
- if(this->isFullscreen() && (wParam == 0x1B || wParam == VK_ESCAPE))
- {
- // escape
- this->setFullscreen(false);
- }
-
- break;
-
- case WM_GRAPHNOTIFY:
- {
- long evCode;
- LONG_PTR param1, param2;
- HRESULT hr;
- while (hr = this->graph->getMediaEvent()->GetEvent(&evCode, &param1, &param2, 0), SUCCEEDED(hr))
- {
- hr = this->graph->getMediaEvent()->FreeEventParams(evCode, param1, param2);
-
- switch(evCode)
- {
- case EC_FULLSCREEN_LOST:
+ }
+ break;
+
+ case WM_CHAR:
+ case WM_KEYUP:
+ if(this->isFullscreen() && (wParam == 0x1B || wParam == VK_ESCAPE)) {
+ // escape
+ this->setFullscreen(false);
+ }
+
+ break;
+
+ case WM_GRAPHNOTIFY: {
+ long evCode;
+ LONG_PTR param1, param2;
+ HRESULT hr;
+ while (hr = this->graph->getMediaEvent()->GetEvent(&evCode, &param1, &param2, 0), SUCCEEDED(hr)) {
+ hr = this->graph->getMediaEvent()->FreeEventParams(evCode, param1, param2);
+
+ switch(evCode) {
+ case EC_FULLSCREEN_LOST:
#if USE_OVERLAY
- this->overlay->show(0);
+ this->overlay->show(0);
#endif
- break;
- case EC_COMPLETE:
- case EC_USERABORT:
- default:
- break;
- }
- }
- }
- break;
+ break;
+ case EC_COMPLETE:
+ case EC_USERABORT:
+ default:
+ break;
+ }
+ }
+ }
+ break;
#if defined(VMR9_WINDOWLESS)
- case WM_DISPLAYCHANGE:
- {
- this->graph->getWindowlessControl()->DisplayModeChanged();
- }
- break;
- case WM_PAINT:
- {
- RECT rect = {0};
- GetWindowRect(hWnd, &rect);
-
- PAINTSTRUCT ps;
- HDC hdc = BeginPaint(hWnd, &ps);
-
- this->graph->getWindowlessControl()->RepaintVideo(hWnd, hdc);
-
- EndPaint(hWnd, &ps);
- }
- break;
-#endif
+ case WM_DISPLAYCHANGE: {
+ this->graph->getWindowlessControl()->DisplayModeChanged();
+ }
+ break;
+ case WM_PAINT: {
+ RECT rect = {0};
+ GetWindowRect(hWnd, &rect);
+
+ PAINTSTRUCT ps;
+ HDC hdc = BeginPaint(hWnd, &ps);
+
+ this->graph->getWindowlessControl()->RepaintVideo(hWnd, hdc);
+
+ EndPaint(hWnd, &ps);
+ }
+ break;
+#endif
- }
+ }
- return bPluginFirefox ? DefWindowProc(hWnd, uMsg, wParam, lParam) : CallWindowProc(this->parentWindowProc, hWnd, uMsg, wParam, lParam);
+ return bPluginFirefox ? DefWindowProc(hWnd, uMsg, wParam, lParam) : CallWindowProc(this->parentWindowProc, hWnd, uMsg, wParam, lParam);
}
void DSDisplay::hook()
{
- HRESULT hr;
+ HRESULT hr;
- if (!this->window){
- return;
- }
+ if (!this->window) {
+ return;
+ }
- if(this->hooked){
- return;
- }
- this->hooked = TRUE;
+ if(this->hooked) {
+ return;
+ }
+ this->hooked = TRUE;
- bool lock = (__directshow__Displays != NULL);
+ bool lock = (__directshow__Displays != NULL);
- if(lock)
- tsk_list_lock(__directshow__Displays);
- {
- // Gets the parent Window procedure
+ if(lock) {
+ tsk_list_lock(__directshow__Displays);
+ }
+ {
+ // Gets the parent Window procedure
#if defined(_WIN32_WCE)
- // Workaround for bug in SetWindowLong, call twice the API
- //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) __directshow__WndProcWindow );
- //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) __directshow__WndProcWindow );
- //__directshow__Displays[this->window] = this;
+ // Workaround for a bug in SetWindowLong: call the API twice
+ //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) __directshow__WndProcWindow );
+ //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) __directshow__WndProcWindow );
+ //__directshow__Displays[this->window] = this;
#else
- this->parentWindowProc = (WNDPROC) SetWindowLongPtr(this->window, GWLP_WNDPROC, (LONG_PTR) __directshow__WndProcWindow);
- // Add this instance to the callback map
- tsk_object_new(tdshow_display_def_t, this->window, this);
+ this->parentWindowProc = (WNDPROC) SetWindowLongPtr(this->window, GWLP_WNDPROC, (LONG_PTR) __directshow__WndProcWindow);
+ // Add this instance to the callback map
+ tsk_object_new(tdshow_display_def_t, this->window, this);
#endif
- }
- if(lock)
- tsk_list_unlock(__directshow__Displays);
+ }
+ if(lock) {
+ tsk_list_unlock(__directshow__Displays);
+ }
- RECT rect;
- GetWindowRect(this->window, &rect);
- applyRatio(rect);
+ RECT rect;
+ GetWindowRect(this->window, &rect);
+ applyRatio(rect);
#if defined(VMR9_WINDOWLESS)
- rect.left = 0;
- rect.top = 0;
- rect.right = this->width;
- rect.bottom = this->height;
-
- // TODO : Review
- hr = this->graph->getWindowlessControl()->SetVideoClippingWindow(this->window);
- hr = this->graph->getWindowlessControl()->SetBorderColor(RGB(0, 0, 128));
- hr = this->graph->getWindowlessControl()->SetVideoPosition(NULL, &rect);
+ rect.left = 0;
+ rect.top = 0;
+ rect.right = this->width;
+ rect.bottom = this->height;
+
+ // TODO : Review
+ hr = this->graph->getWindowlessControl()->SetVideoClippingWindow(this->window);
+ hr = this->graph->getWindowlessControl()->SetBorderColor(RGB(0, 0, 128));
+ hr = this->graph->getWindowlessControl()->SetVideoPosition(NULL, &rect);
#else
- // TODO : Review the order
- hr = this->graph->getVideoWindow()->put_Owner((OAHWND) this->window);
- hr = this->graph->getVideoWindow()->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN);
- hr = this->graph->getVideoWindow()->SetWindowPosition(this->left, this->top, this->width, this->height);
- hr = this->graph->getVideoWindow()->put_MessageDrain((OAHWND) this->window);
- hr = this->graph->getVideoWindow()->put_Visible(OATRUE);
+ // TODO : Review the order
+ hr = this->graph->getVideoWindow()->put_Owner((OAHWND) this->window);
+ hr = this->graph->getVideoWindow()->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN);
+ hr = this->graph->getVideoWindow()->SetWindowPosition(this->left, this->top, this->width, this->height);
+ hr = this->graph->getVideoWindow()->put_MessageDrain((OAHWND) this->window);
+ hr = this->graph->getVideoWindow()->put_Visible(OATRUE);
#endif
- hr = this->graph->getMediaEvent()->SetNotifyWindow((OAHWND) this->window, WM_GRAPHNOTIFY, 0);
+ hr = this->graph->getMediaEvent()->SetNotifyWindow((OAHWND) this->window, WM_GRAPHNOTIFY, 0);
}
void DSDisplay::unhook()
{
- HRESULT hr;
+ HRESULT hr;
- if(!this->window){
- return;
- }
+ if(!this->window) {
+ return;
+ }
- if(!this->hooked){
- return;
- }
-
- hr = this->graph->getMediaEvent()->SetNotifyWindow(NULL, WM_GRAPHNOTIFY, 0);
+ if(!this->hooked) {
+ return;
+ }
+
+ hr = this->graph->getMediaEvent()->SetNotifyWindow(NULL, WM_GRAPHNOTIFY, 0);
#if defined(VMR9_WINDOWLESS)
- // TODO : Review
- hr = this->graph->getWindowlessControl()->SetVideoClippingWindow(NULL);
+ // TODO : Review
+ hr = this->graph->getWindowlessControl()->SetVideoClippingWindow(NULL);
#else
- // TODO : Review the order
- hr = this->graph->getVideoWindow()->put_Visible(OAFALSE);
- hr = this->graph->getVideoWindow()->put_MessageDrain((OAHWND) NULL);
- hr = this->graph->getVideoWindow()->put_Owner((OAHWND) NULL);
- hr = this->graph->getVideoWindow()->put_AutoShow(OAFALSE);
+ // TODO : Review the order
+ hr = this->graph->getVideoWindow()->put_Visible(OAFALSE);
+ hr = this->graph->getVideoWindow()->put_MessageDrain((OAHWND) NULL);
+ hr = this->graph->getVideoWindow()->put_Owner((OAHWND) NULL);
+ hr = this->graph->getVideoWindow()->put_AutoShow(OAFALSE);
#endif
- bool lock = (__directshow__Displays != NULL);
- if(lock)
- tsk_list_lock(__directshow__Displays);
- {
- // Remove this instance from the callback map
- tsk_list_remove_item_by_pred(__directshow__Displays, __pred_find_display_by_hwnd, &this->window);
- // Restore parent Window procedure
+ bool lock = (__directshow__Displays != NULL);
+ if(lock) {
+ tsk_list_lock(__directshow__Displays);
+ }
+ {
+ // Remove this instance from the callback map
+ tsk_list_remove_item_by_pred(__directshow__Displays, __pred_find_display_by_hwnd, &this->window);
+ // Restore parent Window procedure
#if defined(_WIN32_WCE)
- // Workaround for bug in SetWindowLong, call twice the API
- //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) this->parentWindowProc );
- //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) this->parentWindowProc );
+ // Workaround for a bug in SetWindowLong: call the API twice
+ //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) this->parentWindowProc );
+ //this->parentWindowProc = (WNDPROC)SetWindowLong( this->window, GWL_WNDPROC, (LONG) this->parentWindowProc );
#else
- SetWindowLongPtr(this->window, GWLP_WNDPROC, (LONG_PTR) this->parentWindowProc);
+ SetWindowLongPtr(this->window, GWLP_WNDPROC, (LONG_PTR) this->parentWindowProc);
#endif
- }
- if(lock)
- tsk_list_unlock(__directshow__Displays);
+ }
+ if(lock) {
+ tsk_list_unlock(__directshow__Displays);
+ }
- this->hooked = FALSE;
+ this->hooked = FALSE;
}
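Stripped of the display bookkeeping, hook()/unhook() is the classic Win32 subclassing dance: save the previous WNDPROC, install a replacement with SetWindowLongPtr(GWLP_WNDPROC), forward unhandled messages with CallWindowProc, and restore the saved procedure on teardown. A minimal sketch with hypothetical names (desktop Windows only; the WinCE path above needs the SetWindowLong workaround noted in the comments):

static WNDPROC g_prevProc = NULL;

static LRESULT CALLBACK mySubclassProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    if (uMsg == WM_SIZE) {
        // react to the resize here, then fall through to the original handler
    }
    return CallWindowProc(g_prevProc, hWnd, uMsg, wParam, lParam);
}

static void subclassWindow(HWND hWnd)
{
    g_prevProc = (WNDPROC) SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR) mySubclassProc);
}

static void unsubclassWindow(HWND hWnd)
{
    if (g_prevProc) {
        SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR) g_prevProc);
        g_prevProc = NULL;
    }
}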
@@ -568,55 +564,58 @@ void DSDisplay::unhook()
//
static tsk_object_t* tdshow_display_ctor(tsk_object_t * self, va_list * app)
{
- tdshow_display_t *display = (tdshow_display_t *)self;
-
- if(display){
- display->hwnd = va_arg(*app, HWND);
- display->display = va_arg(*app, DSDisplay*);
-
- if(!__directshow__Displays){
- __directshow__Displays = tsk_list_create();
- }
- tsk_list_push_back_data(__directshow__Displays, (void**)&display);
- }
-
- return self;
+ tdshow_display_t *display = (tdshow_display_t *)self;
+
+ if(display) {
+ display->hwnd = va_arg(*app, HWND);
+ display->display = va_arg(*app, DSDisplay*);
+
+ if(!__directshow__Displays) {
+ __directshow__Displays = tsk_list_create();
+ }
+ tsk_list_push_back_data(__directshow__Displays, (void**)&display);
+ }
+
+ return self;
}
static tsk_object_t* tdshow_display_dtor(tsk_object_t * self)
-{
- tdshow_display_t *display = (tdshow_display_t *)self;
- if(display){
- if(__directshow__Displays){
- tsk_list_remove_item_by_data(__directshow__Displays, display);
- //if(TSK_LIST_IS_EMPTY(__directshow__Displays)){
- // TSK_OBJECT_SAFE_FREE(__directshow__Displays);
- //}
- }
- }
-
- return self;
+{
+ tdshow_display_t *display = (tdshow_display_t *)self;
+ if(display) {
+ if(__directshow__Displays) {
+ tsk_list_remove_item_by_data(__directshow__Displays, display);
+ //if(TSK_LIST_IS_EMPTY(__directshow__Displays)){
+ // TSK_OBJECT_SAFE_FREE(__directshow__Displays);
+ //}
+ }
+ }
+
+ return self;
}
static int tdshow_display_cmp(const tsk_object_t *_d1, const tsk_object_t *_d2)
{
- const tdshow_display_t *d1 = (const tdshow_display_t *)_d1;
- const tdshow_display_t *d2 = (const tdshow_display_t *)_d2;
-
- if(d1 && d2){
- int ret = 0;
- tsk_subsat_int32_ptr(d1->hwnd, d2->hwnd, &ret);
- return ret;
- }
- else if(!d1 && !d2) return 0;
- else return -1;
+ const tdshow_display_t *d1 = (const tdshow_display_t *)_d1;
+ const tdshow_display_t *d2 = (const tdshow_display_t *)_d2;
+
+ if(d1 && d2) {
+ int ret = 0;
+ tsk_subsat_int32_ptr(d1->hwnd, d2->hwnd, &ret);
+ return ret;
+ }
+ else if(!d1 && !d2) {
+ return 0;
+ }
+ else {
+ return -1;
+ }
}
-static const tsk_object_def_t tdshow_display_def_s =
-{
- sizeof(tdshow_display_t),
- tdshow_display_ctor,
- tdshow_display_dtor,
- tdshow_display_cmp,
+static const tsk_object_def_t tdshow_display_def_s = {
+ sizeof(tdshow_display_t),
+ tdshow_display_ctor,
+ tdshow_display_dtor,
+ tdshow_display_cmp,
};
extern const tsk_object_def_t *tdshow_display_def_t = &tdshow_display_def_s;
diff --git a/plugins/pluginDirectShow/internals/DSDisplay.h b/plugins/pluginDirectShow/internals/DSDisplay.h
index b2985ef..c20c17c 100755
--- a/plugins/pluginDirectShow/internals/DSDisplay.h
+++ b/plugins/pluginDirectShow/internals/DSDisplay.h
@@ -2,19 +2,19 @@
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@@ -32,53 +32,53 @@
class DSDisplay
{
public:
- DSDisplay(HRESULT *hr);
- virtual ~DSDisplay();
+ DSDisplay(HRESULT *hr);
+ virtual ~DSDisplay();
- virtual void attach(INT64 parent);
- virtual void attach(void *parent);
- virtual void detach(void *parent);
- virtual void detach();
- virtual bool isAttached();
+ virtual void attach(INT64 parent);
+ virtual void attach(void *parent);
+ virtual void detach(void *parent);
+ virtual void detach();
+ virtual bool isAttached();
- virtual void start();
- virtual void pause();
- virtual void stop();
+ virtual void start();
+ virtual void pause();
+ virtual void stop();
- virtual int getWidth();
- virtual int getHeight();
- virtual void setSize(int w, int h);
+ virtual int getWidth();
+ virtual int getHeight();
+ virtual void setSize(int w, int h);
- virtual bool isFullscreen();
- virtual void setFullscreen(bool value);
- virtual void setPluginFirefox(bool value);
+ virtual bool isFullscreen();
+ virtual void setFullscreen(bool value);
+ virtual void setPluginFirefox(bool value);
- virtual bool canFullscreen();
+ virtual bool canFullscreen();
- virtual void setFps(int fps_);
+ virtual void setFps(int fps_);
- virtual void handleVideoFrame(const void* data, int w, int h);
+ virtual void handleVideoFrame(const void* data, int w, int h);
- LRESULT handleEvents(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+ LRESULT handleEvents(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
private:
- void hook();
- void unhook();
- void applyRatio(RECT rect);
+ void hook();
+ void unhook();
+ void applyRatio(RECT rect);
private:
- DSDisplayGraph *graph;
- DSDisplayOverlay *overlay;
-
- int fps;
- int left, top, width, height, imgWidth, imgHeight;
-
- bool bPluginFirefox;
- bool fullscreen;
- HWND window;
- WNDPROC parentWindowProc;
-
- bool hooked;
+ DSDisplayGraph *graph;
+ DSDisplayOverlay *overlay;
+
+ int fps;
+ int left, top, width, height, imgWidth, imgHeight;
+
+ bool bPluginFirefox;
+ bool fullscreen;
+ HWND window;
+ WNDPROC parentWindowProc;
+
+ bool hooked;
};
#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayGraph.cxx b/plugins/pluginDirectShow/internals/DSDisplayGraph.cxx
index b2da43b..75b00f4 100755
--- a/plugins/pluginDirectShow/internals/DSDisplayGraph.cxx
+++ b/plugins/pluginDirectShow/internals/DSDisplayGraph.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,315 +31,359 @@ using namespace std;
DSDisplayGraph::DSDisplayGraph(HRESULT *hr)
{
- this->running = FALSE;
- this->paused = FALSE;
- this->fps = 15;
-
- this->graphBuilder = NULL;
+ this->running = FALSE;
+ this->paused = FALSE;
+ this->fps = 15;
+
+ this->graphBuilder = NULL;
- this->sourceFilter = NULL;
- this->colorspaceConverterFilter = NULL;
- this->videoRendererFilter = NULL;
+ this->sourceFilter = NULL;
+ this->colorspaceConverterFilter = NULL;
+ this->videoRendererFilter = NULL;
- this->mediaController = NULL;
- this->mediaEvent = NULL;
- this->videoWindow = NULL;
+ this->mediaController = NULL;
+ this->mediaEvent = NULL;
+ this->videoWindow = NULL;
#if defined(VMR) ||defined(VMR9) || defined(VMR9_WINDOWLESS)
- this->mixerBitmap = NULL;
- this->filterConfig = NULL;
+ this->mixerBitmap = NULL;
+ this->filterConfig = NULL;
#endif
#if defined(VMR9_WINDOWLESS)
- this->windowlessControl = NULL;
+ this->windowlessControl = NULL;
#endif
- *hr = this->createDisplayGraph();
- if (FAILED(*hr)) return;
+ *hr = this->createDisplayGraph();
+ if (FAILED(*hr)) {
+ return;
+ }
- *hr = this->connect();
- if (FAILED(*hr)) return;
+ *hr = this->connect();
+ if (FAILED(*hr)) {
+ return;
+ }
}
DSDisplayGraph::~DSDisplayGraph()
{
- this->disconnect();
+ this->disconnect();
#if defined(VMR9_WINDOWLESS)
- SAFE_RELEASE(this->windowlessControl);
+ SAFE_RELEASE(this->windowlessControl);
#endif
#if defined(VMR) ||defined(VMR9) || defined(VMR9_WINDOWLESS)
- SAFE_RELEASE(this->filterConfig);
- SAFE_RELEASE(this->mixerBitmap);
+ SAFE_RELEASE(this->filterConfig);
+ SAFE_RELEASE(this->mixerBitmap);
#endif
- SAFE_RELEASE(this->videoWindow);
- SAFE_RELEASE(this->mediaEvent);
- SAFE_RELEASE(this->mediaController);
+ SAFE_RELEASE(this->videoWindow);
+ SAFE_RELEASE(this->mediaEvent);
+ SAFE_RELEASE(this->mediaController);
- SAFE_RELEASE(this->colorspaceConverterFilter);
- SAFE_RELEASE(this->videoRendererFilter);
- //SAFE_RELEASE(this->sourceFilter);
+ SAFE_RELEASE(this->colorspaceConverterFilter);
+ SAFE_RELEASE(this->videoRendererFilter);
+ //SAFE_RELEASE(this->sourceFilter);
- SAFE_RELEASE(this->graphBuilder);
+ SAFE_RELEASE(this->graphBuilder);
}
void DSDisplayGraph::setDisplayFps(int fps_)
{
- this->fps = fps_;
- if(this->sourceFilter){
- this->sourceFilter->setFps(fps_);
- }
+ this->fps = fps_;
+ if(this->sourceFilter) {
+ this->sourceFilter->setFps(fps_);
+ }
}
bool DSDisplayGraph::getImageFormat(UINT &width, UINT &height)
{
- if(this->sourceFilter){
- return this->sourceFilter->getImageFormat(width, height);
- }
- return false;
+ if(this->sourceFilter) {
+ return this->sourceFilter->getImageFormat(width, height);
+ }
+ return false;
}
bool DSDisplayGraph::setImageFormat(UINT width, UINT height)
{
- bool ret = true;
- if(this->sourceFilter){
- UINT w=width, h = height;
- if(this->sourceFilter->getImageFormat(w, h)){
- if(w!= width || h!=height){ // Image format has changed
- bool reconnect = this->connected; // IMPORTANT: Must reconnect all elements
- HRESULT hr;
- if(reconnect){
- if((hr = this->disconnect()) != S_OK){
- return false;
- }
- }
- ret = (this->sourceFilter->setImageFormat(width, height) == S_OK);
- if(reconnect){
- if((hr = this->connect())){
- return false;
- }
- }
- }
- }
- }
- return ret;
+ bool ret = true;
+ if(this->sourceFilter) {
+ UINT w=width, h = height;
+ if(this->sourceFilter->getImageFormat(w, h)) {
+ if(w!= width || h!=height) { // Image format has changed
+ bool reconnect = this->connected; // IMPORTANT: Must reconnect all elements
+ HRESULT hr;
+ if(reconnect) {
+ if((hr = this->disconnect()) != S_OK) {
+ return false;
+ }
+ }
+ ret = (this->sourceFilter->setImageFormat(width, height) == S_OK);
+ if(reconnect) {
+ if((hr = this->connect())) {
+ return false;
+ }
+ }
+ }
+ }
+ }
+ return ret;
}
HRESULT DSDisplayGraph::connect()
{
- HRESULT hr;
-
- if((hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorspaceConverterFilter)) != S_OK){
- TSK_DEBUG_ERROR("Failed to connect sourcefilter with the colorspace");
- return hr;
- }
- if((hr = ConnectFilters(this->graphBuilder, this->colorspaceConverterFilter, this->videoRendererFilter)) != S_OK){
- TSK_DEBUG_ERROR("Failed to connect colorspace with the videorenderer");
- return hr;
- }
-
- this->connected = true;
- return S_OK;
+ HRESULT hr;
+
+ if((hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->colorspaceConverterFilter)) != S_OK) {
+ TSK_DEBUG_ERROR("Failed to connect sourcefilter with the colorspace");
+ return hr;
+ }
+ if((hr = ConnectFilters(this->graphBuilder, this->colorspaceConverterFilter, this->videoRendererFilter)) != S_OK) {
+ TSK_DEBUG_ERROR("Failed to connect colorspace with the videorenderer");
+ return hr;
+ }
+
+ this->connected = true;
+ return S_OK;
}
HRESULT DSDisplayGraph::disconnect()
{
- HRESULT hr;
-
- if((hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorspaceConverterFilter)) != S_OK){
- TSK_DEBUG_ERROR("Failed to disconnect sourcefilter with the colorspace");
- return hr;
- }
- if((hr = DisconnectFilters(this->graphBuilder, this->colorspaceConverterFilter, this->videoRendererFilter)) != S_OK){
- TSK_DEBUG_ERROR("Failed to connect colorspace with the videorenderer");
- return hr;
- }
-
- this->connected = false;
- return S_OK;
+ HRESULT hr;
+
+ if((hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->colorspaceConverterFilter)) != S_OK) {
+ TSK_DEBUG_ERROR("Failed to disconnect sourcefilter from the colorspace");
+ return hr;
+ }
+ if((hr = DisconnectFilters(this->graphBuilder, this->colorspaceConverterFilter, this->videoRendererFilter)) != S_OK) {
+ TSK_DEBUG_ERROR("Failed to disconnect colorspace from the videorenderer");
+ return hr;
+ }
+
+ this->connected = false;
+ return S_OK;
}
HRESULT DSDisplayGraph::start()
{
- HRESULT hr;
- this->running = true;
- this->sourceFilter->reset();
-
- hr = this->mediaController->Run();
- if (!SUCCEEDED(hr)){
- TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Run() has failed with %ld", hr);
- }
- return hr;
+ HRESULT hr;
+ this->running = true;
+ this->sourceFilter->reset();
+
+ hr = this->mediaController->Run();
+ if (!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Run() has failed with %ld", hr);
+ }
+ return hr;
}
HRESULT DSDisplayGraph::pause()
{
- HRESULT hr = S_OK;
- if(isRunning() && !isPaused()){
- hr = this->mediaController->Pause();
- if(SUCCEEDED(hr)){
- this->paused = true;
- }
- }
- return hr;
+ HRESULT hr = S_OK;
+ if(isRunning() && !isPaused()) {
+ hr = this->mediaController->Pause();
+ if(SUCCEEDED(hr)) {
+ this->paused = true;
+ }
+ }
+ return hr;
}
HRESULT DSDisplayGraph::stop()
{
- HRESULT hr;
+ HRESULT hr;
- hr = this->mediaController->Pause();
- if (hr == S_FALSE){
- TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Pause() has failed with %ld. Waiting for transition.", hr);
- FILTER_STATE pfs;
- hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
- }
+ hr = this->mediaController->Pause();
+ if (hr == S_FALSE) {
+ TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Pause() has failed with %ld. Waiting for transition.", hr);
+ FILTER_STATE pfs;
+ hr = this->mediaController->GetState(2500, (OAFilterState*) &pfs);
+ }
- hr = this->mediaController->Stop();
- if (!SUCCEEDED(hr)){
- TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Stop() has failed with %ld", hr);
- }
+ hr = this->mediaController->Stop();
+ if (!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("DSDisplayGraph::mediaController->Stop() has failed with %ld", hr);
+ }
- this->running = false;
- this->paused = false;
+ this->running = false;
+ this->paused = false;
- return hr;
+ return hr;
}
bool DSDisplayGraph::isRunning()
{
- return this->running;
+ return this->running;
}
bool DSDisplayGraph::isPaused()
{
- return this->paused;
+ return this->paused;
}
void DSDisplayGraph::handleFrame(const void* data, int w, int h)
{
- HRESULT hr;
-
- if(!this->sourceFilter){
- TSK_DEBUG_ERROR("Invalid parameter");
- return;
- }
-
- if(!data || !this->running){
- this->sourceFilter->setBuffer(NULL, (w*h*3));
- return;
- }
-
- hr = this->sourceFilter->setImageFormat(w, h);
- if (hr == S_OK){
- this->stop();
-
- this->disconnect();
- this->connect();
-
- this->start();
- }
-
- this->sourceFilter->setBuffer((void*)data, (w*h*3));
+ HRESULT hr;
+
+ if(!this->sourceFilter) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return;
+ }
+
+ if(!data || !this->running) {
+ this->sourceFilter->setBuffer(NULL, (w*h*3));
+ return;
+ }
+
+ hr = this->sourceFilter->setImageFormat(w, h);
+ if (hr == S_OK) {
+ this->stop();
+
+ this->disconnect();
+ this->connect();
+
+ this->start();
+ }
+
+ this->sourceFilter->setBuffer((void*)data, (w*h*3));
}
HRESULT DSDisplayGraph::createDisplayGraph()
{
- HRESULT hr;
+ HRESULT hr;
- // Create the graph builder
- hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder);
- if(FAILED(hr)) return hr;
+ // Create the graph builder
+ hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder);
+ if(FAILED(hr)) {
+ return hr;
+ }
- // Create my custom filter
- LPUNKNOWN pUnk = NULL;
- this->sourceFilter = new DSOutputFilter(pUnk, &hr /*, this*/);
- if(FAILED(hr) || this->sourceFilter == NULL) return hr;
+ // Create my custom filter
+ LPUNKNOWN pUnk = NULL;
+ this->sourceFilter = new DSOutputFilter(pUnk, &hr /*, this*/);
+ if(FAILED(hr) || this->sourceFilter == NULL) {
+ return hr;
+ }
- // Create the color space convertor filter
- hr = COCREATE(CLSID_Colour, IID_IBaseFilter, this->colorspaceConverterFilter);
- if(FAILED(hr)) return hr;
+ // Create the color space convertor filter
+ hr = COCREATE(CLSID_Colour, IID_IBaseFilter, this->colorspaceConverterFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
#if defined(VMR)
- // Create the video mixing renderer based on Direct X
- hr = COCREATE(CLSID_VideoMixingRenderer, IID_IBaseFilter, this->videoRendererFilter);
- if(FAILED(hr)) return hr;
+ // Create the video mixing renderer based on Direct X
+ hr = COCREATE(CLSID_VideoMixingRenderer, IID_IBaseFilter, this->videoRendererFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
#elif defined(VMR9) || defined(VMR9_WINDOWLESS)
- // Create the video mixing renderer based on Direct X 9.0
- hr = COCREATE(CLSID_VideoMixingRenderer9, IID_IBaseFilter, this->videoRendererFilter);
- if(FAILED(hr)) return hr;
+ // Create the video mixing renderer based on Direct X 9.0
+ hr = COCREATE(CLSID_VideoMixingRenderer9, IID_IBaseFilter, this->videoRendererFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
#else
- // Create the video renderer
- hr = COCREATE(CLSID_VideoRenderer, IID_IBaseFilter, this->videoRendererFilter);
- if(FAILED(hr)) return hr;
+ // Create the video renderer
+ hr = COCREATE(CLSID_VideoRenderer, IID_IBaseFilter, this->videoRendererFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
#endif
- // Add dource filter to the graph
- hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_OUTPUT);
- if(FAILED(hr)) return hr;
+ // Add source filter to the graph
+ hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_OUTPUT);
+ if(FAILED(hr)) {
+ return hr;
+ }
- // Add the color space convertor to the graph
- hr = this->graphBuilder->AddFilter(this->colorspaceConverterFilter, FILTER_COLORSPACE_CONVERTOR);
- if(FAILED(hr)) return hr;
+ // Add the color space convertor to the graph
+ hr = this->graphBuilder->AddFilter(this->colorspaceConverterFilter, FILTER_COLORSPACE_CONVERTOR);
+ if(FAILED(hr)) {
+ return hr;
+ }
- // Add video renderer to the graph
- hr = this->graphBuilder->AddFilter(this->videoRendererFilter, FILTER_VIDEO_RENDERER);
- if(FAILED(hr)) return hr;
+ // Add video renderer to the graph
+ hr = this->graphBuilder->AddFilter(this->videoRendererFilter, FILTER_VIDEO_RENDERER);
+ if(FAILED(hr)) {
+ return hr;
+ }
- // Find media control
- hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController);
- if(FAILED(hr)) return hr;
+ // Find media control
+ hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController);
+ if(FAILED(hr)) {
+ return hr;
+ }
- // Find media event
- hr = QUERY(this->graphBuilder, IID_IMediaEventEx, this->mediaEvent);
- if(FAILED(hr)) return hr;
- // hr = this->mediaEvent->SetNotifyFlags(AM_MEDIAEVENT_NONOTIFY);
+ // Find media event
+ hr = QUERY(this->graphBuilder, IID_IMediaEventEx, this->mediaEvent);
+ if(FAILED(hr)) {
+ return hr;
+ }
+ // hr = this->mediaEvent->SetNotifyFlags(AM_MEDIAEVENT_NONOTIFY);
#if defined(VMR)
- // Find the bitmap mixer (Direct X)
- hr = QUERY(this->videoRendererFilter, IID_IVMRMixerBitmap, this->mixerBitmap);
- if(FAILED(hr)) return hr;
-
- // Find the bitmap configurer (Direct X)
- hr = QUERY(this->videoRendererFilter, IID_IVMRFilterConfig, this->filterConfig);
- if(FAILED(hr)) return hr;
-
- // Set the number of streams (Direct X)
- hr = this->filterConfig->SetNumberOfStreams(1);
- if(FAILED(hr)) return hr;
+ // Find the bitmap mixer (Direct X)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRMixerBitmap, this->mixerBitmap);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Find the bitmap configurer (Direct X)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRFilterConfig, this->filterConfig);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Set the number of streams (Direct X)
+ hr = this->filterConfig->SetNumberOfStreams(1);
+ if(FAILED(hr)) {
+ return hr;
+ }
#elif defined(VMR9) || defined(VMR9_WINDOWLESS)
- // Find the bitmap mixer (Direct X 9.0)
- hr = QUERY(this->videoRendererFilter, IID_IVMRMixerBitmap9, this->mixerBitmap);
- if(FAILED(hr)) return hr;
-
- // Find the bitmap configurer (Direct X 9.0)
- hr = QUERY(this->videoRendererFilter, IID_IVMRFilterConfig9, this->filterConfig);
- if(FAILED(hr)) return hr;
-
- // Set the number of streams (Direct X 9.0)
- hr = this->filterConfig->SetNumberOfStreams(1);
- if(FAILED(hr)) return hr;
+ // Find the bitmap mixer (Direct X 9.0)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRMixerBitmap9, this->mixerBitmap);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Find the bitmap configurer (Direct X 9.0)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRFilterConfig9, this->filterConfig);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Set the number of streams (Direct X 9.0)
+ hr = this->filterConfig->SetNumberOfStreams(1);
+ if(FAILED(hr)) {
+ return hr;
+ }
#endif
#if defined(VMR9_WINDOWLESS)
- // Set the rendering mode (Direct X 9.0)
- hr = this->filterConfig->SetRenderingMode(VMR9Mode_Windowless);
- if(FAILED(hr)) return hr;
-
- // Find the windowless control (Direct X 9.0)
- hr = QUERY(this->videoRendererFilter, IID_IVMRWindowlessControl9, this->windowlessControl);
- if(FAILED(hr)) return hr;
+ // Set the rendering mode (Direct X 9.0)
+ hr = this->filterConfig->SetRenderingMode(VMR9Mode_Windowless);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Find the windowless control (Direct X 9.0)
+ hr = QUERY(this->videoRendererFilter, IID_IVMRWindowlessControl9, this->windowlessControl);
+ if(FAILED(hr)) {
+ return hr;
+ }
#else
- // Find IVideoWindow interface
- hr = QUERY(this->graphBuilder, IID_IVideoWindow, this->videoWindow);
- if(FAILED(hr)) return hr;
+ // Find IVideoWindow interface
+ hr = QUERY(this->graphBuilder, IID_IVideoWindow, this->videoWindow);
+ if(FAILED(hr)) {
+ return hr;
+ }
#endif
- return hr;
+ return hr;
}
diff --git a/plugins/pluginDirectShow/internals/DSDisplayGraph.h b/plugins/pluginDirectShow/internals/DSDisplayGraph.h
index c9080fe..4260e45 100755
--- a/plugins/pluginDirectShow/internals/DSDisplayGraph.h
+++ b/plugins/pluginDirectShow/internals/DSDisplayGraph.h
@@ -2,19 +2,19 @@
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@@ -39,72 +39,90 @@
class DSDisplayGraph
{
public:
- DSDisplayGraph(HRESULT *hr);
- virtual ~DSDisplayGraph();
-
- int getDisplayFps() { return this->fps; };
- void setDisplayFps(int fps_);
-
- bool getImageFormat(UINT &width, UINT &height);
- bool setImageFormat(UINT width, UINT height);
-
- HRESULT connect();
- HRESULT disconnect();
-
- HRESULT start();
- HRESULT pause();
- HRESULT stop();
- bool isRunning();
- bool isPaused();
-
- IMediaEventEx *getMediaEvent() { return this->mediaEvent; };
- IVideoWindow *getVideoWindow() { return this->videoWindow; };
- DSOutputFilter *getSourceFilter() { return this->sourceFilter; };
+ DSDisplayGraph(HRESULT *hr);
+ virtual ~DSDisplayGraph();
+
+ int getDisplayFps() {
+ return this->fps;
+ };
+ void setDisplayFps(int fps_);
+
+ bool getImageFormat(UINT &width, UINT &height);
+ bool setImageFormat(UINT width, UINT height);
+
+ HRESULT connect();
+ HRESULT disconnect();
+
+ HRESULT start();
+ HRESULT pause();
+ HRESULT stop();
+ bool isRunning();
+ bool isPaused();
+
+ IMediaEventEx *getMediaEvent() {
+ return this->mediaEvent;
+ };
+ IVideoWindow *getVideoWindow() {
+ return this->videoWindow;
+ };
+ DSOutputFilter *getSourceFilter() {
+ return this->sourceFilter;
+ };
#if defined(VMR)
- IVMRMixerBitmap *getMixerBitmap() { return this->mixerBitmap; };
+ IVMRMixerBitmap *getMixerBitmap() {
+ return this->mixerBitmap;
+ };
#elif defined(VMR9)
- IVMRMixerBitmap9 *getMixerBitmap() { return this->mixerBitmap; };
+ IVMRMixerBitmap9 *getMixerBitmap() {
+ return this->mixerBitmap;
+ };
#elif defined(VMR9_WINDOWLESS)
- IVMRMixerBitmap9 *getMixerBitmap() { return this->mixerBitmap; };
- IVMRMixerControl9 *getMixerControl() { return this->mixerControl; };
- IVMRWindowlessControl9 *getWindowlessControl() { return this->windowlessControl; };
+ IVMRMixerBitmap9 *getMixerBitmap() {
+ return this->mixerBitmap;
+ };
+ IVMRMixerControl9 *getMixerControl() {
+ return this->mixerControl;
+ };
+ IVMRWindowlessControl9 *getWindowlessControl() {
+ return this->windowlessControl;
+ };
#endif
- void handleFrame(const void* data, int w, int h);
+ void handleFrame(const void* data, int w, int h);
private:
- HRESULT createDisplayGraph();
+ HRESULT createDisplayGraph();
private:
- IGraphBuilder *graphBuilder;
+ IGraphBuilder *graphBuilder;
- DSOutputFilter *sourceFilter;
- IBaseFilter *colorspaceConverterFilter;
- IBaseFilter *videoRendererFilter;
+ DSOutputFilter *sourceFilter;
+ IBaseFilter *colorspaceConverterFilter;
+ IBaseFilter *videoRendererFilter;
- IMediaControl *mediaController;
- IMediaEventEx *mediaEvent;
- IVideoWindow *videoWindow;
+ IMediaControl *mediaController;
+ IMediaEventEx *mediaEvent;
+ IVideoWindow *videoWindow;
#if defined(VMR)
- IVMRMixerBitmap *mixerBitmap;
- IVMRFilterConfig *filterConfig;
+ IVMRMixerBitmap *mixerBitmap;
+ IVMRFilterConfig *filterConfig;
#elif defined(VMR9)
- IVMRMixerBitmap9 *mixerBitmap;
- IVMRMixerControl9 *mixerControl;
- IVMRFilterConfig9 *filterConfig;
+ IVMRMixerBitmap9 *mixerBitmap;
+ IVMRMixerControl9 *mixerControl;
+ IVMRFilterConfig9 *filterConfig;
#elif defined(VMR9_WINDOWLESS)
- IVMRMixerBitmap9 *mixerBitmap;
- IVMRMixerControl9 *mixerControl;
- IVMRFilterConfig9 *filterConfig;
- IVMRWindowlessControl9 *windowlessControl;
+ IVMRMixerBitmap9 *mixerBitmap;
+ IVMRMixerControl9 *mixerControl;
+ IVMRFilterConfig9 *filterConfig;
+ IVMRWindowlessControl9 *windowlessControl;
#endif
- bool connected;
- bool running;
- bool paused;
- int fps;
+ bool connected;
+ bool running;
+ bool paused;
+ int fps;
};
#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx b/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx
index dacea84..188f7ec 100755
--- a/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx
+++ b/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR.cxx
@@ -2,19 +2,19 @@
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@@ -39,9 +39,9 @@ EXTERN_C IMAGE_DOS_HEADER __ImageBase;
DSDisplayOverlay::DSDisplayOverlay()
{
- this->window = NULL;
- this->hdcBmp = NULL;
- this->hbmOld = NULL;
+ this->window = NULL;
+ this->hdcBmp = NULL;
+ this->hbmOld = NULL;
}
DSDisplayOverlay::~DSDisplayOverlay()
@@ -50,130 +50,122 @@ DSDisplayOverlay::~DSDisplayOverlay()
void DSDisplayOverlay::attach(HWND parent, DSDisplayGraph *graph)
{
- HRESULT hr;
-
- // Gets the handle of the parent and the graph
- this->window = parent;
- this->displayGraph = graph;
-
- if (this->window)
- {
- // Hack to get module of the current code
- TCHAR *modulePath = (TCHAR *) calloc(255, sizeof(TCHAR));
- GetModuleFileName((HINSTANCE)&__ImageBase, modulePath, 255);
- HMODULE module = GetModuleHandle(modulePath);
- delete[] modulePath;
- if (!module)
- {
- cout << "Failed to get current module";
- return;
- }
-
- HBITMAP bitmap = LoadBitmap(module, MAKEINTRESOURCE(IDB_BITMAP_OVERLAY));
- if (!bitmap)
- {
- cout << "Failed to load overlay bitmap" << endl;
- return;
- }
-
- RECT rect;
- hr = GetWindowRect(this->window, &rect);
- if (FAILED(hr))
- {
- cout << "Failed to get window size" << endl;
- return;
- }
-
- BITMAP bm;
- HDC hdc = GetDC(this->window);
- this->hdcBmp = CreateCompatibleDC(hdc);
- ReleaseDC(this->window, hdc);
-
- GetObject(bitmap, sizeof(bm), &bm);
- this->hbmOld= (HBITMAP) SelectObject(this->hdcBmp, bitmap);
-
- ZeroMemory(&this->alphaBitmap, sizeof(VMRALPHABITMAP));
- this->alphaBitmap.dwFlags = VMRBITMAP_HDC | VMRBITMAP_SRCCOLORKEY;
- this->alphaBitmap.hdc = this->hdcBmp;
- this->alphaBitmap.clrSrcKey = 0x00FF00FF;
- // Source rectangle
- this->alphaBitmap.rSrc.left = 0;
- this->alphaBitmap.rSrc.top = 0;
- this->alphaBitmap.rSrc.right = bm.bmWidth;
- this->alphaBitmap.rSrc.bottom = bm.bmHeight;
- // Destination rectangle
- this->alphaBitmap.rDest.left = (rect.right - rect.left - bm.bmWidth) / 2.0;
- this->alphaBitmap.rDest.top = (rect.bottom - rect.top - bm.bmHeight) / 2.0;
- this->alphaBitmap.rDest.right = this->alphaBitmap.rDest.left + bm.bmWidth;
- this->alphaBitmap.rDest.bottom = this->alphaBitmap.rDest.top + bm.bmHeight;
- this->alphaBitmap.rDest.left /= (rect.right - rect.left);
- this->alphaBitmap.rDest.top /= (rect.bottom - rect.top);
- this->alphaBitmap.rDest.right /= (rect.right - rect.left);
- this->alphaBitmap.rDest.bottom /= (rect.bottom - rect.top);
- // Alpha value for start
- this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
-
- }
+ HRESULT hr;
+
+ // Gets the handle of the parent and the graph
+ this->window = parent;
+ this->displayGraph = graph;
+
+ if (this->window) {
+ // Hack to get module of the current code
+ TCHAR *modulePath = (TCHAR *) calloc(255, sizeof(TCHAR));
+ GetModuleFileName((HINSTANCE)&__ImageBase, modulePath, 255);
+ HMODULE module = GetModuleHandle(modulePath);
+ free(modulePath); // allocated with calloc(), so release with free()
+ if (!module) {
+ cout << "Failed to get current module";
+ return;
+ }
+
+ HBITMAP bitmap = LoadBitmap(module, MAKEINTRESOURCE(IDB_BITMAP_OVERLAY));
+ if (!bitmap) {
+ cout << "Failed to load overlay bitmap" << endl;
+ return;
+ }
+
+ RECT rect;
+ hr = GetWindowRect(this->window, &rect);
+ if (FAILED(hr)) {
+ cout << "Failed to get window size" << endl;
+ return;
+ }
+
+ BITMAP bm;
+ HDC hdc = GetDC(this->window);
+ this->hdcBmp = CreateCompatibleDC(hdc);
+ ReleaseDC(this->window, hdc);
+
+ GetObject(bitmap, sizeof(bm), &bm);
+ this->hbmOld= (HBITMAP) SelectObject(this->hdcBmp, bitmap);
+
+ ZeroMemory(&this->alphaBitmap, sizeof(VMRALPHABITMAP));
+ this->alphaBitmap.dwFlags = VMRBITMAP_HDC | VMRBITMAP_SRCCOLORKEY;
+ this->alphaBitmap.hdc = this->hdcBmp;
+ this->alphaBitmap.clrSrcKey = 0x00FF00FF;
+ // Source rectangle
+ this->alphaBitmap.rSrc.left = 0;
+ this->alphaBitmap.rSrc.top = 0;
+ this->alphaBitmap.rSrc.right = bm.bmWidth;
+ this->alphaBitmap.rSrc.bottom = bm.bmHeight;
+ // Destination rectangle
+ this->alphaBitmap.rDest.left = (rect.right - rect.left - bm.bmWidth) / 2.0;
+ this->alphaBitmap.rDest.top = (rect.bottom - rect.top - bm.bmHeight) / 2.0;
+ this->alphaBitmap.rDest.right = this->alphaBitmap.rDest.left + bm.bmWidth;
+ this->alphaBitmap.rDest.bottom = this->alphaBitmap.rDest.top + bm.bmHeight;
+ this->alphaBitmap.rDest.left /= (rect.right - rect.left);
+ this->alphaBitmap.rDest.top /= (rect.bottom - rect.top);
+ this->alphaBitmap.rDest.right /= (rect.right - rect.left);
+ this->alphaBitmap.rDest.bottom /= (rect.bottom - rect.top);
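+ // rDest is expressed in VMR normalized coordinates (0.0 .. 1.0 of the composition rectangle),
+ // hence the centering in pixels above followed by the division by the window size.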
+ // Alpha value for start
+ this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
+
+ }
}
void DSDisplayOverlay::detach()
{
- // Clean up
- DeleteObject(SelectObject(this->hdcBmp, this->hbmOld));
- DeleteDC(this->hdcBmp);
-
- this->hdcBmp = NULL;
- this->hbmOld = NULL;
- this->displayGraph = NULL;
- this->window = NULL;
+ // Clean up
+ DeleteObject(SelectObject(this->hdcBmp, this->hbmOld));
+ DeleteDC(this->hdcBmp);
+
+ this->hdcBmp = NULL;
+ this->hbmOld = NULL;
+ this->displayGraph = NULL;
+ this->window = NULL;
}
void DSDisplayOverlay::show(int value)
{
- // Store the ticks to count down
- this->ticks = value;
+ // Store the ticks to count down
+ this->ticks = value;
- // Compute alpha value decrement
- this->alphaStep = (this->ticks > 0) ? ((ALPHA_VALUE_START - ALPHA_VALUE_STOP) / this->ticks) : 0;
- this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
+ // Compute alpha value decrement
+ this->alphaStep = (this->ticks > 0) ? ((ALPHA_VALUE_START - ALPHA_VALUE_STOP) / this->ticks) : 0;
+ this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
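+ // Each subsequent update() subtracts alphaStep, so the overlay fades from ALPHA_VALUE_START
+ // down to ALPHA_VALUE_STOP over 'ticks' refreshes before being disabled.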
- this->internalUpdate();
+ this->internalUpdate();
}
void DSDisplayOverlay::update()
{
- if (this->displayGraph && (this->ticks > 0))
- {
- this->ticks--;
+ if (this->displayGraph && (this->ticks > 0)) {
+ this->ticks--;
- // Be sure alpha is in 0.0 .. 1.0 range.
- float value = this->alphaBitmap.fAlpha;
- value -= this->alphaStep;
- this->alphaBitmap.fAlpha = (value >= 0.0f) ? value : 0.0f;
+ // Be sure alpha is in 0.0 .. 1.0 range.
+ float value = this->alphaBitmap.fAlpha;
+ value -= this->alphaStep;
+ this->alphaBitmap.fAlpha = (value >= 0.0f) ? value : 0.0f;
- this->internalUpdate();
- }
+ this->internalUpdate();
+ }
}
void DSDisplayOverlay::internalUpdate()
{
- HRESULT hr;
-
- if (this->ticks > 0)
- {
- this->alphaBitmap.dwFlags = VMRBITMAP_HDC | VMRBITMAP_SRCCOLORKEY;
- }
- else
- {
- this->alphaBitmap.dwFlags = VMRBITMAP_DISABLE;
- }
-
- hr = this->displayGraph->getMixerBitmap()->SetAlphaBitmap(&this->alphaBitmap);
- if (FAILED(hr))
- {
- cout << "Failed to mix overylay (" << hr << ")" << endl;
- return;
- }
+ HRESULT hr;
+
+ if (this->ticks > 0) {
+ this->alphaBitmap.dwFlags = VMRBITMAP_HDC | VMRBITMAP_SRCCOLORKEY;
+ }
+ else {
+ this->alphaBitmap.dwFlags = VMRBITMAP_DISABLE;
+ }
+
+ hr = this->displayGraph->getMixerBitmap()->SetAlphaBitmap(&this->alphaBitmap);
+ if (FAILED(hr)) {
+ cout << "Failed to mix overlay (" << hr << ")" << endl;
+ return;
+ }
}
#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx b/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx
index 972945f..4504849 100755
--- a/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx
+++ b/plugins/pluginDirectShow/internals/DSDisplayOverlay.VMR9.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -32,176 +32,165 @@ using namespace std;
DSDisplayOverlay::DSDisplayOverlay()
{
- this->window = NULL;
- this->direct3DDevice = NULL;
- this->direct3DSurface = NULL;
-
- this->direct3D = Direct3DCreate9(D3D_SDK_VERSION);
- if (!this->direct3D)
- {
- cout << "Cannot create Direct3D environment" << endl;
- return;
- }
+ this->window = NULL;
+ this->direct3DDevice = NULL;
+ this->direct3DSurface = NULL;
+
+ this->direct3D = Direct3DCreate9(D3D_SDK_VERSION);
+ if (!this->direct3D) {
+ cout << "Cannot create Direct3D environment" << endl;
+ return;
+ }
}
DSDisplayOverlay::~DSDisplayOverlay()
{
- SAFE_RELEASE(this->direct3D);
+ SAFE_RELEASE(this->direct3D);
}
void DSDisplayOverlay::attach(HWND parent, DSDisplayGraph *graph)
{
- HRESULT hr;
-
- // Gets the handle of the parent and the graph
- this->window = parent;
- this->displayGraph = graph;
-
- if (this->window)
- {
- D3DPRESENT_PARAMETERS d3dpp;
- ZeroMemory(&d3dpp, sizeof(D3DPRESENT_PARAMETERS));
- d3dpp.Windowed = TRUE;
- d3dpp.SwapEffect = D3DSWAPEFFECT_COPY;
-
- hr = this->direct3D->CreateDevice(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- this->window,
- D3DCREATE_SOFTWARE_VERTEXPROCESSING,
- &d3dpp,
- &this->direct3DDevice);
- if (FAILED(hr))
- {
- cout << "Cannot create Direct3D device" << endl;
- return;
- }
-
- ZeroMemory(&this->overlayInfo, sizeof(D3DXIMAGE_INFO));
- hr = D3DXGetImageInfoFromFile(FILENAME, &this->overlayInfo);
- if (FAILED(hr))
- {
- cout << "Cannot stat overlay file" << endl;
- return;
- }
-
- hr = this->direct3DDevice->CreateOffscreenPlainSurface(
- this->overlayInfo.Width,
- this->overlayInfo.Height,
- D3DFMT_A8R8G8B8,
- D3DPOOL_SYSTEMMEM,
- &this->direct3DSurface,
- NULL);
- if (FAILED(hr))
- {
- cout << "Cannot create Direct3D surface" << endl;
- return;
- }
-
- D3DCOLOR alphaKey = 0xFF000000;
-
- hr = D3DXLoadSurfaceFromFile(this->direct3DSurface,
- NULL,
- NULL,
- FILENAME,
- NULL,
- D3DX_FILTER_NONE,
- alphaKey,
- &this->overlayInfo);
- if (FAILED(hr))
- {
- cout << "Cannot load overlay file" << endl;
- return;
- }
-
- D3DVIEWPORT9 viewport;
- ZeroMemory(&viewport, sizeof(D3DVIEWPORT9));
-
- hr= this->direct3DDevice->GetViewport(&viewport);
- if (FAILED(hr))
- {
- cout << "Cannot get view port" << endl;
- return;
- }
-
- ZeroMemory(&this->alphaBitmap, sizeof(VMR9AlphaBitmap));
- this->alphaBitmap.dwFlags = VMR9AlphaBitmap_EntireDDS;
- this->alphaBitmap.hdc = NULL;
- this->alphaBitmap.pDDS = this->direct3DSurface;
- // Source rectangle
- this->alphaBitmap.rSrc.left = 0;
- this->alphaBitmap.rSrc.top = 0;
- this->alphaBitmap.rSrc.right = this->overlayInfo.Width;
- this->alphaBitmap.rSrc.bottom = this->overlayInfo.Height;
- // Destination rectangle
- this->alphaBitmap.rDest.left = (viewport.Width - this->overlayInfo.Width) / 2.0;
- this->alphaBitmap.rDest.top = (viewport.Height - this->overlayInfo.Height) / 2.0;
- this->alphaBitmap.rDest.right = this->alphaBitmap.rDest.left + this->overlayInfo.Width;
- this->alphaBitmap.rDest.bottom = this->alphaBitmap.rDest.top + this->overlayInfo.Height;
- this->alphaBitmap.rDest.left /= viewport.Width;
- this->alphaBitmap.rDest.top /= viewport.Height;
- this->alphaBitmap.rDest.right /= viewport.Width;
- this->alphaBitmap.rDest.bottom /= viewport.Height;
- // Alpha value for start
- this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
- }
+ HRESULT hr;
+
+ // Gets the handle of the parent and the graph
+ this->window = parent;
+ this->displayGraph = graph;
+
+ if (this->window) {
+ D3DPRESENT_PARAMETERS d3dpp;
+ ZeroMemory(&d3dpp, sizeof(D3DPRESENT_PARAMETERS));
+ d3dpp.Windowed = TRUE;
+ d3dpp.SwapEffect = D3DSWAPEFFECT_COPY;
+
+ hr = this->direct3D->CreateDevice(
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ this->window,
+ D3DCREATE_SOFTWARE_VERTEXPROCESSING,
+ &d3dpp,
+ &this->direct3DDevice);
+ if (FAILED(hr)) {
+ cout << "Cannot create Direct3D device" << endl;
+ return;
+ }
+
+ ZeroMemory(&this->overlayInfo, sizeof(D3DXIMAGE_INFO));
+ hr = D3DXGetImageInfoFromFile(FILENAME, &this->overlayInfo);
+ if (FAILED(hr)) {
+ cout << "Cannot stat overlay file" << endl;
+ return;
+ }
+
+ hr = this->direct3DDevice->CreateOffscreenPlainSurface(
+ this->overlayInfo.Width,
+ this->overlayInfo.Height,
+ D3DFMT_A8R8G8B8,
+ D3DPOOL_SYSTEMMEM,
+ &this->direct3DSurface,
+ NULL);
+ if (FAILED(hr)) {
+ cout << "Cannot create Direct3D surface" << endl;
+ return;
+ }
+
+ D3DCOLOR alphaKey = 0xFF000000;
+
+ hr = D3DXLoadSurfaceFromFile(this->direct3DSurface,
+ NULL,
+ NULL,
+ FILENAME,
+ NULL,
+ D3DX_FILTER_NONE,
+ alphaKey,
+ &this->overlayInfo);
+ if (FAILED(hr)) {
+ cout << "Cannot load overlay file" << endl;
+ return;
+ }
+
+ D3DVIEWPORT9 viewport;
+ ZeroMemory(&viewport, sizeof(D3DVIEWPORT9));
+
+ hr= this->direct3DDevice->GetViewport(&viewport);
+ if (FAILED(hr)) {
+ cout << "Cannot get view port" << endl;
+ return;
+ }
+
+ ZeroMemory(&this->alphaBitmap, sizeof(VMR9AlphaBitmap));
+ this->alphaBitmap.dwFlags = VMR9AlphaBitmap_EntireDDS;
+ this->alphaBitmap.hdc = NULL;
+ this->alphaBitmap.pDDS = this->direct3DSurface;
+ // Source rectangle
+ this->alphaBitmap.rSrc.left = 0;
+ this->alphaBitmap.rSrc.top = 0;
+ this->alphaBitmap.rSrc.right = this->overlayInfo.Width;
+ this->alphaBitmap.rSrc.bottom = this->overlayInfo.Height;
+ // Destination rectangle
+ this->alphaBitmap.rDest.left = (viewport.Width - this->overlayInfo.Width) / 2.0;
+ this->alphaBitmap.rDest.top = (viewport.Height - this->overlayInfo.Height) / 2.0;
+ this->alphaBitmap.rDest.right = this->alphaBitmap.rDest.left + this->overlayInfo.Width;
+ this->alphaBitmap.rDest.bottom = this->alphaBitmap.rDest.top + this->overlayInfo.Height;
+ this->alphaBitmap.rDest.left /= viewport.Width;
+ this->alphaBitmap.rDest.top /= viewport.Height;
+ this->alphaBitmap.rDest.right /= viewport.Width;
+ this->alphaBitmap.rDest.bottom /= viewport.Height;
+ // Alpha value for start
+ this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
+ }
}
void DSDisplayOverlay::detach()
{
- SAFE_RELEASE(this->direct3DSurface);
- SAFE_RELEASE(this->direct3DDevice);
+ SAFE_RELEASE(this->direct3DSurface);
+ SAFE_RELEASE(this->direct3DDevice);
- this->displayGraph = NULL;
- this->window = NULL;
+ this->displayGraph = NULL;
+ this->window = NULL;
}
void DSDisplayOverlay::show(int value)
{
- // Store the ticks to count down
- this->ticks = value;
+ // Store the ticks to count down
+ this->ticks = value;
- // Compute alpha value decrement
- this->alphaStep = (this->ticks > 0) ? ((ALPHA_VALUE_START - ALPHA_VALUE_STOP) / this->ticks) : 0;
- this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
+ // Compute alpha value decrement
+ this->alphaStep = (this->ticks > 0) ? ((ALPHA_VALUE_START - ALPHA_VALUE_STOP) / this->ticks) : 0;
+ this->alphaBitmap.fAlpha = ALPHA_VALUE_START;
- this->internalUpdate();
+ this->internalUpdate();
}
void DSDisplayOverlay::update()
{
- if (this->displayGraph && (this->ticks > 0))
- {
- this->ticks--;
+ if (this->displayGraph && (this->ticks > 0)) {
+ this->ticks--;
- // Be sure alpha is in 0.0 .. 1.0 range.
- float value = this->alphaBitmap.fAlpha;
- value -= this->alphaStep;
- this->alphaBitmap.fAlpha = (value >= 0.0f) ? value : 0.0f;
+ // Be sure alpha is in 0.0 .. 1.0 range.
+ float value = this->alphaBitmap.fAlpha;
+ value -= this->alphaStep;
+ this->alphaBitmap.fAlpha = (value >= 0.0f) ? value : 0.0f;
- this->internalUpdate();
- }
+ this->internalUpdate();
+ }
}
void DSDisplayOverlay::internalUpdate()
{
- HRESULT hr;
-
- if (this->ticks > 0)
- {
- this->alphaBitmap.dwFlags = VMR9AlphaBitmap_EntireDDS;
- }
- else
- {
- this->alphaBitmap.dwFlags = VMR9AlphaBitmap_Disable;
- }
-
- hr = this->displayGraph->getMixerBitmap()->SetAlphaBitmap(&this->alphaBitmap);
- if (FAILED(hr))
- {
- cout << "Failed to mix overylay (" << hr << ")" << endl;
- return;
- }
+ HRESULT hr;
+
+ if (this->ticks > 0) {
+ this->alphaBitmap.dwFlags = VMR9AlphaBitmap_EntireDDS;
+ }
+ else {
+ this->alphaBitmap.dwFlags = VMR9AlphaBitmap_Disable;
+ }
+
+ hr = this->displayGraph->getMixerBitmap()->SetAlphaBitmap(&this->alphaBitmap);
+ if (FAILED(hr)) {
+ cout << "Failed to mix overlay (" << hr << ")" << endl;
+ return;
+ }
}
#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx b/plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx
index eb355c4..7c5463b 100755
--- a/plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx
+++ b/plugins/pluginDirectShow/internals/DSDisplayOverlay.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -34,34 +34,33 @@ DSDisplayOverlay::~DSDisplayOverlay()
void DSDisplayOverlay::attach(HWND parent, DSDisplayGraph *graph)
{
- this->displayGraph = graph;
+ this->displayGraph = graph;
}
void DSDisplayOverlay::detach()
{
- this->displayGraph = NULL;
+ this->displayGraph = NULL;
}
void DSDisplayOverlay::show(int value)
{
- // Store the ticks to count down
- this->ticks = value;
+ // Store the ticks to count down
+ this->ticks = value;
- this->internalUpdate();
+ this->internalUpdate();
}
void DSDisplayOverlay::update()
{
- if (this->displayGraph && (this->ticks > 0))
- {
- this->ticks--;
- this->internalUpdate();
- }
+ if (this->displayGraph && (this->ticks > 0)) {
+ this->ticks--;
+ this->internalUpdate();
+ }
}
void DSDisplayOverlay::internalUpdate()
{
- this->displayGraph->getSourceFilter()->showOverlay(this->ticks);
+ this->displayGraph->getSourceFilter()->showOverlay(this->ticks);
}
#endif
diff --git a/plugins/pluginDirectShow/internals/DSDisplayOverlay.h b/plugins/pluginDirectShow/internals/DSDisplayOverlay.h
index 0db887d..69ed6a3 100755
--- a/plugins/pluginDirectShow/internals/DSDisplayOverlay.h
+++ b/plugins/pluginDirectShow/internals/DSDisplayOverlay.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -32,36 +32,36 @@ class DSDisplayGraph;
class DSDisplayOverlay
{
public:
- DSDisplayOverlay();
- virtual ~DSDisplayOverlay();
+ DSDisplayOverlay();
+ virtual ~DSDisplayOverlay();
- void attach(HWND parent, DSDisplayGraph *graph);
- void detach();
+ void attach(HWND parent, DSDisplayGraph *graph);
+ void detach();
- void show(int value);
- void update();
+ void show(int value);
+ void update();
private:
- void internalUpdate();
+ void internalUpdate();
private:
- HWND window;
+ HWND window;
- DSDisplayGraph *displayGraph;
- int ticks;
+ DSDisplayGraph *displayGraph;
+ int ticks;
#if defined(VMR)
- HDC hdcBmp;
- HBITMAP hbmOld;
- VMRALPHABITMAP alphaBitmap;
- float alphaStep;
+ HDC hdcBmp;
+ HBITMAP hbmOld;
+ VMRALPHABITMAP alphaBitmap;
+ float alphaStep;
#elif defined(VMR9) || defined(VMR9_WINDOWLESS)
- IDirect3D9 *direct3D;
- IDirect3DDevice9 *direct3DDevice;
- IDirect3DSurface9 *direct3DSurface;
- D3DXIMAGE_INFO overlayInfo;
- VMR9AlphaBitmap alphaBitmap;
- float alphaStep;
+ IDirect3D9 *direct3D;
+ IDirect3DDevice9 *direct3DDevice;
+ IDirect3DSurface9 *direct3DSurface;
+ D3DXIMAGE_INFO overlayInfo;
+ VMR9AlphaBitmap alphaBitmap;
+ float alphaStep;
#endif
};
diff --git a/plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx b/plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx
index cbf2a0a..c44ba03 100755
--- a/plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx
+++ b/plugins/pluginDirectShow/internals/DSFrameRateFilter.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -26,20 +26,20 @@ using namespace std;
#define FPS_OUTPUT 5
// {7F9F08CF-139F-40b2-A283-01C4EC26A452}
-TDSHOW_DEFINE_GUID(CLSID_DSFrameRateFilter,
-0x7f9f08cf, 0x139f, 0x40b2, 0xa2, 0x83, 0x1, 0xc4, 0xec, 0x26, 0xa4, 0x52);
+TDSHOW_DEFINE_GUID(CLSID_DSFrameRateFilter,
+ 0x7f9f08cf, 0x139f, 0x40b2, 0xa2, 0x83, 0x1, 0xc4, 0xec, 0x26, 0xa4, 0x52);
DSFrameRateFilter::DSFrameRateFilter(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr)
-:CTransInPlaceFilter (tszName, punk, CLSID_DSFrameRateFilter, phr)
+ :CTransInPlaceFilter (tszName, punk, CLSID_DSFrameRateFilter, phr)
{
- this->m_rtFrameLength = (10000000)/FPS_OUTPUT;
+ this->m_rtFrameLength = (10000000)/FPS_OUTPUT;
- this->m_inputFps = FPS_INPUT;
- this->m_outputFps = FPS_OUTPUT;
+ this->m_inputFps = FPS_INPUT;
+ this->m_outputFps = FPS_OUTPUT;
- this->m_iFrameNumber = 0;
- this->m_progress = 0;
- this->m_bProcessFrame = true;
+ this->m_iFrameNumber = 0;
+ this->m_progress = 0;
+ this->m_bProcessFrame = true;
}
DSFrameRateFilter::~DSFrameRateFilter()
@@ -48,73 +48,74 @@ DSFrameRateFilter::~DSFrameRateFilter()
HRESULT DSFrameRateFilter::SetFps(int inputFps, int outputFps)
{
- if(inputFps <= 0 || outputFps <= 0)
- {
- return E_FAIL;
- }
-
- // Stop prcessing
- this->m_bProcessFrame = false;
-
- if (inputFps < outputFps) {
- this->m_inputFps = this->m_outputFps = inputFps;
- }
- else {
- this->m_outputFps = outputFps;
- this->m_inputFps = inputFps;
- }
-
- // Restart processing
- this->m_iFrameNumber = 0;
- this->m_progress = 0;
- this->m_bProcessFrame = true;
-
- return S_OK;
+ if(inputFps <= 0 || outputFps <= 0) {
+ return E_FAIL;
+ }
+
+ // Stop processing
+ this->m_bProcessFrame = false;
+
+ if (inputFps < outputFps) {
+ this->m_inputFps = this->m_outputFps = inputFps;
+ }
+ else {
+ this->m_outputFps = outputFps;
+ this->m_inputFps = inputFps;
+ }
+
+ // Restart processing
+ this->m_iFrameNumber = 0;
+ this->m_progress = 0;
+ this->m_bProcessFrame = true;
+
+ return S_OK;
}
HRESULT DSFrameRateFilter::Transform(IMediaSample *pSample)
-{
- if(!this->m_bProcessFrame) return S_FALSE;
+{
+ if(!this->m_bProcessFrame) {
+ return S_FALSE;
+ }
- CheckPointer(pSample, E_POINTER);
+ CheckPointer(pSample, E_POINTER);
- HRESULT hr = S_OK;
- HRESULT ret = S_FALSE;
+ HRESULT hr = S_OK;
+ HRESULT ret = S_FALSE;
- pSample->SetTime(NULL, NULL);
+ pSample->SetTime(NULL, NULL);
- // Drop frame?
- if (this->m_iFrameNumber == 0) {
- ret = S_OK;
- }
- else if (this->m_progress >= this->m_inputFps) {
- this->m_progress -= this->m_inputFps;
- ret = S_OK;
- }
+ // Drop frame?
+ if (this->m_iFrameNumber == 0) {
+ ret = S_OK;
+ }
+ else if (this->m_progress >= this->m_inputFps) {
+ this->m_progress -= this->m_inputFps;
+ ret = S_OK;
+ }
- // Mark frame as accepted
- if (ret == S_OK) {
- // Set TRUE on every sample for uncompressed frames
- pSample->SetSyncPoint(TRUE);
- }
+ // Mark frame as accepted
+ if (ret == S_OK) {
+ // Set TRUE on every sample for uncompressed frames
+ pSample->SetSyncPoint(TRUE);
+ }
- this->m_progress += this->m_outputFps;
- this->m_iFrameNumber++;
+ this->m_progress += this->m_outputFps;
+ this->m_iFrameNumber++;
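+ // Accumulator-based decimation: each input frame adds m_outputFps to m_progress and a frame is
+ // accepted once the accumulator reaches m_inputFps, so roughly outputFps out of every inputFps
+ // input frames are kept.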
- return ret;
+ return ret;
}
HRESULT DSFrameRateFilter::CheckInputType(const CMediaType* mtIn)
-{
- return S_OK;
+{
+ return S_OK;
}
//Implement CreateInstance for your filter object. Typically, CreateInstance calls the constructor of your filter class.
CUnknown * WINAPI DSFrameRateFilter::CreateInstance(LPUNKNOWN punk, HRESULT *phr)
{
- DSFrameRateFilter *pNewObject = new DSFrameRateFilter(_T("Tdshow DirectShow Framerate Limiter Filter."), punk, phr );
- if (pNewObject == NULL) {
- *phr = E_OUTOFMEMORY;
- }
- return pNewObject;
-}
+ DSFrameRateFilter *pNewObject = new DSFrameRateFilter(_T("Tdshow DirectShow Framerate Limiter Filter."), punk, phr );
+ if (pNewObject == NULL) {
+ *phr = E_OUTOFMEMORY;
+ }
+ return pNewObject;
+}
diff --git a/plugins/pluginDirectShow/internals/DSFrameRateFilter.h b/plugins/pluginDirectShow/internals/DSFrameRateFilter.h
index 9f2296e..eff2bfb 100755
--- a/plugins/pluginDirectShow/internals/DSFrameRateFilter.h
+++ b/plugins/pluginDirectShow/internals/DSFrameRateFilter.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -26,39 +26,39 @@
class DSFrameRateFilter : public CTransInPlaceFilter
{
public:
- DSFrameRateFilter(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr);
- ~DSFrameRateFilter(void);
+ DSFrameRateFilter(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr);
+ ~DSFrameRateFilter(void);
public:
- HRESULT Transform(IMediaSample *pSample);
+ HRESULT Transform(IMediaSample *pSample);
HRESULT CheckInputType(const CMediaType* mtIn);
public:
- /**
- * \def SetFps
- * \brief fps1 define source .
- */
- HRESULT SetFps(int inputFps, int outputFps);
+ /**
+ * \brief Sets the source (input) and target (output) framerates; excess input frames are dropped.
+ */
+ HRESULT SetFps(int inputFps, int outputFps);
static CUnknown *WINAPI CreateInstance(LPUNKNOWN punk, HRESULT *phr);
DECLARE_IUNKNOWN;
- /*STDMETHODIMP_(ULONG) NonDelegatingRelease()
- {
- if(InterlockedDecrement(&m_cRef) == 0)
- {
- delete this;
- return 0;
- }
- return m_cRef;
- }*/
+ /*STDMETHODIMP_(ULONG) NonDelegatingRelease()
+ {
+ if(InterlockedDecrement(&m_cRef) == 0)
+ {
+ delete this;
+ return 0;
+ }
+ return m_cRef;
+ }*/
private:
- int m_progress;
- int m_inputFps, m_outputFps;
- bool m_bProcessFrame;
- REFERENCE_TIME m_rtFrameLength; // UNITS/fps
- LONGLONG m_iFrameNumber;
+ int m_progress;
+ int m_inputFps, m_outputFps;
+ bool m_bProcessFrame;
+ REFERENCE_TIME m_rtFrameLength; // UNITS/fps
+ LONGLONG m_iFrameNumber;
};
#endif ////DSFrameRateFilter_H
\ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/DSGrabber.cxx b/plugins/pluginDirectShow/internals/DSGrabber.cxx
index e4dc3a7..18df2a3 100755
--- a/plugins/pluginDirectShow/internals/DSGrabber.cxx
+++ b/plugins/pluginDirectShow/internals/DSGrabber.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -33,260 +33,257 @@
using namespace std;
DSGrabber::DSGrabber(HRESULT *hr, BOOL _screenCast)
-: mutex_buffer(NULL), preview(NULL)
-, screenCast(_screenCast)
+ : mutex_buffer(NULL), preview(NULL)
+ , screenCast(_screenCast)
{
#if defined(_WIN32_WCE)
- assert(!screenCast);
- this->graph = new DSCaptureGraph(this, hr);
- CHECK_HR((*hr));
+ assert(!screenCast);
+ this->graph = new DSCaptureGraph(this, hr);
+ CHECK_HR((*hr));
#else
- this->graph = screenCast ? dynamic_cast<DSBaseCaptureGraph*>(new DSScreenCastGraph(this, hr)) : dynamic_cast<DSBaseCaptureGraph*>(new DSCaptureGraph(this, hr));
- CHECK_HR((*hr));
- this->preview = new DSDisplay(hr);
+ this->graph = screenCast ? dynamic_cast<DSBaseCaptureGraph*>(new DSScreenCastGraph(this, hr)) : dynamic_cast<DSBaseCaptureGraph*>(new DSCaptureGraph(this, hr));
+ CHECK_HR((*hr));
+ this->preview = new DSDisplay(hr);
#endif
- // Init the bitmap info header with default values
- memset(&(this->bitmapInfo), 0, sizeof(BITMAPINFOHEADER));
- this->bitmapInfo.biSize = sizeof(BITMAPINFOHEADER);
- this->bitmapInfo.biWidth = 352;
- this->bitmapInfo.biHeight = 288;
- this->bitmapInfo.biPlanes = 1;
- this->bitmapInfo.biBitCount = 24;
- this->bitmapInfo.biCompression = 0;
- this->bitmapInfo.biXPelsPerMeter = 0;
- this->bitmapInfo.biYPelsPerMeter = 0;
- this->bitmapInfo.biClrUsed = 0;
- this->bitmapInfo.biClrImportant = 0;
-
- this->plugin_cb = NULL;
- this->buffer = NULL;
- this->mutex_buffer = tsk_mutex_create();
-
-bail: ;
+ // Init the bitmap info header with default values
+ memset(&(this->bitmapInfo), 0, sizeof(BITMAPINFOHEADER));
+ this->bitmapInfo.biSize = sizeof(BITMAPINFOHEADER);
+ this->bitmapInfo.biWidth = 352;
+ this->bitmapInfo.biHeight = 288;
+ this->bitmapInfo.biPlanes = 1;
+ this->bitmapInfo.biBitCount = 24;
+ this->bitmapInfo.biCompression = 0;
+ this->bitmapInfo.biXPelsPerMeter = 0;
+ this->bitmapInfo.biYPelsPerMeter = 0;
+ this->bitmapInfo.biClrUsed = 0;
+ this->bitmapInfo.biClrImportant = 0;
+
+ this->plugin_cb = NULL;
+ this->buffer = NULL;
+ this->mutex_buffer = tsk_mutex_create();
+
+bail:
+ ;
}
DSGrabber::~DSGrabber()
{
- SAFE_DELETE_PTR ( this->graph );
- SAFE_DELETE_PTR ( this->preview );
- SAFE_DELETE_ARRAY ( this->buffer );
- tsk_mutex_destroy(&this->mutex_buffer);
+ SAFE_DELETE_PTR ( this->graph );
+ SAFE_DELETE_PTR ( this->preview );
+ SAFE_DELETE_ARRAY ( this->buffer );
+ tsk_mutex_destroy(&this->mutex_buffer);
}
void DSGrabber::setCaptureDevice(const std::string &devicePath)
{
- this->graph->setSource(devicePath);
+ this->graph->setSource(devicePath);
}
void DSGrabber::setCallback(tmedia_producer_enc_cb_f callback, const void* callback_data)
{
- this->plugin_cb = callback;
- this->plugin_cb_data = callback_data;
+ this->plugin_cb = callback;
+ this->plugin_cb_data = callback_data;
}
void DSGrabber::start()
{
- if(this->graph->isPaused()){
- this->graph->start();
- this->preview->start();
- return;
- }
-
- if (!this->graph->isRunning()){
- first_buffer = true;
-
- if(this->preview){
- this->preview->start();
- }
- this->graph->connect();
- this->graph->start();
- }
+ if(this->graph->isPaused()) {
+ this->graph->start();
+ this->preview->start();
+ return;
+ }
+
+ if (!this->graph->isRunning()) {
+ first_buffer = true;
+
+ if(this->preview) {
+ this->preview->start();
+ }
+ this->graph->connect();
+ this->graph->start();
+ }
}
void DSGrabber::pause()
{
- if(this->graph && this->graph->isRunning()){
- this->graph->pause();
- this->preview->pause();
- }
+ if(this->graph && this->graph->isRunning()) {
+ this->graph->pause();
+ this->preview->pause();
+ }
}
void DSGrabber::stop()
{
- if (this->graph->isRunning()){
- if(this->preview){
- this->preview->stop();
- }
- this->graph->stop();
- this->graph->disconnect();
- }
+ if (this->graph->isRunning()) {
+ if(this->preview) {
+ this->preview->stop();
+ }
+ this->graph->stop();
+ this->graph->disconnect();
+ }
}
bool DSGrabber::setCaptureParameters(int w, int h, int f)
{
- tsk_mutex_lock(this->mutex_buffer);
-
- // Store the framerate
- this->fps = f;
- this->width = w;
- this->height = h;
-
- // Store the required dimensions
- this->bitmapInfo.biWidth = this->width;
- this->bitmapInfo.biHeight = this->height;
- this->bitmapInfo.biBitCount = 24;
- this->bitmapInfo.biSizeImage = (this->width * this->height * 3);
-
- // Change the intermediate buffer
- SAFE_DELETE_ARRAY ( this->buffer );
- this->buffer = new BYTE[this->bitmapInfo.biSizeImage];
- memset(this->buffer,0,this->bitmapInfo.biSizeImage);
-
- // Find closest matching format to drive the source filter
- DSCaptureFormat *fmt = NULL;
- int score = 0;
- std::vector<DSCaptureFormat> *formats = this->graph->getFormats();
- std::vector<DSCaptureFormat>::iterator iter;
- std::vector<DSCaptureFormat>::iterator last = formats->end();
- for(iter = formats->begin(); iter != last; iter++){
- int value = (*iter).getMatchScore(this->width, this->height);
- if (value > score || !fmt){
- score = value;
- fmt = &(*iter);
- }
- }
-
- // Setup source filter in the graph
- HRESULT hr = this->graph->setParameters(fmt, this->fps);
- // Set preview parameters
- if(this->preview){
- this->preview->setFps(this->fps);
- this->preview->setSize(this->width, this->height);
- }
-
- tsk_mutex_unlock(this->mutex_buffer);
-
- return SUCCEEDED(hr);
+ tsk_mutex_lock(this->mutex_buffer);
+
+ // Store the framerate
+ this->fps = f;
+ this->width = w;
+ this->height = h;
+
+ // Store the required dimensions
+ this->bitmapInfo.biWidth = this->width;
+ this->bitmapInfo.biHeight = this->height;
+ this->bitmapInfo.biBitCount = 24;
+ this->bitmapInfo.biSizeImage = (this->width * this->height * 3);
+
+ // Change the intermediate buffer
+ SAFE_DELETE_ARRAY ( this->buffer );
+ this->buffer = new BYTE[this->bitmapInfo.biSizeImage];
+ memset(this->buffer,0,this->bitmapInfo.biSizeImage);
+
+ // Find closest matching format to drive the source filter
+ DSCaptureFormat *fmt = NULL;
+ int score = 0;
+ std::vector<DSCaptureFormat> *formats = this->graph->getFormats();
+ std::vector<DSCaptureFormat>::iterator iter;
+ std::vector<DSCaptureFormat>::iterator last = formats->end();
+ for(iter = formats->begin(); iter != last; iter++) {
+ int value = (*iter).getMatchScore(this->width, this->height);
+ if (value > score || !fmt) {
+ score = value;
+ fmt = &(*iter);
+ }
+ }
+
+ // Setup source filter in the graph
+ HRESULT hr = this->graph->setParameters(fmt, this->fps);
+ // Set preview parameters
+ if(this->preview) {
+ this->preview->setFps(this->fps);
+ this->preview->setSize(this->width, this->height);
+ }
+
+ tsk_mutex_unlock(this->mutex_buffer);
+
+ return SUCCEEDED(hr);
}
void DSGrabber::setPluginFirefox(bool value)
{
- if(this->preview){
- this->preview->setPluginFirefox(value);
- }
+ if(this->preview) {
+ this->preview->setPluginFirefox(value);
+ }
}
bool DSGrabber::setCaptureParameters(int format, int f)
{
- int w, h;
- // Get size from the format
- VIDEOFORMAT_TO_SIZE(format, w, h);
- return this->setCaptureParameters(w, h, f);
+ int w, h;
+ // Get size from the format
+ VIDEOFORMAT_TO_SIZE(format, w, h);
+ return this->setCaptureParameters(w, h, f);
}
int DSGrabber::getFramerate()
{
- return this->fps;
+ return this->fps;
}
HRESULT DSGrabber::getConnectedMediaType(AM_MEDIA_TYPE *mediaType)
{
- if (!this->graph || !mediaType) {
- return E_INVALIDARG;
- }
- return this->graph->getConnectedMediaType(mediaType);
+ if (!this->graph || !mediaType) {
+ return E_INVALIDARG;
+ }
+ return this->graph->getConnectedMediaType(mediaType);
}
HRESULT DSGrabber::SampleCB(double SampleTime, IMediaSample *pSample)
{
- return S_OK;
+ return S_OK;
}
HRESULT DSGrabber::BufferCB(double SampleTime, BYTE *pBuffer, long BufferLen)
-{
- HRESULT hr;
-
- tsk_mutex_lock(this->mutex_buffer);
-
- AM_MEDIA_TYPE mediaType;
- hr = this->graph->getConnectedMediaType(&mediaType);
- if (FAILED(hr) || !this->buffer){
- return hr;
- }
-
- if(first_buffer){
- first_buffer = false;
-
- tsk_mutex_unlock(this->mutex_buffer);
- return hr;
- }
-
- // Examine the format block.
- if ((mediaType.formattype == FORMAT_VideoInfo) && (mediaType.cbFormat >= sizeof(VIDEOINFOHEADER)) && (mediaType.pbFormat != NULL) )
- {
- VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER *>(mediaType.pbFormat);
- BITMAPINFOHEADER* bih = &pVih->bmiHeader;
-
- //int framerate = pVih->AvgTimePerFrame;
- if( (bih->biHeight == this->bitmapInfo.biHeight) && (bih->biWidth == this->bitmapInfo.biWidth) && (bih->biBitCount == this->bitmapInfo.biBitCount) )
- {
- memmove(this->buffer, pBuffer, this->bitmapInfo.biSizeImage);
- }
- else
- {
- ResizeRGB(
- bih,
- (const unsigned char *) pBuffer,
- &this->bitmapInfo,
- (unsigned char *) this->buffer,
- this->width,
- this->height);
- }
-
- // for the network
- if(this->plugin_cb){
- this->plugin_cb(this->plugin_cb_data, this->buffer, (this->width*this->height*3));
- }
-
- // for the preview
- if(this->preview){
- this->preview->handleVideoFrame(this->buffer, this->width, this->height);
- }
- }
-
- // Free the format
+{
+ HRESULT hr;
+
+ tsk_mutex_lock(this->mutex_buffer);
+
+ AM_MEDIA_TYPE mediaType;
+ hr = this->graph->getConnectedMediaType(&mediaType);
+ if (FAILED(hr) || !this->buffer) {
+ tsk_mutex_unlock(this->mutex_buffer); // release the buffer mutex before the early return
+ return hr;
+ }
+
+ if(first_buffer) {
+ first_buffer = false;
+
+ tsk_mutex_unlock(this->mutex_buffer);
+ return hr;
+ }
+
+ // Examine the format block.
+ if ((mediaType.formattype == FORMAT_VideoInfo) && (mediaType.cbFormat >= sizeof(VIDEOINFOHEADER)) && (mediaType.pbFormat != NULL) ) {
+ VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER *>(mediaType.pbFormat);
+ BITMAPINFOHEADER* bih = &pVih->bmiHeader;
+
+ //int framerate = pVih->AvgTimePerFrame;
+ if( (bih->biHeight == this->bitmapInfo.biHeight) && (bih->biWidth == this->bitmapInfo.biWidth) && (bih->biBitCount == this->bitmapInfo.biBitCount) ) {
+ memmove(this->buffer, pBuffer, this->bitmapInfo.biSizeImage);
+ }
+ else {
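+ // Captured frame doesn't match the negotiated width/height/bpp: rescale it into the RGB24 intermediate buffer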
+ ResizeRGB(
+ bih,
+ (const unsigned char *) pBuffer,
+ &this->bitmapInfo,
+ (unsigned char *) this->buffer,
+ this->width,
+ this->height);
+ }
+
+ // for the network
+ if(this->plugin_cb) {
+ this->plugin_cb(this->plugin_cb_data, this->buffer, (this->width*this->height*3));
+ }
+
+ // for the preview
+ if(this->preview) {
+ this->preview->handleVideoFrame(this->buffer, this->width, this->height);
+ }
+ }
+
+ // Free the format
#ifdef _WIN32_WCE
- // Nothing had been allocated
+ // Nothing had been allocated
#else
- FreeMediaType(mediaType);
+ FreeMediaType(mediaType);
#endif
- tsk_mutex_unlock(this->mutex_buffer);
+ tsk_mutex_unlock(this->mutex_buffer);
- return hr;
+ return hr;
}
HRESULT DSGrabber::QueryInterface(REFIID iid, LPVOID *ppv)
{
#ifdef _WIN32_WCE
- assert(1==0);
+ assert(1==0);
#else
- if( iid == IID_ISampleGrabberCB || iid == IID_IUnknown )
- {
- *ppv = (void *) static_cast<ISampleGrabberCB*>(this);
- return NOERROR;
- }
+ if( iid == IID_ISampleGrabberCB || iid == IID_IUnknown ) {
+ *ppv = (void *) static_cast<ISampleGrabberCB*>(this);
+ return NOERROR;
+ }
#endif
- return E_NOINTERFACE;
+ return E_NOINTERFACE;
}
ULONG DSGrabber::AddRef()
{
- return 2;
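+ // The grabber isn't reference-counted through COM; AddRef/Release return fixed values and the
+ // owner controls the object's lifetime.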
+ return 2;
}
ULONG DSGrabber::Release()
{
- return 1;
+ return 1;
}
\ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/DSGrabber.h b/plugins/pluginDirectShow/internals/DSGrabber.h
index 64cde75..3445fb3 100755
--- a/plugins/pluginDirectShow/internals/DSGrabber.h
+++ b/plugins/pluginDirectShow/internals/DSGrabber.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -36,57 +36,57 @@ class DSDisplay;
class DSGrabber : public
#if defined(_WIN32_WCE)
- DSISampleGrabberCB
+ DSISampleGrabberCB
#else
- ISampleGrabberCB
+ ISampleGrabberCB
#endif
{
public:
- DSGrabber(HRESULT *hr, BOOL screenCast);
- virtual ~DSGrabber();
+ DSGrabber(HRESULT *hr, BOOL screenCast);
+ virtual ~DSGrabber();
- void setCallback(tmedia_producer_enc_cb_f callback, const void* callback_data);
- void setCaptureDevice(const std::string &devicePath);
+ void setCallback(tmedia_producer_enc_cb_f callback, const void* callback_data);
+ void setCaptureDevice(const std::string &devicePath);
- virtual void start();
- virtual void pause();
- virtual void stop();
+ virtual void start();
+ virtual void pause();
+ virtual void stop();
- virtual bool setCaptureParameters(int format, int f);
- virtual bool setCaptureParameters(int w, int h, int f);
+ virtual bool setCaptureParameters(int format, int f);
+ virtual bool setCaptureParameters(int w, int h, int f);
- virtual void setPluginFirefox(bool value);
+ virtual void setPluginFirefox(bool value);
- virtual int getFramerate();
- virtual HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
+ virtual int getFramerate();
+ virtual HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
- virtual HRESULT STDMETHODCALLTYPE SampleCB(double SampleTime, IMediaSample *pSample);
+ virtual HRESULT STDMETHODCALLTYPE SampleCB(double SampleTime, IMediaSample *pSample);
virtual HRESULT STDMETHODCALLTYPE BufferCB(double SampleTime, BYTE *pBuffer, long BufferLen);
- virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void __RPC_FAR *__RPC_FAR *ppvObject);
- virtual ULONG STDMETHODCALLTYPE AddRef();
- virtual ULONG STDMETHODCALLTYPE Release();
+ virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void __RPC_FAR *__RPC_FAR *ppvObject);
+ virtual ULONG STDMETHODCALLTYPE AddRef();
+ virtual ULONG STDMETHODCALLTYPE Release();
- DSDisplay *preview;
+ DSDisplay *preview;
private:
- int width;
- int height;
- int fps;
+ int width;
+ int height;
+ int fps;
- DSBaseCaptureGraph *graph;
+ DSBaseCaptureGraph *graph;
- //VideoFrame *currentFrame;
- BITMAPINFOHEADER bitmapInfo;
- BYTE *buffer;
+ //VideoFrame *currentFrame;
+ BITMAPINFOHEADER bitmapInfo;
+ BYTE *buffer;
- tsk_mutex_handle_t *mutex_buffer;
+ tsk_mutex_handle_t *mutex_buffer;
- BOOL first_buffer;
- BOOL screenCast;
+ BOOL first_buffer;
+ BOOL screenCast;
- const void* plugin_cb_data;
- tmedia_producer_enc_cb_f plugin_cb;
+ const void* plugin_cb_data;
+ tmedia_producer_enc_cb_f plugin_cb;
};
#endif
diff --git a/plugins/pluginDirectShow/internals/DSOutputFilter.cxx b/plugins/pluginDirectShow/internals/DSOutputFilter.cxx
index ab5aa0f..7efbcc6 100755
--- a/plugins/pluginDirectShow/internals/DSOutputFilter.cxx
+++ b/plugins/pluginDirectShow/internals/DSOutputFilter.cxx
@@ -1,18 +1,18 @@
/*
* Copyright (C) 2010-2011 Mamadou DIOP.
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@@ -23,91 +23,92 @@
#include "tsk_memory.h"
-DSOutputFilter::DSOutputFilter(LPUNKNOWN pUnk, HRESULT *phr)
-: CSource(_T("TDSHOW_OUTPUT"), pUnk, CLSID_TdshowOutputFilter)
+DSOutputFilter::DSOutputFilter(LPUNKNOWN pUnk, HRESULT *phr)
+ : CSource(_T("TDSHOW_OUTPUT"), pUnk, CLSID_TdshowOutputFilter)
{
#if !(defined(_WIN32_WCE) && defined(_DEBUG))
CAutoLock cAutoLock(&m_cStateLock);
#endif
- // Add one source stream (output pin)!
- this->outputStream = new DSOutputStream(phr, this, _T("Out"));
+ // Add one source stream (output pin)!
+ this->outputStream = new DSOutputStream(phr, this, _T("Out"));
}
DSOutputFilter::~DSOutputFilter()
{
- //SAFE_RELEASE(this->outputStream);
+ //SAFE_RELEASE(this->outputStream);
}
void DSOutputFilter::setBuffer(void *pointer, int size)
{
- this->outputStream->lockBuffer();
- if(pointer && size){
- if(this->outputStream->buffer_size != size){
- if((this->outputStream->buffer = tsk_realloc(this->outputStream->buffer, size))){
- this->outputStream->buffer_size = size;
- }
- else goto done;
- }
- memcpy(this->outputStream->buffer, pointer, size);
- }
+ this->outputStream->lockBuffer();
+ if(pointer && size) {
+ if(this->outputStream->buffer_size != size) {
+ if((this->outputStream->buffer = tsk_realloc(this->outputStream->buffer, size))) {
+ this->outputStream->buffer_size = size;
+ }
+ else {
+ goto done;
+ }
+ }
+ memcpy(this->outputStream->buffer, pointer, size);
+ }
done:
- this->outputStream->unlockBuffer();
+ this->outputStream->unlockBuffer();
}
void DSOutputFilter::getMediaType(AM_MEDIA_TYPE* &pmt)
{
- //if(pmt)
- //{
- // memcpy(pmt, &this->outputStream->pmt, sizeof(AM_MEDIA_TYPE));
- //}
+ //if(pmt)
+ //{
+ // memcpy(pmt, &this->outputStream->pmt, sizeof(AM_MEDIA_TYPE));
+ //}
}
HRESULT DSOutputFilter::setMediaType(const AM_MEDIA_TYPE* pmt)
{
- return this->ReconnectPin(this->outputStream, pmt);
+ return this->ReconnectPin(this->outputStream, pmt);
}
HRESULT DSOutputFilter::setImageFormat(UINT width, UINT height)
{
- return this->outputStream->setImageFormat(width, height);
+ return this->outputStream->setImageFormat(width, height);
}
bool DSOutputFilter::getImageFormat(UINT &width, UINT &height)
{
- if(this->outputStream){
- return this->outputStream->getImageFormat(width, height);
- }
- return false;
+ if(this->outputStream) {
+ return this->outputStream->getImageFormat(width, height);
+ }
+ return false;
}
void DSOutputFilter::setFps(int fps_)
{
- this->outputStream->setFps(fps_);
+ this->outputStream->setFps(fps_);
}
void DSOutputFilter::showOverlay(int value)
{
- this->outputStream->showOverlay(value);
+ this->outputStream->showOverlay(value);
}
void DSOutputFilter::reset()
{
- this->outputStream->frameNumber = 0;
- this->outputStream->lockBuffer();
- this->outputStream->buffer = NULL;
- this->outputStream->buffer_size = 0;
- this->outputStream->unlockBuffer();
+ this->outputStream->frameNumber = 0;
+ this->outputStream->lockBuffer();
+ this->outputStream->buffer = NULL;
+ this->outputStream->buffer_size = 0;
+ this->outputStream->unlockBuffer();
}
#ifdef _WIN32_WCE
STDMETHODIMP_(ULONG) DSOutputFilter::NonDelegatingRelease()
{
- if(InterlockedDecrement(&m_cRef) == 0)
- {
- delete this;
- return 0;
- }
- return m_cRef;
+ if(InterlockedDecrement(&m_cRef) == 0) {
+ delete this;
+ return 0;
+ }
+ return m_cRef;
}
#endif
diff --git a/plugins/pluginDirectShow/internals/DSOutputFilter.h b/plugins/pluginDirectShow/internals/DSOutputFilter.h
index fea2d23..eb424c4 100755
--- a/plugins/pluginDirectShow/internals/DSOutputFilter.h
+++ b/plugins/pluginDirectShow/internals/DSOutputFilter.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -26,87 +26,87 @@
class DSOutputStream;
// {17D9D5CB-850D-4339-B72A-F72D084D8D64}
-TDSHOW_DEFINE_GUID(CLSID_TdshowOutputFilter,
-0x17d9d5cb, 0x850d, 0x4339, 0xb7, 0x2a, 0xf7, 0x2d, 0x8, 0x4d, 0x8d, 0x64);
+TDSHOW_DEFINE_GUID(CLSID_TdshowOutputFilter,
+ 0x17d9d5cb, 0x850d, 0x4339, 0xb7, 0x2a, 0xf7, 0x2d, 0x8, 0x4d, 0x8d, 0x64);
class DSOutputFilter : public CSource, public DSBufferWriter
{
public:
- DSOutputFilter(LPUNKNOWN pUnk, HRESULT *phr);
- virtual ~DSOutputFilter();
-
- //static CUnknown * WINAPI CreateInstance(LPUNKNOWN pUnk, HRESULT *phr);
- DECLARE_IUNKNOWN;
-
- virtual void setBuffer(void *pointer, int size);
- virtual inline HRESULT setImageFormat(UINT width, UINT height);
- virtual bool getImageFormat(UINT &width, UINT &height);
+ DSOutputFilter(LPUNKNOWN pUnk, HRESULT *phr);
+ virtual ~DSOutputFilter();
+
+ //static CUnknown * WINAPI CreateInstance(LPUNKNOWN pUnk, HRESULT *phr);
+ DECLARE_IUNKNOWN;
+
+ virtual void setBuffer(void *pointer, int size);
+ virtual inline HRESULT setImageFormat(UINT width, UINT height);
+ virtual bool getImageFormat(UINT &width, UINT &height);
- virtual void setFps(int fps_);
- virtual void showOverlay(int value);
+ virtual void setFps(int fps_);
+ virtual void showOverlay(int value);
- virtual void getMediaType(AM_MEDIA_TYPE* &pmt);
- virtual HRESULT setMediaType(const AM_MEDIA_TYPE* pmt);
+ virtual void getMediaType(AM_MEDIA_TYPE* &pmt);
+ virtual HRESULT setMediaType(const AM_MEDIA_TYPE* pmt);
- void reset();
+ void reset();
#ifdef _WIN32_WCE
- STDMETHODIMP_(ULONG) NonDelegatingRelease();
+ STDMETHODIMP_(ULONG) NonDelegatingRelease();
#endif
//protected:
#ifdef _WIN32_WCE
- /*STDMETHODIMP QueryInterface(REFIID riid, void **ppv)
- {
- CheckPointer(ppv, E_POINTER);
-
- if (riid == IID_IBaseFilter
- || riid == IID_IBaseFilter
- || riid == IID_IUnknown
- || riid == IID_IMediaFilter
- )
- {
- return GetInterface((IBaseFilter *) this, ppv);
- }
- else
- {
- *ppv = NULL;
- return E_NOINTERFACE;
- }
+ /*STDMETHODIMP QueryInterface(REFIID riid, void **ppv)
+ {
+ CheckPointer(ppv, E_POINTER);
+
+ if (riid == IID_IBaseFilter
+ || riid == IID_IBaseFilter
+ || riid == IID_IUnknown
+ || riid == IID_IMediaFilter
+ )
+ {
+ return GetInterface((IBaseFilter *) this, ppv);
+ }
+ else
+ {
+ *ppv = NULL;
+ return E_NOINTERFACE;
+ }
};
STDMETHODIMP_(ULONG) AddRef() {
- //return GetOwner()->AddRef();
- //return 1;
- return (ULONG)InterlockedIncrement(&m_cRef);
+ //return GetOwner()->AddRef();
+ //return 1;
+ return (ULONG)InterlockedIncrement(&m_cRef);
};
STDMETHODIMP_(ULONG) Release() {
LONG lRefCount = InterlockedDecrement(&m_cRef);
- if(m_cRef < 1) delete this;
- return (ULONG)m_cRef;
+ if(m_cRef < 1) delete this;
+ return (ULONG)m_cRef;
};
- STDMETHODIMP_(ULONG) NonDelegatingAddRef()
- {
- return InterlockedIncrement(&m_cRef);
- }*/
+ STDMETHODIMP_(ULONG) NonDelegatingAddRef()
+ {
+ return InterlockedIncrement(&m_cRef);
+ }*/
#endif
-/*
- STDMETHODIMP_(ULONG) NonDelegatingRelease()
- {
- if(InterlockedDecrement(&m_cRef) == 0)
- {
- delete this;
- return 0;
- }
- return m_cRef;
- }*/
+ /*
+ STDMETHODIMP_(ULONG) NonDelegatingRelease()
+ {
+ if(InterlockedDecrement(&m_cRef) == 0)
+ {
+ delete this;
+ return 0;
+ }
+ return m_cRef;
+ }*/
private:
- DSOutputStream *outputStream;
+ DSOutputStream *outputStream;
- friend class DSOutputStream;
+ friend class DSOutputStream;
};
#endif
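
Note: DECLARE_IUNKNOWN in the class above supplies the public IUnknown methods by forwarding to the object returned by GetOwner(), which is why the WCE build (and the commented-out experiments) override NonDelegatingRelease() rather than Release(). In the DirectShow base classes (combase.h) the macro expands to roughly the following; reproduced only for context, it is not code added by this commit:

    // Approximate expansion of DECLARE_IUNKNOWN from the DirectShow base classes.
    STDMETHODIMP QueryInterface(REFIID riid, void **ppv) {
        return GetOwner()->QueryInterface(riid, ppv);   // delegate to the aggregating outer object
    }
    STDMETHODIMP_(ULONG) AddRef() {
        return GetOwner()->AddRef();
    }
    STDMETHODIMP_(ULONG) Release() {
        return GetOwner()->Release();
    }
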
diff --git a/plugins/pluginDirectShow/internals/DSOutputStream.cxx b/plugins/pluginDirectShow/internals/DSOutputStream.cxx
index 670d0ae..1e13d78 100755
--- a/plugins/pluginDirectShow/internals/DSOutputStream.cxx
+++ b/plugins/pluginDirectShow/internals/DSOutputStream.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -34,280 +34,281 @@ using namespace std;
#define OVERLAY_TEXT TEXT("Press ESC to exit full screen mode")
#define OVERLAY_DURATION 3 // in seconds
-DSOutputStream::DSOutputStream(HRESULT *phr, DSOutputFilter *pParent, LPCWSTR pPinName)
-: CSourceStream(_T("DSOutputStream"), phr, pParent, pPinName)
+DSOutputStream::DSOutputStream(HRESULT *phr, DSOutputFilter *pParent, LPCWSTR pPinName)
+ : CSourceStream(_T("DSOutputStream"), phr, pParent, pPinName)
{
#if !(defined(_WIN32_WCE) && defined(_DEBUG))
- CAutoLock cAutoLock(m_pFilter->pStateLock());
+ CAutoLock cAutoLock(m_pFilter->pStateLock());
#endif
- this->buffer = NULL;
- this->buffer_size = NULL;
+ this->buffer = NULL;
+ this->buffer_size = NULL;
- this->frameNumber = 0;
- this->frameLength = (1000)/DEFAULT_FPS;
- this->fps = DEFAULT_FPS;
+ this->frameNumber = 0;
+ this->frameLength = (1000)/DEFAULT_FPS;
+ this->fps = DEFAULT_FPS;
- this->width = 352;
- this->height = 288;
+ this->width = 352;
+ this->height = 288;
- this->overlay = false;
+ this->overlay = false;
- this->paintBuffer = NULL;
- this->paintDC = NULL;
- this->hDibSection = NULL;
- this->hObject = NULL;
+ this->paintBuffer = NULL;
+ this->paintDC = NULL;
+ this->hDibSection = NULL;
+ this->hObject = NULL;
- this->mutex = tsk_mutex_create();
+ this->mutex = tsk_mutex_create();
}
DSOutputStream::~DSOutputStream()
{
- TSK_FREE(this->buffer);
- tsk_mutex_destroy(&this->mutex);
- // TODO : Is there anything to free ???
+ TSK_FREE(this->buffer);
+ tsk_mutex_destroy(&this->mutex);
+ // TODO : Is there anything to free ???
}
void DSOutputStream::setFps(int fps_)
{
- this->fps = fps_;
- this->frameLength = (1000)/this->fps;
+ this->fps = fps_;
+ this->frameLength = (1000)/this->fps;
}
void DSOutputStream::showOverlay(int value)
{
- if (value == 0){
- this->overlay = false;
- }
- this->overlay = (value > 0);
+ if (value == 0) {
+ this->overlay = false;
+ }
+ this->overlay = (value > 0);
}
HRESULT DSOutputStream::setImageFormat(UINT width, UINT height)
{
- if ((this->width == width) && (this->height == height)) return S_FALSE;
+ if ((this->width == width) && (this->height == height)) {
+ return S_FALSE;
+ }
- this->width = width;
- this->height = height;
+ this->width = width;
+ this->height = height;
- this->frameNumber = 0;
+ this->frameNumber = 0;
- return S_OK;
+ return S_OK;
}
bool DSOutputStream::getImageFormat(UINT &width, UINT &height)
{
- width = this->width;
- height = this->height;
- return true;
+ width = this->width;
+ height = this->height;
+ return true;
}
HRESULT DSOutputStream::GetMediaType(CMediaType *pMediaType)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
#if !(defined(_WIN32_WCE) && defined(_DEBUG))
- CAutoLock lock(m_pFilter->pStateLock());
+ CAutoLock lock(m_pFilter->pStateLock());
#endif
- ZeroMemory(pMediaType, sizeof(CMediaType));
-
- VIDEOINFO *pvi = (VIDEOINFO *)pMediaType->AllocFormatBuffer(sizeof(VIDEOINFO));
- if (NULL == pvi)
- return E_OUTOFMEMORY;
-
- ZeroMemory(pvi, sizeof(VIDEOINFO));
-
- pvi->bmiHeader.biCompression = BI_RGB;
- pvi->bmiHeader.biBitCount = 24;
- pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
- pvi->bmiHeader.biWidth = this->width;
- pvi->bmiHeader.biHeight = this->height;
- pvi->bmiHeader.biPlanes = 1;
- pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
- pvi->bmiHeader.biClrImportant = 0;
-
- // Frame rate
- pvi->AvgTimePerFrame = DS_MILLIS_TO_100NS(1000/this->fps);
-
- SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
- SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
-
- pMediaType->SetType(&MEDIATYPE_Video);
- pMediaType->SetFormatType(&FORMAT_VideoInfo);
- pMediaType->SetTemporalCompression(FALSE);
-
- pMediaType->SetSubtype(&MEDIASUBTYPE_RGB24);
- pMediaType->SetSampleSize(pvi->bmiHeader.biSizeImage);
-
- bitmapInfo.bmiHeader = pvi->bmiHeader;
-
- return hr;
+ ZeroMemory(pMediaType, sizeof(CMediaType));
+
+ VIDEOINFO *pvi = (VIDEOINFO *)pMediaType->AllocFormatBuffer(sizeof(VIDEOINFO));
+ if (NULL == pvi) {
+ return E_OUTOFMEMORY;
+ }
+
+ ZeroMemory(pvi, sizeof(VIDEOINFO));
+
+ pvi->bmiHeader.biCompression = BI_RGB;
+ pvi->bmiHeader.biBitCount = 24;
+ pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+ pvi->bmiHeader.biWidth = this->width;
+ pvi->bmiHeader.biHeight = this->height;
+ pvi->bmiHeader.biPlanes = 1;
+ pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
+ pvi->bmiHeader.biClrImportant = 0;
+
+ // Frame rate
+ pvi->AvgTimePerFrame = DS_MILLIS_TO_100NS(1000/this->fps);
+
+ SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
+ SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
+
+ pMediaType->SetType(&MEDIATYPE_Video);
+ pMediaType->SetFormatType(&FORMAT_VideoInfo);
+ pMediaType->SetTemporalCompression(FALSE);
+
+ pMediaType->SetSubtype(&MEDIASUBTYPE_RGB24);
+ pMediaType->SetSampleSize(pvi->bmiHeader.biSizeImage);
+
+ bitmapInfo.bmiHeader = pvi->bmiHeader;
+
+ return hr;
}
HRESULT DSOutputStream::DecideBufferSize(IMemAllocator *pMemAlloc, ALLOCATOR_PROPERTIES *pProperties)
{
- CheckPointer(pMemAlloc, E_POINTER);
- CheckPointer(pProperties, E_POINTER);
+ CheckPointer(pMemAlloc, E_POINTER);
+ CheckPointer(pProperties, E_POINTER);
#if !(defined(_WIN32_WCE) && defined(_DEBUG))
- CAutoLock cAutoLock(m_pFilter->pStateLock());
+ CAutoLock cAutoLock(m_pFilter->pStateLock());
#endif
- HRESULT hr = NOERROR;
-
- VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format();
- pProperties->cBuffers = 1;
- pProperties->cbBuffer = pvi->bmiHeader.biSizeImage;
-
- // Ask the allocator to reserve us some sample memory. NOTE: the function
- // can succeed (return NOERROR) but still not have allocated the
- // memory that we requested, so we must check we got whatever we wanted.
- ALLOCATOR_PROPERTIES Actual;
- hr = pMemAlloc->SetProperties(pProperties,&Actual);
- if(FAILED(hr)){
- return hr;
- }
-
- // Is this allocator unsuitable?
- if(Actual.cbBuffer < pProperties->cbBuffer)
- {
- return E_FAIL;
- }
-
- // Make sure that we have only 1 buffer (we erase the ball in the
- // old buffer to save having to zero a 200k+ buffer every time
- // we draw a frame)
- return NOERROR;
+ HRESULT hr = NOERROR;
+
+ VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format();
+ pProperties->cBuffers = 1;
+ pProperties->cbBuffer = pvi->bmiHeader.biSizeImage;
+
+ // Ask the allocator to reserve us some sample memory. NOTE: the function
+ // can succeed (return NOERROR) but still not have allocated the
+ // memory that we requested, so we must check we got whatever we wanted.
+ ALLOCATOR_PROPERTIES Actual;
+ hr = pMemAlloc->SetProperties(pProperties,&Actual);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Is this allocator unsuitable?
+ if(Actual.cbBuffer < pProperties->cbBuffer) {
+ return E_FAIL;
+ }
+
+ // Make sure that we have only 1 buffer (we erase the ball in the
+ // old buffer to save having to zero a 200k+ buffer every time
+ // we draw a frame)
+ return NOERROR;
}
HRESULT DSOutputStream::OnThreadCreate()
{
#if OVERLAY
- hDibSection = CreateDIBSection(NULL, (BITMAPINFO *) &bitmapInfo, DIB_RGB_COLORS, &paintBuffer, NULL, 0);
+ hDibSection = CreateDIBSection(NULL, (BITMAPINFO *) &bitmapInfo, DIB_RGB_COLORS, &paintBuffer, NULL, 0);
- HDC hDC = GetDC(NULL);
- paintDC = CreateCompatibleDC(hDC);
- SetMapMode(paintDC, GetMapMode(hDC));
- SetBkMode(paintDC, TRANSPARENT);
- SetTextColor(paintDC, RGB(255,255,255));
+ HDC hDC = GetDC(NULL);
+ paintDC = CreateCompatibleDC(hDC);
+ SetMapMode(paintDC, GetMapMode(hDC));
+ SetBkMode(paintDC, TRANSPARENT);
+ SetTextColor(paintDC, RGB(255,255,255));
- hObject = SelectObject(paintDC, hDibSection);
+ hObject = SelectObject(paintDC, hDibSection);
#endif
- return CSourceStream::OnThreadCreate();
+ return CSourceStream::OnThreadCreate();
}
HRESULT DSOutputStream::OnThreadDestroy()
{
#if OVERLAY
- if (paintDC) DeleteDC(paintDC);
- if (hObject) DeleteObject(hObject);
-
- if (paintBuffer)
- {
- //delete[] paintBuffer; // will be done
- //paintBuffer = NULL;
- }
+ if (paintDC) {
+ DeleteDC(paintDC);
+ }
+ if (hObject) {
+ DeleteObject(hObject);
+ }
+
+ if (paintBuffer) {
+ //delete[] paintBuffer; // will be done
+ //paintBuffer = NULL;
+ }
#endif
- return CSourceStream::OnThreadDestroy();
+ return CSourceStream::OnThreadDestroy();
}
inline HRESULT DSOutputStream::DrawOverLay(void *pBuffer, long lSize)
{
- // called only #if OVERLAY
- CopyMemory(paintBuffer, pBuffer, lSize);
+ // called only #if OVERLAY
+ CopyMemory(paintBuffer, pBuffer, lSize);
- // Draw the current frame
+ // Draw the current frame
#ifdef _WIN32_WCE
-
+
#else
- if( !TextOut( paintDC, 0, 0, OVERLAY_TEXT, (int)_tcslen( OVERLAY_TEXT ) ) ) return E_FAIL;
+ if( !TextOut( paintDC, 0, 0, OVERLAY_TEXT, (int)_tcslen( OVERLAY_TEXT ) ) ) {
+ return E_FAIL;
+ }
#endif
-
- CopyMemory(pBuffer, paintBuffer, lSize);
- return S_OK;
+ CopyMemory(pBuffer, paintBuffer, lSize);
+
+ return S_OK;
}
static __inline void TransfertBuffer(void* src, void* dest, long lSize)
{
- __try
- {
+ __try {
#if MEMCPY_WORKAROUND
- //#ifdef _WIN32_WCE
- memmove(dest, src, lSize);
- /*#else
- unsigned char * pDst = (unsigned char *) dest;
-
- if(src){
- unsigned char const * pSrc = (unsigned char const *) src;
- for( register int i=0; ((i< lSize) && src); i++) *pDst++ = *pSrc++;
- }else{
- for( register int i=0; i< lSize; i++) *pDst++ = 0;
- }
- #endif*/
+ //#ifdef _WIN32_WCE
+ memmove(dest, src, lSize);
+ /*#else
+ unsigned char * pDst = (unsigned char *) dest;
+
+ if(src){
+ unsigned char const * pSrc = (unsigned char const *) src;
+ for( register int i=0; ((i< lSize) && src); i++) *pDst++ = *pSrc++;
+ }else{
+ for( register int i=0; i< lSize; i++) *pDst++ = 0;
+ }
+ #endif*/
#else
- CopyMemory(dest, src, lSize); //BUGGY
+ CopyMemory(dest, src, lSize); //BUGGY
#endif
- }
- __except(EXCEPTION_ACCESS_VIOLATION == GetExceptionCode())
- {
- //ZeroMemory(dest, sizeof(void*));
- }
+ }
+ __except(EXCEPTION_ACCESS_VIOLATION == GetExceptionCode()) {
+ //ZeroMemory(dest, sizeof(void*));
+ }
}
HRESULT DSOutputStream::FillBuffer(IMediaSample *pSample)
{
- CheckPointer(pSample, E_POINTER);
+ CheckPointer(pSample, E_POINTER);
#if !(defined(_WIN32_WCE) && defined(_DEBUG))
- CAutoLock lock(m_pFilter->pStateLock());
+ CAutoLock lock(m_pFilter->pStateLock());
#endif
- HRESULT hr;
- BYTE *pBuffer = NULL;
- long lSize, lDataSize;
+ HRESULT hr;
+ BYTE *pBuffer = NULL;
+ long lSize, lDataSize;
- hr = pSample->GetPointer(&pBuffer);
- if (SUCCEEDED(hr))
- {
- lDataSize = lSize = pSample->GetSize();
+ hr = pSample->GetPointer(&pBuffer);
+ if (SUCCEEDED(hr)) {
+ lDataSize = lSize = pSample->GetSize();
- // Check that we're still using video
- //ASSERT(m_mt.formattype == FORMAT_VideoInfo);
+ // Check that we're still using video
+ //ASSERT(m_mt.formattype == FORMAT_VideoInfo);
- if (this->buffer)
- {
+ if (this->buffer) {
#if OVERLAY
- if (this->overlay)
- {
- DrawOverLay(this->buffer, lSize);
- }
+ if (this->overlay) {
+ DrawOverLay(this->buffer, lSize);
+ }
#endif
- // Why try do not work, see: http://msdn2.microsoft.com/en-us/library/xwtb73ad(vs.80).aspx
- this->lockBuffer();
- lDataSize = TSK_MIN(lSize, this->buffer_size);
- TransfertBuffer(this->buffer, (void*)pBuffer, lDataSize);
- this->unlockBuffer();
- }
- else
- {
- // Avoid caching last image
- memset((void*)pBuffer, NULL, lSize);
- }
-
- REFERENCE_TIME rtStart = DS_MILLIS_TO_100NS(this->frameNumber * this->frameLength);
- REFERENCE_TIME rtStop = rtStart + DS_MILLIS_TO_100NS(this->frameLength);
-
- this->frameNumber++;
-
- pSample->SetTime(&rtStart, &rtStop);
- //pSample->SetMediaTime(&rtStart, &rtStop);
- pSample->SetActualDataLength(lDataSize);
- pSample->SetPreroll(FALSE);
- pSample->SetDiscontinuity(FALSE);
- }
-
- // Set TRUE on every sample for uncompressed frames (KEYFRAME)
- pSample->SetSyncPoint(TRUE);
-
- return S_OK;
+ // Why try/catch does not work here, see: http://msdn2.microsoft.com/en-us/library/xwtb73ad(vs.80).aspx
+ this->lockBuffer();
+ lDataSize = TSK_MIN(lSize, this->buffer_size);
+ TransfertBuffer(this->buffer, (void*)pBuffer, lDataSize);
+ this->unlockBuffer();
+ }
+ else {
+ // Avoid caching last image
+ memset((void*)pBuffer, NULL, lSize);
+ }
+
+ REFERENCE_TIME rtStart = DS_MILLIS_TO_100NS(this->frameNumber * this->frameLength);
+ REFERENCE_TIME rtStop = rtStart + DS_MILLIS_TO_100NS(this->frameLength);
+
+ this->frameNumber++;
+
+ pSample->SetTime(&rtStart, &rtStop);
+ //pSample->SetMediaTime(&rtStart, &rtStop);
+ pSample->SetActualDataLength(lDataSize);
+ pSample->SetPreroll(FALSE);
+ pSample->SetDiscontinuity(FALSE);
+ }
+
+ // Set TRUE on every sample for uncompressed frames (KEYFRAME)
+ pSample->SetSyncPoint(TRUE);
+
+ return S_OK;
}
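
Note: FillBuffer() above stamps each sample purely from the frame counter, so the pin free-runs at the configured rate instead of tracking a clock. A standalone sketch of the same arithmetic, assuming DS_MILLIS_TO_100NS multiplies milliseconds by 10,000 (REFERENCE_TIME counts 100-ns units); illustrative only, not part of the plugin:

    #include <cstdint>

    typedef int64_t REFERENCE_TIME;                      // 100-nanosecond units, as in DirectShow

    // Assumed equivalent of the plugin's DS_MILLIS_TO_100NS macro.
    static REFERENCE_TIME MillisTo100ns(int64_t ms) {
        return ms * 10000;
    }

    // Compute the presentation interval of the Nth frame at a given frame rate.
    static void StampFrame(int64_t frameNumber, int fps,
                           REFERENCE_TIME &rtStart, REFERENCE_TIME &rtStop)
    {
        const int64_t frameLengthMs = 1000 / fps;        // same integer division as setFps()
        rtStart = MillisTo100ns(frameNumber * frameLengthMs);
        rtStop  = rtStart + MillisTo100ns(frameLengthMs);
    }

At 30 fps the integer division yields 33 ms per frame, an effective rate of roughly 30.3 fps; that rounding is inherent to the millisecond-based frameLength used by the stream.
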
diff --git a/plugins/pluginDirectShow/internals/DSOutputStream.h b/plugins/pluginDirectShow/internals/DSOutputStream.h
index db3ede4..211bdd8 100755
--- a/plugins/pluginDirectShow/internals/DSOutputStream.h
+++ b/plugins/pluginDirectShow/internals/DSOutputStream.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -29,61 +29,63 @@ class DSOutputFilter;
class DSOutputStream : public CSourceStream
{
public:
- DSOutputStream(HRESULT *phr, DSOutputFilter *pParent, LPCWSTR pPinName);
- virtual ~DSOutputStream();
-
- void setFps(int fps_);
- void showOverlay(int value);
-
- HRESULT setImageFormat(UINT width, UINT height);
- bool getImageFormat(UINT &width, UINT &height);
- STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) { return E_NOTIMPL; };
- inline bool lockBuffer() {
- if (this->mutex) {
- return tsk_mutex_lock(this->mutex) == 0;
- }
- return false;
- }
- inline bool unlockBuffer() {
- if (this->mutex) {
- return tsk_mutex_unlock(this->mutex) == 0;
- }
- return false;
- }
+ DSOutputStream(HRESULT *phr, DSOutputFilter *pParent, LPCWSTR pPinName);
+ virtual ~DSOutputStream();
+
+ void setFps(int fps_);
+ void showOverlay(int value);
+
+ HRESULT setImageFormat(UINT width, UINT height);
+ bool getImageFormat(UINT &width, UINT &height);
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) {
+ return E_NOTIMPL;
+ };
+ inline bool lockBuffer() {
+ if (this->mutex) {
+ return tsk_mutex_lock(this->mutex) == 0;
+ }
+ return false;
+ }
+ inline bool unlockBuffer() {
+ if (this->mutex) {
+ return tsk_mutex_unlock(this->mutex) == 0;
+ }
+ return false;
+ }
public:
- void *buffer;
- int buffer_size;
- LONGLONG frameNumber;
+ void *buffer;
+ int buffer_size;
+ LONGLONG frameNumber;
protected: // Overrides
- HRESULT GetMediaType(CMediaType *pMediaType);
- HRESULT DecideBufferSize(IMemAllocator *pMemAlloc, ALLOCATOR_PROPERTIES *pProperties);
- HRESULT OnThreadCreate();
- HRESULT OnThreadDestroy();
- HRESULT FillBuffer(IMediaSample *pSample);
+ HRESULT GetMediaType(CMediaType *pMediaType);
+ HRESULT DecideBufferSize(IMemAllocator *pMemAlloc, ALLOCATOR_PROPERTIES *pProperties);
+ HRESULT OnThreadCreate();
+ HRESULT OnThreadDestroy();
+ HRESULT FillBuffer(IMediaSample *pSample);
private:
- inline HRESULT DrawOverLay(void *pBuffer, long lSize);
+ inline HRESULT DrawOverLay(void *pBuffer, long lSize);
private:
- // TIMING
- REFERENCE_TIME frameLength;
- int fps;
-
- // sizing
- UINT width;
- UINT height;
-
- // overlaying
- bool overlay;
- BITMAPINFO bitmapInfo;
- void *paintBuffer;
- HDC paintDC;
- HBITMAP hDibSection;
- HGDIOBJ hObject;
-
- tsk_mutex_handle_t* mutex;
+ // TIMING
+ REFERENCE_TIME frameLength;
+ int fps;
+
+ // sizing
+ UINT width;
+ UINT height;
+
+ // overlaying
+ bool overlay;
+ BITMAPINFO bitmapInfo;
+ void *paintBuffer;
+ HDC paintDC;
+ HBITMAP hDibSection;
+ HGDIOBJ hObject;
+
+ tsk_mutex_handle_t* mutex;
};
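
Note: lockBuffer()/unlockBuffer() above return whether tsk_mutex_lock()/tsk_mutex_unlock() succeeded (0 means success) and are called as a manual pair around the frame copy in FillBuffer(). A small RAII guard is one way to keep such a pair balanced on every return path; the class below is an illustration built on the same tinySAK calls, not a helper that exists in the plugin:

    // Illustrative scope guard for a tsk_mutex_handle_t (hypothetical helper).
    class TskMutexGuard
    {
    public:
        explicit TskMutexGuard(tsk_mutex_handle_t* h) : m_h(h), m_locked(false) {
            if (m_h) {
                m_locked = (tsk_mutex_lock(m_h) == 0);   // 0 means success, mirroring lockBuffer()
            }
        }
        ~TskMutexGuard() {
            if (m_locked) {
                tsk_mutex_unlock(m_h);                   // released on every exit path
            }
        }
        bool locked() const {
            return m_locked;
        }
    private:
        TskMutexGuard(const TskMutexGuard&);             // non-copyable (pre-C++11 style)
        TskMutexGuard& operator=(const TskMutexGuard&);
        tsk_mutex_handle_t* m_h;
        bool m_locked;
    };
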
diff --git a/plugins/pluginDirectShow/internals/DSPushSource.h b/plugins/pluginDirectShow/internals/DSPushSource.h
index 140d9bd..e78c47e 100755
--- a/plugins/pluginDirectShow/internals/DSPushSource.h
+++ b/plugins/pluginDirectShow/internals/DSPushSource.h
@@ -12,7 +12,7 @@
#include <strsafe.h>
-// UNITS = 10 ^ 7
+// UNITS = 10 ^ 7
// UNITS / 30 = 30 fps;
// UNITS / 20 = 20 fps, etc
const REFERENCE_TIME FPS_30 = UNITS / 30;
@@ -35,8 +35,8 @@ const REFERENCE_TIME rtDefaultFrameLength = FPS_10;
#define NUM_FILES 5
// {3FD3081A-A8C9-4958-9F75-07EC89690024}
-TDSHOW_DEFINE_GUID(CLSID_PushSourceDesktop,
-0x3fd3081a, 0xa8c9, 0x4958, 0x9f, 0x75, 0x7, 0xec, 0x89, 0x69, 0x0, 0x24);
+TDSHOW_DEFINE_GUID(CLSID_PushSourceDesktop,
+ 0x3fd3081a, 0xa8c9, 0x4958, 0x9f, 0x75, 0x7, 0xec, 0x89, 0x69, 0x0, 0x24);
@@ -56,11 +56,11 @@ protected:
BITMAPINFO *m_pBmi; // Pointer to the bitmap header
DWORD m_cbBitmapInfo; // Size of the bitmap header
-
- // File opening variables
- HANDLE m_hFile; // Handle returned from CreateFile
+
+ // File opening variables
+ HANDLE m_hFile; // Handle returned from CreateFile
BYTE * m_pFile; // Points to beginning of file buffer
- BYTE * m_pImage; // Points to pixel bits
+ BYTE * m_pImage; // Points to pixel bits
int m_iFrameNumber;
const REFERENCE_TIME m_rtFrameLength;
@@ -77,13 +77,12 @@ public:
HRESULT GetMediaType(CMediaType *pMediaType);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
HRESULT FillBuffer(IMediaSample *pSample);
-
+
// Quality control
- // Not implemented because we aren't going in real time.
- // If the file-writing filter slows the graph down, we just do nothing, which means
- // wait until we're unblocked. No frames are ever dropped.
- STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
- {
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) {
return E_FAIL;
}
@@ -100,11 +99,11 @@ protected:
BITMAPINFO *m_pBmi[NUM_FILES]; // Pointer to the bitmap headers
DWORD m_cbBitmapInfo[NUM_FILES]; // Size of the bitmap headers
-
- // File opening variables
- HANDLE m_hFile[NUM_FILES]; // Handles returned from CreateFile
+
+ // File opening variables
+ HANDLE m_hFile[NUM_FILES]; // Handles returned from CreateFile
BYTE * m_pFile[NUM_FILES]; // Points to beginning of file buffers
- BYTE * m_pImage[NUM_FILES]; // Points to pixel bits
+ BYTE * m_pImage[NUM_FILES]; // Points to pixel bits
BOOL m_bFilesLoaded;
int m_iCurrentBitmap; // Which bitmap is being displayed
@@ -123,13 +122,12 @@ public:
HRESULT GetMediaType(CMediaType *pMediaType);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
HRESULT FillBuffer(IMediaSample *pSample);
-
+
// Quality control
- // Not implemented because we aren't going in real time.
- // If the file-writing filter slows the graph down, we just do nothing, which means
- // wait until we're unblocked. No frames are ever dropped.
- STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
- {
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) {
return E_FAIL;
}
@@ -158,7 +156,7 @@ protected:
CCritSec m_cSharedState; // Protects our internal state
CImageDisplay m_Display; // Figures out our media type for us
- HWND m_hSrcHwnd; // Handle to the window to grab
+ HWND m_hSrcHwnd; // Handle to the window to grab
public:
@@ -168,7 +166,7 @@ public:
// Override the version that offers exactly one media type
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
HRESULT FillBuffer(IMediaSample *pSample);
-
+
// Set the agreed media type and set up the necessary parameters
HRESULT SetMediaType(const CMediaType *pMediaType);
@@ -177,19 +175,17 @@ public:
HRESULT GetMediaType(int iPosition, CMediaType *pmt);
// Quality control
- // Not implemented because we aren't going in real time.
- // If the file-writing filter slows the graph down, we just do nothing, which means
- // wait until we're unblocked. No frames are ever dropped.
- STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
- {
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) {
return E_FAIL;
}
- HRESULT SetSrcHwnd(HWND hWnd)
- {
- m_hSrcHwnd = hWnd;
- return S_OK;
- }
+ HRESULT SetSrcHwnd(HWND hWnd) {
+ m_hSrcHwnd = hWnd;
+ return S_OK;
+ }
};
@@ -206,7 +202,7 @@ private:
CPushPinBitmap *m_pPin;
public:
- static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
};
@@ -222,7 +218,7 @@ private:
CPushPinBitmapSet *m_pPin;
public:
- static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
};
@@ -238,10 +234,10 @@ private:
CPushPinDesktop *m_pPin;
public:
- static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
- DECLARE_IUNKNOWN;
-
- HRESULT SetSrcHwnd(HWND hWnd);
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+ DECLARE_IUNKNOWN;
+
+ HRESULT SetSrcHwnd(HWND hWnd);
};
@@ -260,7 +256,7 @@ public:
#include <strsafe.h>
-// UNITS = 10 ^ 7
+// UNITS = 10 ^ 7
// UNITS / 30 = 30 fps;
// UNITS / 20 = 20 fps, etc
const REFERENCE_TIME FPS_30 = UNITS / 30;
@@ -283,8 +279,8 @@ const REFERENCE_TIME rtDefaultFrameLength = FPS_10;
#define NUM_FILES 5
// {3FD3081A-A8C9-4958-9F75-07EC89690024}
-TDSHOW_DEFINE_GUID(CLSID_PushSourceDesktop,
-0x3fd3081a, 0xa8c9, 0x4958, 0x9f, 0x75, 0x7, 0xec, 0x89, 0x69, 0x0, 0x24);
+TDSHOW_DEFINE_GUID(CLSID_PushSourceDesktop,
+ 0x3fd3081a, 0xa8c9, 0x4958, 0x9f, 0x75, 0x7, 0xec, 0x89, 0x69, 0x0, 0x24);
@@ -304,11 +300,11 @@ protected:
BITMAPINFO *m_pBmi; // Pointer to the bitmap header
DWORD m_cbBitmapInfo; // Size of the bitmap header
-
- // File opening variables
- HANDLE m_hFile; // Handle returned from CreateFile
+
+ // File opening variables
+ HANDLE m_hFile; // Handle returned from CreateFile
BYTE * m_pFile; // Points to beginning of file buffer
- BYTE * m_pImage; // Points to pixel bits
+ BYTE * m_pImage; // Points to pixel bits
int m_iFrameNumber;
const REFERENCE_TIME m_rtFrameLength;
@@ -325,13 +321,12 @@ public:
HRESULT GetMediaType(CMediaType *pMediaType);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
HRESULT FillBuffer(IMediaSample *pSample);
-
+
// Quality control
- // Not implemented because we aren't going in real time.
- // If the file-writing filter slows the graph down, we just do nothing, which means
- // wait until we're unblocked. No frames are ever dropped.
- STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
- {
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) {
return E_FAIL;
}
@@ -348,11 +343,11 @@ protected:
BITMAPINFO *m_pBmi[NUM_FILES]; // Pointer to the bitmap headers
DWORD m_cbBitmapInfo[NUM_FILES]; // Size of the bitmap headers
-
- // File opening variables
- HANDLE m_hFile[NUM_FILES]; // Handles returned from CreateFile
+
+ // File opening variables
+ HANDLE m_hFile[NUM_FILES]; // Handles returned from CreateFile
BYTE * m_pFile[NUM_FILES]; // Points to beginning of file buffers
- BYTE * m_pImage[NUM_FILES]; // Points to pixel bits
+ BYTE * m_pImage[NUM_FILES]; // Points to pixel bits
BOOL m_bFilesLoaded;
int m_iCurrentBitmap; // Which bitmap is being displayed
@@ -371,13 +366,12 @@ public:
HRESULT GetMediaType(CMediaType *pMediaType);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
HRESULT FillBuffer(IMediaSample *pSample);
-
+
// Quality control
- // Not implemented because we aren't going in real time.
- // If the file-writing filter slows the graph down, we just do nothing, which means
- // wait until we're unblocked. No frames are ever dropped.
- STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
- {
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) {
return E_FAIL;
}
@@ -406,7 +400,7 @@ protected:
CCritSec m_cSharedState; // Protects our internal state
CImageDisplay m_Display; // Figures out our media type for us
- HWND m_hSrcHwnd; // Handle to the window to grab
+ HWND m_hSrcHwnd; // Handle to the window to grab
public:
@@ -416,7 +410,7 @@ public:
// Override the version that offers exactly one media type
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);
HRESULT FillBuffer(IMediaSample *pSample);
-
+
// Set the agreed media type and set up the necessary parameters
HRESULT SetMediaType(const CMediaType *pMediaType);
@@ -425,19 +419,17 @@ public:
HRESULT GetMediaType(int iPosition, CMediaType *pmt);
// Quality control
- // Not implemented because we aren't going in real time.
- // If the file-writing filter slows the graph down, we just do nothing, which means
- // wait until we're unblocked. No frames are ever dropped.
- STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
- {
+ // Not implemented because we aren't going in real time.
+ // If the file-writing filter slows the graph down, we just do nothing, which means
+ // wait until we're unblocked. No frames are ever dropped.
+ STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q) {
return E_FAIL;
}
- HRESULT SetSrcHwnd(HWND hWnd)
- {
- m_hSrcHwnd = hWnd;
- return S_OK;
- }
+ HRESULT SetSrcHwnd(HWND hWnd) {
+ m_hSrcHwnd = hWnd;
+ return S_OK;
+ }
};
@@ -454,7 +446,7 @@ private:
CPushPinBitmap *m_pPin;
public:
- static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
};
@@ -470,7 +462,7 @@ private:
CPushPinBitmapSet *m_pPin;
public:
- static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
};
@@ -486,10 +478,10 @@ private:
CPushPinDesktop *m_pPin;
public:
- static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
- DECLARE_IUNKNOWN;
-
- HRESULT SetSrcHwnd(HWND hWnd);
+ static CUnknown * WINAPI CreateInstance(IUnknown *pUnk, HRESULT *phr);
+ DECLARE_IUNKNOWN;
+
+ HRESULT SetSrcHwnd(HWND hWnd);
};
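
Note: the FPS_* constants near the top of this header express a frame duration directly in REFERENCE_TIME ticks; UNITS is 10^7 (one second in 100-ns units), so UNITS / 30 is the length of one frame at 30 fps. A minimal sketch of the relation, independent of the SDK headers and illustrative only:

    #include <cstdint>

    typedef int64_t REFERENCE_TIME;
    static const REFERENCE_TIME UNITS_PER_SECOND = 10000000;   // 10^7 ticks of 100 ns = 1 s

    // Frame duration for a given rate, matching the header's FPS_30 = UNITS / 30 pattern.
    static REFERENCE_TIME FrameLengthForFps(int fps) {
        return UNITS_PER_SECOND / fps;
    }

    // Recover the integer frame rate from a frame duration (FPS_5 -> 5, FPS_10 -> 10, ...).
    static int FpsForFrameLength(REFERENCE_TIME rtFrameLength) {
        return (int)(UNITS_PER_SECOND / rtFrameLength);
    }
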
diff --git a/plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx b/plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx
index 1bdee9e..3e68ee2 100755
--- a/plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx
+++ b/plugins/pluginDirectShow/internals/DSPushSourceDesktop.cxx
@@ -18,27 +18,27 @@
/**********************************************
*
* CPushPinDesktop Class
- *
+ *
*
**********************************************/
CPushPinDesktop::CPushPinDesktop(HRESULT *phr, CSource *pFilter)
- : CSourceStream(NAME("Push Source Desktop"), phr, pFilter, L"Out"),
- m_FramesWritten(0),
- m_bZeroMemory(0),
- m_iFrameNumber(0),
- m_rtFrameLength(FPS_5), // Capture and display desktop 5 times per second
- m_nCurrentBitDepth(24),
- m_hSrcHwnd(NULL)
+ : CSourceStream(NAME("Push Source Desktop"), phr, pFilter, L"Out"),
+ m_FramesWritten(0),
+ m_bZeroMemory(0),
+ m_iFrameNumber(0),
+ m_rtFrameLength(FPS_5), // Capture and display desktop 5 times per second
+ m_nCurrentBitDepth(24),
+ m_hSrcHwnd(NULL)
{
- // The main point of this sample is to demonstrate how to take a DIB
- // in host memory and insert it into a video stream.
+ // The main point of this sample is to demonstrate how to take a DIB
+ // in host memory and insert it into a video stream.
- // To keep this sample as simple as possible, we just read the desktop image
- // from a file and copy it into every frame that we send downstream.
+ // To keep this sample as simple as possible, we just read the desktop image
+ // from a file and copy it into every frame that we send downstream.
//
- // In the filter graph, we connect this filter to the AVI Mux, which creates
- // the AVI file with the video frames we pass to it. In this case,
+ // In the filter graph, we connect this filter to the AVI Mux, which creates
+ // the AVI file with the video frames we pass to it. In this case,
// the end result is a screen capture video (GDI images only, with no
// support for overlay surfaces).
@@ -60,7 +60,7 @@ CPushPinDesktop::CPushPinDesktop(HRESULT *phr, CSource *pFilter)
}
CPushPinDesktop::~CPushPinDesktop()
-{
+{
DbgLog((LOG_TRACE, 3, TEXT("Frames written %d"), m_iFrameNumber));
}
@@ -84,74 +84,76 @@ HRESULT CPushPinDesktop::GetMediaType(int iPosition, CMediaType *pmt)
CheckPointer(pmt,E_POINTER);
CAutoLock cAutoLock(m_pFilter->pStateLock());
- if(iPosition < 0)
+ if(iPosition < 0) {
return E_INVALIDARG;
+ }
// Have we run off the end of types?
- if(iPosition > 4)
+ if(iPosition > 4) {
return VFW_S_NO_MORE_ITEMS;
+ }
VIDEOINFO *pvi = (VIDEOINFO *) pmt->AllocFormatBuffer(sizeof(VIDEOINFO));
- if(NULL == pvi)
+ if(NULL == pvi) {
return(E_OUTOFMEMORY);
+ }
// Initialize the VideoInfo structure before configuring its members
ZeroMemory(pvi, sizeof(VIDEOINFO));
- switch(iPosition)
- {
- case 0:
- {
- // Return our highest quality 32bit format
-
- // Since we use RGB888 (the default for 32 bit), there is
- // no reason to use BI_BITFIELDS to specify the RGB
- // masks. Also, not everything supports BI_BITFIELDS
- pvi->bmiHeader.biCompression = BI_RGB;
- pvi->bmiHeader.biBitCount = 32;
- break;
- }
+ switch(iPosition) {
+ case 0: {
+ // Return our highest quality 32bit format
- case 1:
- { // Return our 24bit format
- pvi->bmiHeader.biCompression = BI_RGB;
- pvi->bmiHeader.biBitCount = 24;
- break;
- }
+ // Since we use RGB888 (the default for 32 bit), there is
+ // no reason to use BI_BITFIELDS to specify the RGB
+ // masks. Also, not everything supports BI_BITFIELDS
+ pvi->bmiHeader.biCompression = BI_RGB;
+ pvi->bmiHeader.biBitCount = 32;
+ break;
+ }
- case 2:
- {
- // 16 bit per pixel RGB565
+ case 1: {
+ // Return our 24bit format
+ pvi->bmiHeader.biCompression = BI_RGB;
+ pvi->bmiHeader.biBitCount = 24;
+ break;
+ }
- // Place the RGB masks as the first 3 doublewords in the palette area
- for(int i = 0; i < 3; i++)
- pvi->TrueColorInfo.dwBitMasks[i] = bits565[i];
+ case 2: {
+ // 16 bit per pixel RGB565
- pvi->bmiHeader.biCompression = BI_BITFIELDS;
- pvi->bmiHeader.biBitCount = 16;
- break;
+ // Place the RGB masks as the first 3 doublewords in the palette area
+ for(int i = 0; i < 3; i++) {
+ pvi->TrueColorInfo.dwBitMasks[i] = bits565[i];
}
- case 3:
- { // 16 bits per pixel RGB555
+ pvi->bmiHeader.biCompression = BI_BITFIELDS;
+ pvi->bmiHeader.biBitCount = 16;
+ break;
+ }
- // Place the RGB masks as the first 3 doublewords in the palette area
- for(int i = 0; i < 3; i++)
- pvi->TrueColorInfo.dwBitMasks[i] = bits555[i];
+ case 3: {
+ // 16 bits per pixel RGB555
- pvi->bmiHeader.biCompression = BI_BITFIELDS;
- pvi->bmiHeader.biBitCount = 16;
- break;
+ // Place the RGB masks as the first 3 doublewords in the palette area
+ for(int i = 0; i < 3; i++) {
+ pvi->TrueColorInfo.dwBitMasks[i] = bits555[i];
}
- case 4:
- { // 8 bit palettised
+ pvi->bmiHeader.biCompression = BI_BITFIELDS;
+ pvi->bmiHeader.biBitCount = 16;
+ break;
+ }
- pvi->bmiHeader.biCompression = BI_RGB;
- pvi->bmiHeader.biBitCount = 8;
- pvi->bmiHeader.biClrUsed = iPALETTE_COLORS;
- break;
- }
+ case 4: {
+ // 8 bit palettised
+
+ pvi->bmiHeader.biCompression = BI_RGB;
+ pvi->bmiHeader.biBitCount = 8;
+ pvi->bmiHeader.biClrUsed = iPALETTE_COLORS;
+ break;
+ }
}
// Adjust the parameters common to all formats
@@ -191,38 +193,37 @@ HRESULT CPushPinDesktop::CheckMediaType(const CMediaType *pMediaType)
CheckPointer(pMediaType,E_POINTER);
if((*(pMediaType->Type()) != MEDIATYPE_Video) || // we only output video
- !(pMediaType->IsFixedSize())) // in fixed size samples
- {
+ !(pMediaType->IsFixedSize())) { // in fixed size samples
return E_INVALIDARG;
}
// Check for the subtypes we support
const GUID *SubType = pMediaType->Subtype();
- if (SubType == NULL)
+ if (SubType == NULL) {
return E_INVALIDARG;
+ }
if( (*SubType != MEDIASUBTYPE_RGB24)
#if 0
- && (*SubType != MEDIASUBTYPE_RGB565)
- && (*SubType != MEDIASUBTYPE_RGB555)
- && (*SubType != MEDIASUBTYPE_RGB32)
- && (*SubType != MEDIASUBTYPE_RGB8)
+ && (*SubType != MEDIASUBTYPE_RGB565)
+ && (*SubType != MEDIASUBTYPE_RGB555)
+ && (*SubType != MEDIASUBTYPE_RGB32)
+ && (*SubType != MEDIASUBTYPE_RGB8)
#endif
- )
- {
+ ) {
return E_INVALIDARG;
}
// Get the format area of the media type
VIDEOINFO *pvi = (VIDEOINFO *) pMediaType->Format();
- if(pvi == NULL)
+ if(pvi == NULL) {
return E_INVALIDARG;
+ }
// Check if the image width & height have changed
- if( pvi->bmiHeader.biWidth != m_iImageWidth ||
- abs(pvi->bmiHeader.biHeight) != m_iImageHeight)
- {
+ if( pvi->bmiHeader.biWidth != m_iImageWidth ||
+ abs(pvi->bmiHeader.biHeight) != m_iImageHeight) {
// If the image width/height is changed, fail CheckMediaType() to force
// the renderer to resize the image.
return E_INVALIDARG;
@@ -230,8 +231,9 @@ HRESULT CPushPinDesktop::CheckMediaType(const CMediaType *pMediaType)
// Don't accept formats with negative height, which would cause the desktop
// image to be displayed upside down.
- if (pvi->bmiHeader.biHeight < 0)
+ if (pvi->bmiHeader.biHeight < 0) {
return E_INVALIDARG;
+ }
return S_OK; // This format is acceptable.
@@ -246,7 +248,7 @@ HRESULT CPushPinDesktop::CheckMediaType(const CMediaType *pMediaType)
// Then we can ask for buffers of the correct size to contain them.
//
HRESULT CPushPinDesktop::DecideBufferSize(IMemAllocator *pAlloc,
- ALLOCATOR_PROPERTIES *pProperties)
+ ALLOCATOR_PROPERTIES *pProperties)
{
CheckPointer(pAlloc,E_POINTER);
CheckPointer(pProperties,E_POINTER);
@@ -265,14 +267,12 @@ HRESULT CPushPinDesktop::DecideBufferSize(IMemAllocator *pAlloc,
// memory that we requested, so we must check we got whatever we wanted.
ALLOCATOR_PROPERTIES Actual;
hr = pAlloc->SetProperties(pProperties,&Actual);
- if(FAILED(hr))
- {
+ if(FAILED(hr)) {
return hr;
}
// Is this allocator unsuitable?
- if(Actual.cbBuffer < pProperties->cbBuffer)
- {
+ if(Actual.cbBuffer < pProperties->cbBuffer) {
return E_FAIL;
}
@@ -297,31 +297,30 @@ HRESULT CPushPinDesktop::SetMediaType(const CMediaType *pMediaType)
// Pass the call up to my base class
HRESULT hr = CSourceStream::SetMediaType(pMediaType);
- if(SUCCEEDED(hr))
- {
+ if(SUCCEEDED(hr)) {
VIDEOINFO * pvi = (VIDEOINFO *) m_mt.Format();
- if (pvi == NULL)
+ if (pvi == NULL) {
return E_UNEXPECTED;
+ }
- switch(pvi->bmiHeader.biBitCount)
- {
- case 8: // 8-bit palettized
- case 16: // RGB565, RGB555
- case 24: // RGB24
- case 32: // RGB32
- // Save the current media type and bit depth
- m_MediaType = *pMediaType;
- m_nCurrentBitDepth = pvi->bmiHeader.biBitCount;
- hr = S_OK;
- break;
-
- default:
- // We should never agree any other media types
- ASSERT(FALSE);
- hr = E_INVALIDARG;
- break;
+ switch(pvi->bmiHeader.biBitCount) {
+ case 8: // 8-bit palettized
+ case 16: // RGB565, RGB555
+ case 24: // RGB24
+ case 32: // RGB32
+ // Save the current media type and bit depth
+ m_MediaType = *pMediaType;
+ m_nCurrentBitDepth = pvi->bmiHeader.biBitCount;
+ hr = S_OK;
+ break;
+
+ default:
+ // We should never agree any other media types
+ ASSERT(FALSE);
+ hr = E_INVALIDARG;
+ break;
}
- }
+ }
return hr;
@@ -332,7 +331,7 @@ HRESULT CPushPinDesktop::SetMediaType(const CMediaType *pMediaType)
// FillBuffer is called once for every sample in the stream.
HRESULT CPushPinDesktop::FillBuffer(IMediaSample *pSample)
{
- BYTE *pData;
+ BYTE *pData;
long cbData;
CheckPointer(pSample, E_POINTER);
@@ -348,18 +347,19 @@ HRESULT CPushPinDesktop::FillBuffer(IMediaSample *pSample)
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;
- // Copy the DIB bits over into our filter's output buffer.
+ // Copy the DIB bits over into our filter's output buffer.
// Since sample size may be larger than the image size, bound the copy size.
int nSize = min(pVih->bmiHeader.biSizeImage, (DWORD) cbData);
HDIB hDib = CopyScreenToBitmap(&m_rScreen, pData, (BITMAPINFO *) &(pVih->bmiHeader));
- if (hDib)
+ if (hDib) {
DeleteObject(hDib);
+ }
- // Set the timestamps that will govern playback frame rate.
- // If this file is getting written out as an AVI,
- // then you'll also need to configure the AVI Mux filter to
- // set the Average Time Per Frame for the AVI Header.
+ // Set the timestamps that will govern playback frame rate.
+ // If this file is getting written out as an AVI,
+ // then you'll also need to configure the AVI Mux filter to
+ // set the Average Time Per Frame for the AVI Header.
// The current time is the sample's start.
REFERENCE_TIME rtStart = m_iFrameNumber * m_rtFrameLength;
REFERENCE_TIME rtStop = rtStart + m_rtFrameLength;
@@ -367,7 +367,7 @@ HRESULT CPushPinDesktop::FillBuffer(IMediaSample *pSample)
pSample->SetTime(&rtStart, &rtStop);
m_iFrameNumber++;
- // Set TRUE on every sample for uncompressed frames
+ // Set TRUE on every sample for uncompressed frames
pSample->SetSyncPoint(TRUE);
return S_OK;
@@ -382,28 +382,28 @@ HRESULT CPushPinDesktop::FillBuffer(IMediaSample *pSample)
**********************************************/
CPushSourceDesktop::CPushSourceDesktop(IUnknown *pUnk, HRESULT *phr)
- : CSource(NAME("PushSourceDesktop"), pUnk, CLSID_PushSourceDesktop)
+ : CSource(NAME("PushSourceDesktop"), pUnk, CLSID_PushSourceDesktop)
{
// The pin magically adds itself to our pin array.
m_pPin = new CPushPinDesktop(phr, this);
- if (phr)
- {
- if (m_pPin == NULL)
- *phr = E_OUTOFMEMORY;
- else
- *phr = S_OK;
- }
+ if (phr) {
+ if (m_pPin == NULL) {
+ *phr = E_OUTOFMEMORY;
+ }
+ else {
+ *phr = S_OK;
+ }
+ }
}
CPushSourceDesktop::~CPushSourceDesktop()
{
- if (m_pPin)
- {
- delete m_pPin;
- m_pPin = NULL;
- }
+ if (m_pPin) {
+ delete m_pPin;
+ m_pPin = NULL;
+ }
}
@@ -411,24 +411,24 @@ CUnknown * WINAPI CPushSourceDesktop::CreateInstance(IUnknown *pUnk, HRESULT *ph
{
CPushSourceDesktop *pNewFilter = new CPushSourceDesktop(pUnk, phr );
- if (phr)
- {
- if (pNewFilter == NULL)
- *phr = E_OUTOFMEMORY;
- else
- *phr = S_OK;
- }
+ if (phr) {
+ if (pNewFilter == NULL) {
+ *phr = E_OUTOFMEMORY;
+ }
+ else {
+ *phr = S_OK;
+ }
+ }
return pNewFilter;
}
HRESULT CPushSourceDesktop::SetSrcHwnd(HWND hWnd)
{
- if (m_pPin)
- {
- return m_pPin->SetSrcHwnd(hWnd);
- }
- return E_FAIL;
+ if (m_pPin) {
+ return m_pPin->SetSrcHwnd(hWnd);
+ }
+ return E_FAIL;
}
#endif /* _WIN32_WCE */
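
Note: CPushPinDesktop::GetMediaType() above offers five uncompressed layouts (32-bit, 24-bit, two 16-bit and an 8-bit palettised format), and FillBuffer() bounds the per-frame copy by min(biSizeImage, cbData). biSizeImage for these DIB formats follows the usual 4-byte row padding rule; a sketch of that computation, independent of the Windows headers and illustrative only:

    #include <cstdint>

    // Size in bytes of a DIB with the given geometry and bit depth. Rows are padded to a
    // multiple of 4 bytes, which is what GetBitmapSize() computes for a BITMAPINFOHEADER.
    static uint32_t DibImageSize(int32_t width, int32_t height, uint16_t bitCount)
    {
        const uint32_t stride = (((uint32_t)width * bitCount + 31) / 32) * 4;  // padded row length
        const uint32_t rows   = (uint32_t)(height < 0 ? -height : height);     // negative height = top-down
        return stride * rows;
    }

For example, a 640x480 RGB24 frame is 1920 bytes per row (already a multiple of 4), i.e. 921,600 bytes in total.
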
diff --git a/plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx b/plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx
index b425d65..efdb261 100755
--- a/plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx
+++ b/plugins/pluginDirectShow/internals/DSScreenCastGraph.cxx
@@ -1,18 +1,18 @@
#if !defined(_WIN32_WCE)
/* Copyright (C) 2014 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -29,46 +29,46 @@
using namespace std;
DSScreenCastGraph::DSScreenCastGraph(ISampleGrabberCB* callback, HRESULT *hr)
-: DSBaseCaptureGraph(callback, hr)
+ : DSBaseCaptureGraph(callback, hr)
{
- this->grabberCallback = callback;
+ this->grabberCallback = callback;
- this->captureFormat = NULL;
- this->captureGraphBuilder = NULL;
- this->graphBuilder = NULL;
+ this->captureFormat = NULL;
+ this->captureGraphBuilder = NULL;
+ this->graphBuilder = NULL;
- this->sourceFilter = NULL;
- this->sampleGrabberFilter = NULL;
+ this->sourceFilter = NULL;
+ this->sampleGrabberFilter = NULL;
- this->nullRendererFilter = NULL;
- this->grabberController = NULL;
- this->mediaController = NULL;
- this->mediaEventController = NULL;
+ this->nullRendererFilter = NULL;
+ this->grabberController = NULL;
+ this->mediaController = NULL;
+ this->mediaEventController = NULL;
- this->running = FALSE;
- this->paused = FALSE;
+ this->running = FALSE;
+ this->paused = FALSE;
- *hr = this->createCaptureGraph();
+ *hr = this->createCaptureGraph();
}
DSScreenCastGraph::~DSScreenCastGraph()
{
- SAFE_RELEASE(this->mediaEventController);
- SAFE_RELEASE(this->mediaController);
- SAFE_RELEASE(this->grabberController);
+ SAFE_RELEASE(this->mediaEventController);
+ SAFE_RELEASE(this->mediaController);
+ SAFE_RELEASE(this->grabberController);
- SAFE_RELEASE(this->nullRendererFilter);
- SAFE_RELEASE(this->sampleGrabberFilter);
+ SAFE_RELEASE(this->nullRendererFilter);
+ SAFE_RELEASE(this->sampleGrabberFilter);
- SAFE_RELEASE(this->graphBuilder);
- SAFE_RELEASE(this->captureGraphBuilder);
+ SAFE_RELEASE(this->graphBuilder);
+ SAFE_RELEASE(this->captureGraphBuilder);
- SAFE_RELEASE(this->sourceFilter);
+ SAFE_RELEASE(this->sourceFilter);
}
HRESULT DSScreenCastGraph::setParameters(DSCaptureFormat *format, int framerate)
{
- return S_OK;
+ return S_OK;
}
#ifdef _WIN32_WCE
@@ -77,181 +77,207 @@ HRESULT DSScreenCastGraph::setParameters(DSCaptureFormat *format, int framerate)
HRESULT DSScreenCastGraph::connect()
{
- HRESULT hr;
+ HRESULT hr;
- if (!this->sourceFilter){
- TSK_DEBUG_ERROR("Invalid source filter");
- return E_FAIL;
- }
+ if (!this->sourceFilter) {
+ TSK_DEBUG_ERROR("Invalid source filter");
+ return E_FAIL;
+ }
#if 0
- if (!this->captureFormat){
- TSK_DEBUG_ERROR("Invalid capture format");
- return E_FAIL;
- }
+ if (!this->captureFormat) {
+ TSK_DEBUG_ERROR("Invalid capture format");
+ return E_FAIL;
+ }
#endif
-
- hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->sampleGrabberFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
- hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter); if(FAILED(hr)) { TSK_DEBUG_ERROR("ConnectFilters failed"); return hr; }
- return hr;
+ hr = ConnectFilters(this->graphBuilder, this->sourceFilter, this->sampleGrabberFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
+ hr = ConnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("ConnectFilters failed");
+ return hr;
+ }
+
+ return hr;
}
HRESULT DSScreenCastGraph::disconnect()
{
- HRESULT hr;
+ HRESULT hr;
- if (!this->sourceFilter)
- {
- return E_FAIL;
- }
+ if (!this->sourceFilter) {
+ return E_FAIL;
+ }
#if 0
- if (!this->captureFormat)
- {
- return E_FAIL;
- }
+ if (!this->captureFormat) {
+ return E_FAIL;
+ }
#endif
- hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->sampleGrabberFilter);
- hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->sourceFilter, this->sampleGrabberFilter);
+ hr = DisconnectFilters(this->graphBuilder, this->sampleGrabberFilter, this->nullRendererFilter);
- return hr;
+ return hr;
}
HRESULT DSScreenCastGraph::start()
{
- HRESULT hr;
-
- if (isRunning() && !isPaused())
- {
- return S_OK;
- }
-
- hr = this->mediaController->Run();
- if (!SUCCEEDED(hr))
- {
- TSK_DEBUG_ERROR("DSScreenCastGraph::mediaController->Run() has failed with %ld", hr);
- return hr;
- }
- this->running = true;
- return hr;
+ HRESULT hr;
+
+ if (isRunning() && !isPaused()) {
+ return S_OK;
+ }
+
+ hr = this->mediaController->Run();
+ if (!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("DSScreenCastGraph::mediaController->Run() has failed with %ld", hr);
+ return hr;
+ }
+ this->running = true;
+ return hr;
}
HRESULT DSScreenCastGraph::pause()
{
- HRESULT hr = S_OK;
- if (isRunning())
- {
- hr = this->mediaController->Pause();
- if (SUCCEEDED(hr))
- {
- this->paused = TRUE;
- }
- }
- return hr;
+ HRESULT hr = S_OK;
+ if (isRunning()) {
+ hr = this->mediaController->Pause();
+ if (SUCCEEDED(hr)) {
+ this->paused = TRUE;
+ }
+ }
+ return hr;
}
HRESULT DSScreenCastGraph::stop()
{
- if (!this->running)
- {
- return S_OK;
- }
-
- HRESULT hr;
- hr = this->mediaController->Stop();
- if (FAILED(hr))
- {
- TSK_DEBUG_ERROR("DSScreenCastGraph::mediaController->Stop() has failed with %ld", hr);
- }
- this->running = false;
- this->paused = false;
- return hr;
+ if (!this->running) {
+ return S_OK;
+ }
+
+ HRESULT hr;
+ hr = this->mediaController->Stop();
+ if (FAILED(hr)) {
+ TSK_DEBUG_ERROR("DSScreenCastGraph::mediaController->Stop() has failed with %ld", hr);
+ }
+ this->running = false;
+ this->paused = false;
+ return hr;
}
bool DSScreenCastGraph::isRunning()
{
- return this->running;
+ return this->running;
}
bool DSScreenCastGraph::isPaused()
{
- return this->paused;
+ return this->paused;
}
HRESULT DSScreenCastGraph::getConnectedMediaType(AM_MEDIA_TYPE *mediaType)
{
- return this->grabberController->GetConnectedMediaType(mediaType);
+ return this->grabberController->GetConnectedMediaType(mediaType);
}
HRESULT DSScreenCastGraph::createCaptureGraph()
{
- HRESULT hr;
-
- // Create capture graph builder
- hr = COCREATE(CLSID_CaptureGraphBuilder2, IID_ICaptureGraphBuilder2, this->captureGraphBuilder);
- if(FAILED(hr)) return hr;
-
- // Create the graph builder
- hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder);
- if(FAILED(hr)) return hr;
-
- // Initialize the Capture Graph Builder.
- hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder);
- if(FAILED(hr)) return hr;
-
- // Create source filter
- LPUNKNOWN pUnk = NULL;
- this->sourceFilter = (CPushSourceDesktop*)CPushSourceDesktop::CreateInstance(pUnk, &hr);
- if(FAILED(hr)) return hr;
- this->sourceFilter->AddRef();
-
- // Create the sample grabber filter
- hr = COCREATE(CLSID_SampleGrabber, IID_IBaseFilter, this->sampleGrabberFilter);
- if(FAILED(hr)) return hr;
-
- // Create the NULL renderer
- hr = COCREATE(CLSID_NullRenderer, IID_IBaseFilter, this->nullRendererFilter);
- if(FAILED(hr)) return hr;
-
- // Add source filter to the graph
- hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_SCREENCAST);
- if(FAILED(hr)) return hr;
-
- // Add sample grabber to the graph
- hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER);
- if(FAILED(hr)) return hr;
-
- // Add null renderer to the graph
- hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER);
- if(FAILED(hr)) return hr;
-
- // Find media control
- hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController);
- if(FAILED(hr)) return hr;
-
- // Create the sample grabber controller
- hr = QUERY(this->sampleGrabberFilter, IID_ISampleGrabber, this->grabberController);
- if(FAILED(hr)) return hr;
-
- // Set the sample grabber media type (RGB24)
- // TODO : CHECK
- AM_MEDIA_TYPE mt;
- ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
- mt.majortype = MEDIATYPE_Video;
- mt.subtype = MEDIASUBTYPE_RGB24;
- mt.formattype = FORMAT_VideoInfo;
-
- hr = this->grabberController->SetMediaType(&mt);
- if(FAILED(hr)) return hr;
-
- // Set sample grabber media type
- this->grabberController->SetOneShot(FALSE);
- this->grabberController->SetBufferSamples(FALSE);
-
- hr = this->grabberController->SetCallback(this->grabberCallback, 1);
- if(FAILED(hr)) return hr;
-
- return hr;
+ HRESULT hr;
+
+ // Create capture graph builder
+ hr = COCREATE(CLSID_CaptureGraphBuilder2, IID_ICaptureGraphBuilder2, this->captureGraphBuilder);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Create the graph builder
+ hr = COCREATE(CLSID_FilterGraph, IID_IGraphBuilder, this->graphBuilder);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Initialize the Capture Graph Builder.
+ hr = this->captureGraphBuilder->SetFiltergraph(this->graphBuilder);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Create source filter
+ LPUNKNOWN pUnk = NULL;
+ this->sourceFilter = (CPushSourceDesktop*)CPushSourceDesktop::CreateInstance(pUnk, &hr);
+ if(FAILED(hr)) {
+ return hr;
+ }
+ this->sourceFilter->AddRef();
+
+ // Create the sample grabber filter
+ hr = COCREATE(CLSID_SampleGrabber, IID_IBaseFilter, this->sampleGrabberFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Create the NULL renderer
+ hr = COCREATE(CLSID_NullRenderer, IID_IBaseFilter, this->nullRendererFilter);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Add source filter to the graph
+ hr = this->graphBuilder->AddFilter(this->sourceFilter, FILTER_SCREENCAST);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Add sample grabber to the graph
+ hr = this->graphBuilder->AddFilter(this->sampleGrabberFilter, FITLER_SAMPLE_GRABBER);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Add null renderer to the graph
+ hr = this->graphBuilder->AddFilter(this->nullRendererFilter, FILTER_NULL_RENDERER);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Find media control
+ hr = QUERY(this->graphBuilder, IID_IMediaControl, this->mediaController);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Create the sample grabber controller
+ hr = QUERY(this->sampleGrabberFilter, IID_ISampleGrabber, this->grabberController);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Set the sample grabber media type (RGB24)
+ // TODO : CHECK
+ AM_MEDIA_TYPE mt;
+ ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
+ mt.majortype = MEDIATYPE_Video;
+ mt.subtype = MEDIASUBTYPE_RGB24;
+ mt.formattype = FORMAT_VideoInfo;
+
+ hr = this->grabberController->SetMediaType(&mt);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ // Set sample grabber media type
+ this->grabberController->SetOneShot(FALSE);
+ this->grabberController->SetBufferSamples(FALSE);
+
+ hr = this->grabberController->SetCallback(this->grabberCallback, 1);
+ if(FAILED(hr)) {
+ return hr;
+ }
+
+ return hr;
}
#endif /* _WIN32_WCE */ \ No newline at end of file
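
Note: createCaptureGraph() above wires CPushSourceDesktop -> Sample Grabber -> Null Renderer and hands frames to the plugin through ISampleGrabber::SetCallback(callback, 1); with the second argument equal to 1 the grabber invokes ISampleGrabberCB::BufferCB() rather than SampleCB(). A minimal callback sketch, assuming the qedit.h ISampleGrabberCB interface; the class name is illustrative and this is not code from the plugin:

    // Requires windows.h and qedit.h (ISampleGrabberCB / IID_ISampleGrabberCB).
    class ScreenCastGrabberCB : public ISampleGrabberCB
    {
    public:
        // IUnknown: lifetime is managed by the owner, so ref counting is a no-op here.
        STDMETHODIMP_(ULONG) AddRef()  { return 2; }
        STDMETHODIMP_(ULONG) Release() { return 1; }
        STDMETHODIMP QueryInterface(REFIID riid, void **ppv) {
            if (riid == IID_ISampleGrabberCB || riid == IID_IUnknown) {
                *ppv = (void*)this;   // no AddRef: the owner keeps the object alive
                return S_OK;
            }
            *ppv = NULL;
            return E_NOINTERFACE;
        }

        // Used when SetCallback(cb, 0) is chosen; unused in this graph.
        STDMETHODIMP SampleCB(double /*SampleTime*/, IMediaSample* /*pSample*/) {
            return E_NOTIMPL;
        }

        // Called once per frame with the raw RGB24 bits when SetCallback(cb, 1) is chosen.
        STDMETHODIMP BufferCB(double SampleTime, BYTE *pBuffer, long BufferLen) {
            // Forward pBuffer/BufferLen to the consumer here (encode, display, ...).
            return S_OK;
        }
    };
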
diff --git a/plugins/pluginDirectShow/internals/DSScreenCastGraph.h b/plugins/pluginDirectShow/internals/DSScreenCastGraph.h
index d201668..38400d0 100755
--- a/plugins/pluginDirectShow/internals/DSScreenCastGraph.h
+++ b/plugins/pluginDirectShow/internals/DSScreenCastGraph.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2014 Mamadou DIOP.
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,67 +31,73 @@ class CPushSourceDesktop;
class DSScreenCastGraph : public DSBaseCaptureGraph
{
public:
- DSScreenCastGraph(ISampleGrabberCB* callback, HRESULT *hr);
- virtual ~DSScreenCastGraph();
+ DSScreenCastGraph(ISampleGrabberCB* callback, HRESULT *hr);
+ virtual ~DSScreenCastGraph();
- std::vector<DSCaptureFormat> *getFormats() { return &this->supportedFormats; };
+ std::vector<DSCaptureFormat> *getFormats() {
+ return &this->supportedFormats;
+ };
- virtual HRESULT setSource(const std::string &devicePath) { return S_OK; }
- HRESULT setParameters(DSCaptureFormat *format, int framerate);
+ virtual HRESULT setSource(const std::string &devicePath) {
+ return S_OK;
+ }
+ HRESULT setParameters(DSCaptureFormat *format, int framerate);
- HRESULT connect();
- HRESULT disconnect();
+ HRESULT connect();
+ HRESULT disconnect();
- HRESULT start();
- HRESULT stop();
- HRESULT pause();
- bool isRunning();
- bool isPaused();
+ HRESULT start();
+ HRESULT stop();
+ HRESULT pause();
+ bool isRunning();
+ bool isPaused();
- HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
+ HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
- virtual std::string getDeviceId() const { return std::string("screencast"); }
+ virtual std::string getDeviceId() const {
+ return std::string("screencast");
+ }
private:
- HRESULT createCaptureGraph();
+ HRESULT createCaptureGraph();
private:
- ISampleGrabberCB *grabberCallback;
+ ISampleGrabberCB *grabberCallback;
- ICaptureGraphBuilder2 *captureGraphBuilder;
- IGraphBuilder *graphBuilder;
+ ICaptureGraphBuilder2 *captureGraphBuilder;
+ IGraphBuilder *graphBuilder;
- CPushSourceDesktop *sourceFilter;
- IBaseFilter *nullRendererFilter;
- IBaseFilter *sampleGrabberFilter;
+ CPushSourceDesktop *sourceFilter;
+ IBaseFilter *nullRendererFilter;
+ IBaseFilter *sampleGrabberFilter;
- ISampleGrabber *grabberController;
+ ISampleGrabber *grabberController;
- IMediaControl *mediaController;
- IMediaEventEx *mediaEventController;
+ IMediaControl *mediaController;
+ IMediaEventEx *mediaEventController;
- std::vector<DSCaptureFormat> supportedFormats;
- DSCaptureFormat *captureFormat;
+ std::vector<DSCaptureFormat> supportedFormats;
+ DSCaptureFormat *captureFormat;
- bool running;
- bool paused;
+ bool running;
+ bool paused;
};
#endif /* PLUGIN_DSHOW_DSSCREENCAST_H */
/* Copyright (C) 2014 Mamadou DIOP.
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -111,50 +117,56 @@ class CPushSourceDesktop;
class DSScreenCastGraph : public DSBaseCaptureGraph
{
public:
- DSScreenCastGraph(ISampleGrabberCB* callback, HRESULT *hr);
- virtual ~DSScreenCastGraph();
+ DSScreenCastGraph(ISampleGrabberCB* callback, HRESULT *hr);
+ virtual ~DSScreenCastGraph();
- std::vector<DSCaptureFormat> *getFormats() { return &this->supportedFormats; };
+ std::vector<DSCaptureFormat> *getFormats() {
+ return &this->supportedFormats;
+ };
- virtual HRESULT setSource(const std::string &devicePath) { return S_OK; }
- HRESULT setParameters(DSCaptureFormat *format, int framerate);
+ virtual HRESULT setSource(const std::string &devicePath) {
+ return S_OK;
+ }
+ HRESULT setParameters(DSCaptureFormat *format, int framerate);
- HRESULT connect();
- HRESULT disconnect();
+ HRESULT connect();
+ HRESULT disconnect();
- HRESULT start();
- HRESULT stop();
- HRESULT pause();
- bool isRunning();
- bool isPaused();
+ HRESULT start();
+ HRESULT stop();
+ HRESULT pause();
+ bool isRunning();
+ bool isPaused();
- HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
+ HRESULT getConnectedMediaType(AM_MEDIA_TYPE *mediaType);
- virtual std::string getDeviceId() const { return std::string("screencast"); }
+ virtual std::string getDeviceId() const {
+ return std::string("screencast");
+ }
private:
- HRESULT createCaptureGraph();
+ HRESULT createCaptureGraph();
private:
- ISampleGrabberCB *grabberCallback;
+ ISampleGrabberCB *grabberCallback;
- ICaptureGraphBuilder2 *captureGraphBuilder;
- IGraphBuilder *graphBuilder;
+ ICaptureGraphBuilder2 *captureGraphBuilder;
+ IGraphBuilder *graphBuilder;
- CPushSourceDesktop *sourceFilter;
- IBaseFilter *nullRendererFilter;
- IBaseFilter *sampleGrabberFilter;
+ CPushSourceDesktop *sourceFilter;
+ IBaseFilter *nullRendererFilter;
+ IBaseFilter *sampleGrabberFilter;
- ISampleGrabber *grabberController;
+ ISampleGrabber *grabberController;
- IMediaControl *mediaController;
- IMediaEventEx *mediaEventController;
+ IMediaControl *mediaController;
+ IMediaEventEx *mediaEventController;
- std::vector<DSCaptureFormat> supportedFormats;
- DSCaptureFormat *captureFormat;
+ std::vector<DSCaptureFormat> supportedFormats;
+ DSCaptureFormat *captureFormat;
- bool running;
- bool paused;
+ bool running;
+ bool paused;
};
#endif /* PLUGIN_DSHOW_DSSCREENCAST_H */
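For orientation, the lifecycle this interface implies is construct, setParameters(), connect(), start(), with stop()/disconnect() on teardown. A minimal caller-side sketch, assuming `cb` implements ISampleGrabberCB and `fmt` points at one of the entries returned by getFormats(); runScreenCast is an illustrative name, not plugin API:

    // Sketch only; error handling trimmed to the essentials.
    HRESULT runScreenCast(ISampleGrabberCB *cb, DSCaptureFormat *fmt, int fps)
    {
        HRESULT hr = S_OK;
        DSScreenCastGraph graph(cb, &hr);  // constructor reports failures through hr
        if (FAILED(hr)) {
            return hr;
        }
        graph.setParameters(fmt, fps);     // pick capture format and framerate
        hr = graph.connect();              // create and connect the filter chain
        if (SUCCEEDED(hr)) {
            hr = graph.start();            // frames now arrive through the ISampleGrabberCB callback
            // ... run until the session ends ...
            graph.stop();
            graph.disconnect();
        }
        return hr;
    }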
diff --git a/plugins/pluginDirectShow/internals/DSUtils.cxx b/plugins/pluginDirectShow/internals/DSUtils.cxx
index 913c081..6187e0f 100755
--- a/plugins/pluginDirectShow/internals/DSUtils.cxx
+++ b/plugins/pluginDirectShow/internals/DSUtils.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -29,221 +29,223 @@
HWND GetMainWindow()
{
- HWND hWnd;
- if (!(hWnd = GetActiveWindow())) {
- if (!(hWnd = GetForegroundWindow())) {
+ HWND hWnd;
+ if (!(hWnd = GetActiveWindow())) {
+ if (!(hWnd = GetForegroundWindow())) {
#if !defined(_WIN32_WCE)
- if (!(hWnd = GetConsoleWindow())) {
- return NULL;
- }
+ if (!(hWnd = GetConsoleWindow())) {
+ return NULL;
+ }
#endif
- }
- }
- return hWnd;
+ }
+ }
+ return hWnd;
}
bool IsMainThread()
-{
- HWND hWnd = GetMainWindow();
- if (hWnd) {
- DWORD mainTid = GetWindowThreadProcessId(hWnd, NULL);
- DWORD currentTid = GetCurrentThreadId();
- return (mainTid == currentTid);
- }
- return false;
+{
+ HWND hWnd = GetMainWindow();
+ if (hWnd) {
+ DWORD mainTid = GetWindowThreadProcessId(hWnd, NULL);
+ DWORD currentTid = GetCurrentThreadId();
+ return (mainTid == currentTid);
+ }
+ return false;
}
bool IsD3D9Supported()
{
#if defined(_WIN32_WCE)
- return false;
+ return false;
#else
- static bool g_bChecked = false;
- static bool g_bSupported = false;
-
- if (g_bChecked) {
- return g_bSupported;
- }
- g_bChecked = true;
- HRESULT hr = S_OK;
- IDirect3D9* pD3D = NULL;
- D3DDISPLAYMODE mode = { 0 };
- D3DPRESENT_PARAMETERS pp = {0};
- IDirect3DDevice9* pDevice = NULL;
-
- if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION))) {
+ static bool g_bChecked = false;
+ static bool g_bSupported = false;
+
+ if (g_bChecked) {
+ return g_bSupported;
+ }
+ g_bChecked = true;
+ HRESULT hr = S_OK;
+ IDirect3D9* pD3D = NULL;
+ D3DDISPLAYMODE mode = { 0 };
+ D3DPRESENT_PARAMETERS pp = {0};
+ IDirect3DDevice9* pDevice = NULL;
+
+ if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION))) {
hr = E_OUTOFMEMORY;
- goto bail;
+ goto bail;
}
hr = pD3D->GetAdapterDisplayMode(
- D3DADAPTER_DEFAULT,
- &mode
- );
- if (FAILED(hr)) {
- goto bail;
- }
+ D3DADAPTER_DEFAULT,
+ &mode
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
hr = pD3D->CheckDeviceType(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- mode.Format,
- D3DFMT_X8R8G8B8,
- TRUE // windowed
- );
- if (FAILED(hr)) {
- goto bail;
- }
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ mode.Format,
+ D3DFMT_X8R8G8B8,
+ TRUE // windowed
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
pp.BackBufferFormat = D3DFMT_X8R8G8B8;
pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
- pp.Windowed = TRUE;
- pp.hDeviceWindow = GetDesktopWindow();
+ pp.Windowed = TRUE;
+ pp.hDeviceWindow = GetDesktopWindow();
hr = pD3D->CreateDevice(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- pp.hDeviceWindow,
- D3DCREATE_HARDWARE_VERTEXPROCESSING,
- &pp,
- &pDevice
- );
- if (FAILED(hr)) {
- goto bail;
- }
-
- // Everythings is OK
- g_bSupported = true;
- TSK_DEBUG_INFO("D3D9 supported");
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ pp.hDeviceWindow,
+ D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &pp,
+ &pDevice
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ // Everything is OK
+ g_bSupported = true;
+ TSK_DEBUG_INFO("D3D9 supported");
bail:
- if (!g_bSupported) {
- TSK_DEBUG_WARN("D3D9 not supported");
- }
- SAFE_RELEASE(pDevice);
- SAFE_RELEASE(pD3D);
- return g_bSupported;
+ if (!g_bSupported) {
+ TSK_DEBUG_WARN("D3D9 not supported");
+ }
+ SAFE_RELEASE(pDevice);
+ SAFE_RELEASE(pD3D);
+ return g_bSupported;
#endif /* _WIN32_WCE */
}
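As a usage note, this probe is typically consulted once to decide between a Direct3D-9 backed display path and a plain GDI fallback; because the result is cached in the two static flags above, calling it repeatedly is cheap. A hedged sketch, where chooseRenderer and the enum are illustrative names rather than plugin API:

    enum RendererKind { RendererD3D9, RendererGDI };

    static RendererKind chooseRenderer()
    {
        // First call pays for the IDirect3D9 device probe; later calls hit the cached flag.
        return IsD3D9Supported() ? RendererD3D9 : RendererGDI;
    }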
IPin *GetPin(IBaseFilter *filter, PIN_DIRECTION direction)
{
- IEnumPins *enumPins = NULL;
- IPin *pin = NULL;
-
- HRESULT hr = filter->EnumPins(&enumPins);
- if(!enumPins){
- return NULL;
- }
-
- for(;;){
- ULONG fetched = 0;
- PIN_DIRECTION pinDir = PIN_DIRECTION(-1);
- pin = NULL;
-
- if (FAILED(enumPins->Next(1, &pin, &fetched))){
- enumPins->Release();
- return NULL;
- }
-
- if (fetched == 1 && pin){
- pin->QueryDirection(&pinDir);
- if(pinDir == direction){
- break;
- }
- pin->Release();
- }
- }
-
- enumPins->Release();
- return pin;
+ IEnumPins *enumPins = NULL;
+ IPin *pin = NULL;
+
+ HRESULT hr = filter->EnumPins(&enumPins);
+ if(!enumPins) {
+ return NULL;
+ }
+
+ for(;;) {
+ ULONG fetched = 0;
+ PIN_DIRECTION pinDir = PIN_DIRECTION(-1);
+ pin = NULL;
+
+ if (FAILED(enumPins->Next(1, &pin, &fetched))) {
+ enumPins->Release();
+ return NULL;
+ }
+
+ if (fetched == 1 && pin) {
+ pin->QueryDirection(&pinDir);
+ if(pinDir == direction) {
+ break;
+ }
+ pin->Release();
+ }
+ }
+
+ enumPins->Release();
+ return pin;
}
HRESULT ConnectFilters(IGraphBuilder *graphBuilder, IBaseFilter *source, IBaseFilter *destination, AM_MEDIA_TYPE *mediaType)
{
- HRESULT hr;
+ HRESULT hr;
- IPin *outPin = GetPin(source, PINDIR_OUTPUT);
- IPin *inPin = GetPin(destination, PINDIR_INPUT);
+ IPin *outPin = GetPin(source, PINDIR_OUTPUT);
+ IPin *inPin = GetPin(destination, PINDIR_INPUT);
- if (mediaType != NULL){
- hr = graphBuilder->ConnectDirect(outPin, inPin, mediaType);
- }
- else{
- hr = graphBuilder->Connect(outPin, inPin);
- }
+ if (mediaType != NULL) {
+ hr = graphBuilder->ConnectDirect(outPin, inPin, mediaType);
+ }
+ else {
+ hr = graphBuilder->Connect(outPin, inPin);
+ }
- SAFE_RELEASE(outPin);
- SAFE_RELEASE(inPin);
+ SAFE_RELEASE(outPin);
+ SAFE_RELEASE(inPin);
- return hr;
+ return hr;
}
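GetPin() hands back the first pin in the requested direction with an extra reference, which ConnectFilters() balances with SAFE_RELEASE. Together they let a graph be wired one edge at a time; here is a hedged sketch of chaining source -> sample grabber -> null renderer, in the spirit of the screen-cast graph above (wireGraph is an illustrative name, and the filters are assumed to be created and already added to the graph with AddFilter):

    HRESULT wireGraph(IGraphBuilder *graphBuilder,
                      IBaseFilter *sourceFilter,
                      IBaseFilter *sampleGrabberFilter,
                      IBaseFilter *nullRendererFilter)
    {
        // Passing NULL as the media type lets the graph builder negotiate the format.
        HRESULT hr = ConnectFilters(graphBuilder, sourceFilter, sampleGrabberFilter, NULL);
        if (FAILED(hr)) {
            return hr;
        }
        return ConnectFilters(graphBuilder, sampleGrabberFilter, nullRendererFilter, NULL);
    }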
HRESULT DisconnectFilters(IGraphBuilder *graphBuilder, IBaseFilter *source, IBaseFilter *destination)
{
- HRESULT hr;
+ HRESULT hr;
- IPin *outPin = GetPin(source, PINDIR_OUTPUT);
- IPin *inPin = GetPin(destination, PINDIR_INPUT);
+ IPin *outPin = GetPin(source, PINDIR_OUTPUT);
+ IPin *inPin = GetPin(destination, PINDIR_INPUT);
- if (inPin){
- hr = graphBuilder->Disconnect(inPin);
- }
+ if (inPin) {
+ hr = graphBuilder->Disconnect(inPin);
+ }
- if (outPin){
- hr = graphBuilder->Disconnect(outPin);
- }
+ if (outPin) {
+ hr = graphBuilder->Disconnect(outPin);
+ }
- SAFE_RELEASE(outPin);
- SAFE_RELEASE(inPin);
+ SAFE_RELEASE(outPin);
+ SAFE_RELEASE(inPin);
- return hr;
+ return hr;
}
bool DisconnectAllFilters(IGraphBuilder *graphBuilder)
{
- IEnumFilters* filterEnum = NULL;
- IBaseFilter* currentFilter = NULL;
- ULONG fetched;
- HRESULT hr;
-
- hr = graphBuilder->EnumFilters(&filterEnum);
- if (FAILED(hr)) {
- SAFE_RELEASE(filterEnum);
- return false;
- }
-
- while(filterEnum->Next(1, &currentFilter, &fetched) == S_OK){
- hr = DisconnectFilters(graphBuilder, currentFilter, currentFilter);
- SAFE_RELEASE(currentFilter);
- }
- SAFE_RELEASE(filterEnum);
- SAFE_RELEASE(currentFilter);
- return true;
+ IEnumFilters* filterEnum = NULL;
+ IBaseFilter* currentFilter = NULL;
+ ULONG fetched;
+ HRESULT hr;
+
+ hr = graphBuilder->EnumFilters(&filterEnum);
+ if (FAILED(hr)) {
+ SAFE_RELEASE(filterEnum);
+ return false;
+ }
+
+ while(filterEnum->Next(1, &currentFilter, &fetched) == S_OK) {
+ hr = DisconnectFilters(graphBuilder, currentFilter, currentFilter);
+ SAFE_RELEASE(currentFilter);
+ }
+ SAFE_RELEASE(filterEnum);
+ SAFE_RELEASE(currentFilter);
+ return true;
}
bool RemoveAllFilters(IGraphBuilder *graphBuilder)
{
- IEnumFilters* filterEnum = NULL;
- IBaseFilter* currentFilter = NULL;
- ULONG fetched;
- HRESULT hr;
-
- hr = graphBuilder->EnumFilters(&filterEnum);
- if (FAILED(hr)) return false;
-
- while(filterEnum->Next(1, &currentFilter, &fetched) == S_OK){
- hr = graphBuilder->RemoveFilter(currentFilter);
- if (FAILED(hr)){
- SAFE_RELEASE(filterEnum);
- return false;
- }
- SAFE_RELEASE(currentFilter);
- filterEnum->Reset();
- }
-
- SAFE_RELEASE(filterEnum);
- SAFE_RELEASE(currentFilter);
- return true;
+ IEnumFilters* filterEnum = NULL;
+ IBaseFilter* currentFilter = NULL;
+ ULONG fetched;
+ HRESULT hr;
+
+ hr = graphBuilder->EnumFilters(&filterEnum);
+ if (FAILED(hr)) {
+ return false;
+ }
+
+ while(filterEnum->Next(1, &currentFilter, &fetched) == S_OK) {
+ hr = graphBuilder->RemoveFilter(currentFilter);
+ if (FAILED(hr)) {
+ SAFE_RELEASE(filterEnum);
+ return false;
+ }
+ SAFE_RELEASE(currentFilter);
+ filterEnum->Reset();
+ }
+
+ SAFE_RELEASE(filterEnum);
+ SAFE_RELEASE(currentFilter);
+ return true;
}
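DisconnectAllFilters() passes the same filter as both source and destination, which works because DisconnectFilters() breaks the input and output pin connections independently; RemoveAllFilters() then resets the enumerator after every removal, as required once the graph topology changes. A typical teardown therefore looks roughly like this (a sketch; mediaController is the IMediaControl member the graph classes above hold):

    void teardownGraph(IMediaControl *mediaController, IGraphBuilder *graphBuilder)
    {
        if (mediaController) {
            mediaController->Stop();          // stop streaming before touching the topology
        }
        DisconnectAllFilters(graphBuilder);   // break every pin connection
        RemoveAllFilters(graphBuilder);       // then pull the filters out of the graph
    }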
@@ -257,109 +259,113 @@ bool RemoveAllFilters(IGraphBuilder *graphBuilder)
// C Callback that dispatch event to create display on UI thread
static LRESULT CALLBACK __create__WndProcWindow(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- HANDLE* event = reinterpret_cast<HANDLE*>(wParam);
- BOOL* isScreenCast = reinterpret_cast<BOOL*>(GetProp(hWnd, TEXT("screnCast")));
-
- if(event && lParam){
- switch(uMsg){
- case WM_CREATE_DISPLAY_ON_UI_THREAD:
- {
- HRESULT hr;
- DSDisplay** ppDisplay = reinterpret_cast<DSDisplay**>(lParam);
- *ppDisplay = new DSDisplay(&hr);
- SetEvent(event);
- break;
- }
- case WM_CREATE_GRABBER_ON_UI_THREAD:
- {
- HRESULT hr;
- DSGrabber** ppGrabber = reinterpret_cast<DSGrabber**>(lParam);
- *ppGrabber = new DSGrabber(&hr, *isScreenCast);
- SetEvent(event);
- break;
- }
- }
- }
- return DefWindowProc(hWnd, uMsg, wParam, lParam);
+ HANDLE* event = reinterpret_cast<HANDLE*>(wParam);
+ BOOL* isScreenCast = reinterpret_cast<BOOL*>(GetProp(hWnd, TEXT("screnCast")));
+
+ if(event && lParam) {
+ switch(uMsg) {
+ case WM_CREATE_DISPLAY_ON_UI_THREAD: {
+ HRESULT hr;
+ DSDisplay** ppDisplay = reinterpret_cast<DSDisplay**>(lParam);
+ *ppDisplay = new DSDisplay(&hr);
+ SetEvent(event);
+ break;
+ }
+ case WM_CREATE_GRABBER_ON_UI_THREAD: {
+ HRESULT hr;
+ DSGrabber** ppGrabber = reinterpret_cast<DSGrabber**>(lParam);
+ *ppGrabber = new DSGrabber(&hr, *isScreenCast);
+ SetEvent(event);
+ break;
+ }
+ }
+ }
+ return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
int createOnCurrentThead(HWND hWnd, void** ppRet, BOOL display, BOOL screnCast)
{
- HRESULT hr;
- if(display) *ppRet = new DSDisplay(&hr);
- else *ppRet = new DSGrabber(&hr, screnCast);
- if(FAILED(hr)){
- TSK_DEBUG_ERROR("Failed to created DirectShow %s", display ? "Display" : "Grabber");
- SAFE_DELETE_PTR(*ppRet);
- return -2;
- }
- return 0;
+ HRESULT hr;
+ if(display) {
+ *ppRet = new DSDisplay(&hr);
+ }
+ else {
+ *ppRet = new DSGrabber(&hr, screnCast);
+ }
+ if(FAILED(hr)) {
+ TSK_DEBUG_ERROR("Failed to created DirectShow %s", display ? "Display" : "Grabber");
+ SAFE_DELETE_PTR(*ppRet);
+ return -2;
+ }
+ return 0;
}
int createOnUIThead(HWND hWnd, void** ppRet, BOOL display, BOOL screnCast)
{
- static BOOL __isScreenCastFalse = FALSE;
- static BOOL __isScreenCastTrue = TRUE;
- if(!ppRet){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (IsMainThread()) {
- return createOnCurrentThead(hWnd, ppRet, display, screnCast);
- }
- else{
- TSK_DEBUG_INFO("Create DirectShow element on worker thread");
- HANDLE event = NULL;
- int ret = 0;
- DWORD retWait, retryCount = 3;
-
- if(!hWnd){
- if (!(hWnd = FindWindow(NULL, TEXT("Boghe - IMS/RCS Client")))) {
- if(!(hWnd = GetMainWindow())){
- TSK_DEBUG_ERROR("No Window handle could be used");
- return -2;
- }
- }
- }
+ static BOOL __isScreenCastFalse = FALSE;
+ static BOOL __isScreenCastTrue = TRUE;
+ if(!ppRet) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (IsMainThread()) {
+ return createOnCurrentThead(hWnd, ppRet, display, screnCast);
+ }
+ else {
+ TSK_DEBUG_INFO("Create DirectShow element on worker thread");
+ HANDLE event = NULL;
+ int ret = 0;
+ DWORD retWait, retryCount = 3;
+
+ if(!hWnd) {
+ if (!(hWnd = FindWindow(NULL, TEXT("Boghe - IMS/RCS Client")))) {
+ if(!(hWnd = GetMainWindow())) {
+ TSK_DEBUG_ERROR("No Window handle could be used");
+ return -2;
+ }
+ }
+ }
#if defined(_WIN32_WCE)
- WNDPROC wndProc = (WNDPROC) SetWindowLong(hWnd, GWL_WNDPROC, (LONG) __create__WndProcWindow);
+ WNDPROC wndProc = (WNDPROC) SetWindowLong(hWnd, GWL_WNDPROC, (LONG) __create__WndProcWindow);
#else
- WNDPROC wndProc = (WNDPROC) SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR) __create__WndProcWindow);
+ WNDPROC wndProc = (WNDPROC) SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR) __create__WndProcWindow);
#endif
- if (!wndProc) {
- TSK_DEBUG_ERROR("SetWindowLongPtr() failed with errcode=%d", GetLastError());
- return createOnCurrentThead(hWnd, ppRet, display, screnCast);
- }
-
- if (!(event = CreateEvent(NULL, TRUE, FALSE, NULL))) {
- TSK_DEBUG_ERROR("Failed to create new event");
- ret = -4; goto bail;
- }
- SetProp(hWnd, TEXT("screnCast"), screnCast ? &__isScreenCastTrue : &__isScreenCastFalse);
- if (!PostMessage(hWnd, display ? WM_CREATE_DISPLAY_ON_UI_THREAD : WM_CREATE_GRABBER_ON_UI_THREAD, reinterpret_cast<WPARAM>(event), reinterpret_cast<LPARAM>(ppRet))) {
- TSK_DEBUG_ERROR("PostMessageA() failed");
- ret = -5; goto bail;
- }
-
- do {
- retWait = WaitForSingleObject(event, WM_CREATE_ON_UI_THREAD_TIMEOUT);
- }
- while (retryCount-- > 0 && (retWait == WAIT_TIMEOUT));
-
- bail:
- // restore
- if (hWnd && wndProc) {
+ if (!wndProc) {
+ TSK_DEBUG_ERROR("SetWindowLongPtr() failed with errcode=%d", GetLastError());
+ return createOnCurrentThead(hWnd, ppRet, display, screnCast);
+ }
+
+ if (!(event = CreateEvent(NULL, TRUE, FALSE, NULL))) {
+ TSK_DEBUG_ERROR("Failed to create new event");
+ ret = -4;
+ goto bail;
+ }
+ SetProp(hWnd, TEXT("screnCast"), screnCast ? &__isScreenCastTrue : &__isScreenCastFalse);
+ if (!PostMessage(hWnd, display ? WM_CREATE_DISPLAY_ON_UI_THREAD : WM_CREATE_GRABBER_ON_UI_THREAD, reinterpret_cast<WPARAM>(event), reinterpret_cast<LPARAM>(ppRet))) {
+ TSK_DEBUG_ERROR("PostMessageA() failed");
+ ret = -5;
+ goto bail;
+ }
+
+ do {
+ retWait = WaitForSingleObject(event, WM_CREATE_ON_UI_THREAD_TIMEOUT);
+ }
+ while (retryCount-- > 0 && (retWait == WAIT_TIMEOUT));
+
+bail:
+ // restore
+ if (hWnd && wndProc) {
#if defined(_WIN32_WCE)
- SetWindowLong(hWnd, GWL_WNDPROC, (LONG)wndProc);
+ SetWindowLong(hWnd, GWL_WNDPROC, (LONG)wndProc);
#else
- SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)wndProc);
+ SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)wndProc);
#endif
- }
- if (event) {
- CloseHandle(event);
- }
+ }
+ if (event) {
+ CloseHandle(event);
+ }
- return ret;
- }
+ return ret;
+ }
}
\ No newline at end of file
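The net effect of createOnUIThead() is that DSDisplay and DSGrabber instances are always constructed on the window's owning thread: directly when the caller is already on it, otherwise by temporarily subclassing the window procedure and bouncing a WM_CREATE_*_ON_UI_THREAD message through it, with an event plus a bounded wait guarding against a stuck UI thread. A caller-side sketch from a worker thread (argument values follow the signature above; hWnd may be NULL to let the helper locate a window itself):

    DSGrabber *grabber = NULL;
    int ret = createOnUIThead(NULL /* hWnd: let the helper locate one */,
                              reinterpret_cast<void **>(&grabber),
                              FALSE /* display: FALSE means create a DSGrabber */,
                              TRUE  /* screnCast */);
    if (ret != 0 || !grabber) {
        // creation failed or timed out; see the negative return codes above
    }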
diff --git a/plugins/pluginDirectShow/internals/DSUtils.h b/plugins/pluginDirectShow/internals/DSUtils.h
index 09690ea..4d4cc97 100755
--- a/plugins/pluginDirectShow/internals/DSUtils.h
+++ b/plugins/pluginDirectShow/internals/DSUtils.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/pluginDirectShow/internals/Resizer.cxx b/plugins/pluginDirectShow/internals/Resizer.cxx
index 32bcfac..94db49e 100755
--- a/plugins/pluginDirectShow/internals/Resizer.cxx
+++ b/plugins/pluginDirectShow/internals/Resizer.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -33,19 +33,19 @@
#define STRETCH_2_1 7
void __stdcall StretchDIB(
- LPBITMAPINFOHEADER biDst, // --> BITMAPINFO of destination
- LPVOID lpvDst, // --> to destination bits
- int DstX, // Destination origin - x coordinate
- int DstY, // Destination origin - y coordinate
- int DstXE, // x extent of the BLT
- int DstYE, // y extent of the BLT
- LPBITMAPINFOHEADER biSrc, // --> BITMAPINFO of source
- LPVOID lpvSrc, // --> to source bits
- int SrcX, // Source origin - x coordinate
- int SrcY, // Source origin - y coordinate
- int SrcXE, // x extent of the BLT
- int SrcYE // y extent of the BLT
- );
+ LPBITMAPINFOHEADER biDst, // --> BITMAPINFO of destination
+ LPVOID lpvDst, // --> to destination bits
+ int DstX, // Destination origin - x coordinate
+ int DstY, // Destination origin - y coordinate
+ int DstXE, // x extent of the BLT
+ int DstYE, // y extent of the BLT
+ LPBITMAPINFOHEADER biSrc, // --> BITMAPINFO of source
+ LPVOID lpvSrc, // --> to source bits
+ int SrcX, // Source origin - x coordinate
+ int SrcY, // Source origin - y coordinate
+ int SrcXE, // x extent of the BLT
+ int SrcYE // y extent of the BLT
+);
/*
* an X_FUNC is a function that copies one scanline, stretching or shrinking it
@@ -63,10 +63,10 @@ void __stdcall StretchDIB(
* the fraction is multiplied by 65536.
*/
typedef void (*X_FUNC) (LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract);
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract);
void X_Stretch_1_1_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int x_fract);
@@ -97,23 +97,23 @@ void X_Stretch_N_1_32Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE, int DstXE, int
*/
void Y_Stretch_1_N(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
- int DstYE, int SrcWidth, int DstWidth, int x_fract,
- X_FUNC x_func, int nBits);
+ int DstYE, int SrcWidth, int DstWidth, int x_fract,
+ X_FUNC x_func, int nBits);
void Y_Stretch_N_1(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
- int DstYE, int SrcWidth, int DstWidth, int x_fract,
- X_FUNC x_func);
+ int DstYE, int SrcWidth, int DstWidth, int x_fract,
+ X_FUNC x_func);
/*
* special case y-stretch functions for 1:2 in both dimensions for 8 and 16 bits
* takes no X_FUNC arg. Will do entire stretch.
*/
void Stretch_1_2_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
- int DstYE, int SrcWidth, int DstWidth, int x_fract);
+ int DstYE, int SrcWidth, int DstWidth, int x_fract);
void Stretch_1_2_16Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
- int DstYE, int SrcWidth, int DstWidth, int x_fract);
+ int DstYE, int SrcWidth, int DstWidth, int x_fract);
/* straight copy of one scanline of count bytes */
void X_CopyScanline(LPBYTE lpSrc, LPBYTE lpDst, int count);
@@ -123,27 +123,27 @@ void X_CopyScanline(LPBYTE lpSrc, LPBYTE lpDst, int count);
// Resize function
//
void ResizeRGB( BITMAPINFOHEADER *pbiIn, //Src's BitMapInFoHeader
- const unsigned char * dibBits, //Src bits
- BITMAPINFOHEADER *pbiOut,
- unsigned char *pFrame, //Dst bits
- int iNewWidth, //new W in pixel
- int iNewHeight) //new H in pixel
-{
- StretchDIB( pbiOut, // --> BITMAPINFO of destination
- pFrame, // --> to destination bits
- 0, // Destination origin - x coordinate
- 0, // Destination origin - y coordinate
- iNewWidth, // x extent of the BLT
- iNewHeight, // y extent of the BLT
- pbiIn, // --> BITMAPINFO of destination
- (void*) dibBits, // --> to source bits
- 0, // Source origin - x coordinate
- 0, // Source origin - y coordinate
- pbiIn->biWidth, // x extent of the BLT
- pbiIn->biHeight // y extent of the BLT
- );
-
- return;
+ const unsigned char * dibBits, //Src bits
+ BITMAPINFOHEADER *pbiOut,
+ unsigned char *pFrame, //Dst bits
+ int iNewWidth, //new W in pixel
+ int iNewHeight) //new H in pixel
+{
+ StretchDIB( pbiOut, // --> BITMAPINFO of destination
+ pFrame, // --> to destination bits
+ 0, // Destination origin - x coordinate
+ 0, // Destination origin - y coordinate
+ iNewWidth, // x extent of the BLT
+ iNewHeight, // y extent of the BLT
+ pbiIn, // --> BITMAPINFO of destination
+ (void*) dibBits, // --> to source bits
+ 0, // Source origin - x coordinate
+ 0, // Source origin - y coordinate
+ pbiIn->biWidth, // x extent of the BLT
+ pbiIn->biHeight // y extent of the BLT
+ );
+
+ return;
}
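ResizeRGB() simply maps the whole source rectangle onto the whole destination rectangle; the only header fields StretchDIB ends up reading are biBitCount and biWidth, and both bit depths must match. A hedged usage sketch for shrinking a 24-bit frame (shrinkFrame is an illustrative name; the buffers are caller-allocated with DWORD-aligned rows):

    static void shrinkFrame(const unsigned char *srcFrame /* 640x480, 24bpp */,
                            unsigned char *dstFrame       /* 320x240, 24bpp */)
    {
        BITMAPINFOHEADER biIn  = {0};
        BITMAPINFOHEADER biOut = {0};
        biIn.biSize  = sizeof(BITMAPINFOHEADER);
        biIn.biWidth = 640;  biIn.biHeight  = 480;  biIn.biBitCount  = 24;  biIn.biPlanes  = 1;
        biOut.biSize = sizeof(BITMAPINFOHEADER);
        biOut.biWidth = 320; biOut.biHeight = 240;  biOut.biBitCount = 24;  biOut.biPlanes = 1;

        ResizeRGB(&biIn, srcFrame, &biOut, dstFrame, 320, 240);
    }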
@@ -166,44 +166,49 @@ StretchFactor(int SrcE, int DstE, int *pfract)
{
- if (SrcE == DstE) {
- if (pfract != NULL) {
- pfract = 0;
- }
-
- return(STRETCH_1_1);
-
- }
-
-
- if (SrcE > DstE) {
- if (pfract != NULL) {
- *pfract = ( (DstE << 16) / SrcE) & 0xffff;
- }
-
- if (SrcE == (DstE * 2)) {
- return(STRETCH_2_1);
- } else if (SrcE == (DstE * 4)) {
- return(STRETCH_4_1);
- } else {
- return(STRETCH_N_1);
- }
-
- } else {
-
- /* calculate delta fraction based on smallest / largest */
- if (pfract != NULL) {
- *pfract = ( (SrcE << 16) / DstE) & 0xffff;
- }
-
- if (DstE == (SrcE * 2)) {
- return(STRETCH_1_2);
- } else if (DstE == (SrcE * 4)) {
- return(STRETCH_1_4);
- } else {
- return(STRETCH_1_N);
- }
- }
+ if (SrcE == DstE) {
+ if (pfract != NULL) {
+ *pfract = 0;
+ }
+
+ return(STRETCH_1_1);
+
+ }
+
+
+ if (SrcE > DstE) {
+ if (pfract != NULL) {
+ *pfract = ( (DstE << 16) / SrcE) & 0xffff;
+ }
+
+ if (SrcE == (DstE * 2)) {
+ return(STRETCH_2_1);
+ }
+ else if (SrcE == (DstE * 4)) {
+ return(STRETCH_4_1);
+ }
+ else {
+ return(STRETCH_N_1);
+ }
+
+ }
+ else {
+
+ /* calculate delta fraction based on smallest / largest */
+ if (pfract != NULL) {
+ *pfract = ( (SrcE << 16) / DstE) & 0xffff;
+ }
+
+ if (DstE == (SrcE * 2)) {
+ return(STRETCH_1_2);
+ }
+ else if (DstE == (SrcE * 4)) {
+ return(STRETCH_1_4);
+ }
+ else {
+ return(STRETCH_1_N);
+ }
+ }
}
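A worked example of the classification: shrinking an extent of 640 to 480 takes the SrcE > DstE branch, so *pfract = ((480 << 16) / 640) & 0xffff = 49152, i.e. 0.75 in 16.16 fixed point, and because 640 is neither exactly 2x nor 4x 480 the function returns STRETCH_N_1. Doubling 240 to 480 returns STRETCH_1_2 with a fraction of 32768 (0.5).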
@@ -216,226 +221,227 @@ StretchFactor(int SrcE, int DstE, int *pfract)
void FAR PASCAL
StretchDIB(
- LPBITMAPINFOHEADER biDst, // --> BITMAPINFO of destination
- LPVOID lpvDst, // --> to destination bits
- int DstX, // Destination origin - x coordinate
- int DstY, // Destination origin - y coordinate
- int DstXE, // x extent of the BLT
- int DstYE, // y extent of the BLT
- LPBITMAPINFOHEADER biSrc, // --> BITMAPINFO of source
- LPVOID lpvSrc, // --> to source bits
- int SrcX, // Source origin - x coordinate
- int SrcY, // Source origin - y coordinate
- int SrcXE, // x extent of the BLT
- int SrcYE // y extent of the BLT
- )
+ LPBITMAPINFOHEADER biDst, // --> BITMAPINFO of destination
+ LPVOID lpvDst, // --> to destination bits
+ int DstX, // Destination origin - x coordinate
+ int DstY, // Destination origin - y coordinate
+ int DstXE, // x extent of the BLT
+ int DstYE, // y extent of the BLT
+ LPBITMAPINFOHEADER biSrc, // --> BITMAPINFO of source
+ LPVOID lpvSrc, // --> to source bits
+ int SrcX, // Source origin - x coordinate
+ int SrcY, // Source origin - y coordinate
+ int SrcXE, // x extent of the BLT
+ int SrcYE // y extent of the BLT
+)
{
- int nBits;
- int SrcWidth, DstWidth;
- LPBYTE lpDst = (LPBYTE)lpvDst, lpSrc = (LPBYTE)lpvSrc;
- int x_fract;
- int x_factor;
- int y_factor;
- X_FUNC xfunc;
-
-
- /*
- * chek that sizes are not same
- */
- /*if(DstXE == SrcXE && DstYE == SrcYE)
- {
- return;
- }*/
- /*
- * check that bit depths are same and 8, 16 or 24
- */
-
- if ((nBits = biDst->biBitCount) != biSrc->biBitCount) {
- return;
- }
-
- if ( (nBits != 8 ) && (nBits != 16) && (nBits != 24) &&
- (nBits != 32)) {
- return;
- }
-
- /*
- * check that extents are not bad
- */
- if ( (SrcXE <= 0) || (SrcYE <= 0) || (DstXE <= 0) || (DstYE <= 0)) {
- return;
- }
-
- /*
- * calculate width of one scan line in bytes, rounded up to
- * DWORD boundary.
- */
- SrcWidth = (((biSrc->biWidth * nBits) + 31) & ~31) / 8;
- DstWidth = (((biDst->biWidth * nBits) + 31) & ~31) / 8;
-
- /*
- * set initial source and dest pointers
- */
- lpSrc += (SrcY * SrcWidth) + ((SrcX * nBits) / 8);
- lpDst += (DstY * DstWidth) + ((DstX * nBits) / 8);
-
-
- /*
- * calculate stretch proportions (1:1, 1:2, 1:N, N:1 etc) and
- * also the fractional stretch factor. (we are not interested in
- * the y stretch fraction - this is only used in x stretching.
- */
-
- y_factor = StretchFactor(SrcYE, DstYE, NULL);
- x_factor = StretchFactor(SrcXE, DstXE, &x_fract);
-
- /*
- * we have special case routines for 1:2 in both dimensions
- * for 8 and 16 bits
- */
- if ((y_factor == x_factor) && (y_factor == STRETCH_1_2)) {
-
- if (nBits == 8) {
- //StartCounting();
- Stretch_1_2_8Bits(lpSrc, lpDst, SrcXE, SrcYE,
- DstXE, DstYE, SrcWidth, DstWidth,
- x_fract);
- //EndCounting("8 bit");
- return;
-
- } else if (nBits == 16) {
- //StartCounting();
- Stretch_1_2_16Bits(lpSrc, lpDst, SrcXE, SrcYE,
- DstXE, DstYE, SrcWidth, DstWidth,
- x_fract);
- //EndCounting("16 bit");
- return;
- }
- }
-
-
- /* pick an X stretch function */
- switch(nBits) {
-
- case 8:
- switch(x_factor) {
- case STRETCH_1_1:
- xfunc = X_Stretch_1_1_8Bits;
- break;
-
- case STRETCH_1_2:
- xfunc = X_Stretch_1_2_8Bits;
- break;
-
- case STRETCH_1_4:
- xfunc = X_Stretch_1_4_8Bits;
- break;
-
- case STRETCH_1_N:
- xfunc = X_Stretch_1_N_8Bits;
- break;
-
- case STRETCH_N_1:
- case STRETCH_4_1:
- case STRETCH_2_1:
- xfunc = X_Stretch_N_1_8Bits;
- break;
-
- }
- break;
-
- case 16:
- switch(x_factor) {
- case STRETCH_1_1:
- xfunc = X_Stretch_1_1_16Bits;
- break;
-
- case STRETCH_1_2:
- xfunc = X_Stretch_1_2_16Bits;
- break;
-
- case STRETCH_1_4:
- case STRETCH_1_N:
- xfunc = X_Stretch_1_N_16Bits;
- break;
-
- case STRETCH_N_1:
- case STRETCH_4_1:
- case STRETCH_2_1:
- xfunc = X_Stretch_N_1_16Bits;
- break;
-
- }
- break;
-
- case 24:
- switch(x_factor) {
- case STRETCH_1_1:
- xfunc = X_Stretch_1_1_24Bits;
- break;
-
- case STRETCH_1_2:
- case STRETCH_1_4:
- case STRETCH_1_N:
- xfunc = X_Stretch_1_N_24Bits;
- break;
-
- case STRETCH_N_1:
- case STRETCH_4_1:
- case STRETCH_2_1:
- xfunc = X_Stretch_N_1_24Bits;
- break;
-
- }
- break;
-
- case 32:
- switch(x_factor) {
- case STRETCH_1_1:
- xfunc = X_Stretch_1_1_32Bits;
- break;
-
- case STRETCH_1_2:
- case STRETCH_1_4:
- case STRETCH_1_N:
- xfunc = X_Stretch_1_N_32Bits;
- break;
-
- case STRETCH_N_1:
- case STRETCH_4_1:
- case STRETCH_2_1:
- xfunc = X_Stretch_N_1_32Bits;
- break;
-
- }
- break;
-
- }
-
-
- /*
- * now call appropriate stretching function depending
- * on the y stretch factor
- */
- switch (y_factor) {
- case STRETCH_1_1:
- case STRETCH_1_2:
- case STRETCH_1_4:
- case STRETCH_1_N:
- Y_Stretch_1_N(lpSrc, lpDst, SrcXE, SrcYE,
- DstXE, DstYE, SrcWidth, DstWidth, x_fract, xfunc, nBits);
- break;
-
- case STRETCH_N_1:
- case STRETCH_4_1:
- case STRETCH_2_1:
- Y_Stretch_N_1(lpSrc, lpDst, SrcXE, SrcYE,
- DstXE, DstYE, SrcWidth, DstWidth, x_fract, xfunc);
- break;
-
- }
- return;
+ int nBits;
+ int SrcWidth, DstWidth;
+ LPBYTE lpDst = (LPBYTE)lpvDst, lpSrc = (LPBYTE)lpvSrc;
+ int x_fract;
+ int x_factor;
+ int y_factor;
+ X_FUNC xfunc;
+
+
+ /*
+ * check that the sizes are not the same
+ */
+ /*if(DstXE == SrcXE && DstYE == SrcYE)
+ {
+ return;
+ }*/
+ /*
+ * check that bit depths are the same and 8, 16, 24 or 32
+ */
+
+ if ((nBits = biDst->biBitCount) != biSrc->biBitCount) {
+ return;
+ }
+
+ if ( (nBits != 8 ) && (nBits != 16) && (nBits != 24) &&
+ (nBits != 32)) {
+ return;
+ }
+
+ /*
+ * check that extents are not bad
+ */
+ if ( (SrcXE <= 0) || (SrcYE <= 0) || (DstXE <= 0) || (DstYE <= 0)) {
+ return;
+ }
+
+ /*
+ * calculate width of one scan line in bytes, rounded up to
+ * DWORD boundary.
+ */
+ SrcWidth = (((biSrc->biWidth * nBits) + 31) & ~31) / 8;
+ DstWidth = (((biDst->biWidth * nBits) + 31) & ~31) / 8;
+
+ /*
+ * set initial source and dest pointers
+ */
+ lpSrc += (SrcY * SrcWidth) + ((SrcX * nBits) / 8);
+ lpDst += (DstY * DstWidth) + ((DstX * nBits) / 8);
+
+
+ /*
+ * calculate stretch proportions (1:1, 1:2, 1:N, N:1 etc) and
+ * also the fractional stretch factor. (we are not interested in
+ * the y stretch fraction - this is only used in x stretching.
+ */
+
+ y_factor = StretchFactor(SrcYE, DstYE, NULL);
+ x_factor = StretchFactor(SrcXE, DstXE, &x_fract);
+
+ /*
+ * we have special case routines for 1:2 in both dimensions
+ * for 8 and 16 bits
+ */
+ if ((y_factor == x_factor) && (y_factor == STRETCH_1_2)) {
+
+ if (nBits == 8) {
+ //StartCounting();
+ Stretch_1_2_8Bits(lpSrc, lpDst, SrcXE, SrcYE,
+ DstXE, DstYE, SrcWidth, DstWidth,
+ x_fract);
+ //EndCounting("8 bit");
+ return;
+
+ }
+ else if (nBits == 16) {
+ //StartCounting();
+ Stretch_1_2_16Bits(lpSrc, lpDst, SrcXE, SrcYE,
+ DstXE, DstYE, SrcWidth, DstWidth,
+ x_fract);
+ //EndCounting("16 bit");
+ return;
+ }
+ }
+
+
+ /* pick an X stretch function */
+ switch(nBits) {
+
+ case 8:
+ switch(x_factor) {
+ case STRETCH_1_1:
+ xfunc = X_Stretch_1_1_8Bits;
+ break;
+
+ case STRETCH_1_2:
+ xfunc = X_Stretch_1_2_8Bits;
+ break;
+
+ case STRETCH_1_4:
+ xfunc = X_Stretch_1_4_8Bits;
+ break;
+
+ case STRETCH_1_N:
+ xfunc = X_Stretch_1_N_8Bits;
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ xfunc = X_Stretch_N_1_8Bits;
+ break;
+
+ }
+ break;
+
+ case 16:
+ switch(x_factor) {
+ case STRETCH_1_1:
+ xfunc = X_Stretch_1_1_16Bits;
+ break;
+
+ case STRETCH_1_2:
+ xfunc = X_Stretch_1_2_16Bits;
+ break;
+
+ case STRETCH_1_4:
+ case STRETCH_1_N:
+ xfunc = X_Stretch_1_N_16Bits;
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ xfunc = X_Stretch_N_1_16Bits;
+ break;
+
+ }
+ break;
+
+ case 24:
+ switch(x_factor) {
+ case STRETCH_1_1:
+ xfunc = X_Stretch_1_1_24Bits;
+ break;
+
+ case STRETCH_1_2:
+ case STRETCH_1_4:
+ case STRETCH_1_N:
+ xfunc = X_Stretch_1_N_24Bits;
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ xfunc = X_Stretch_N_1_24Bits;
+ break;
+
+ }
+ break;
+
+ case 32:
+ switch(x_factor) {
+ case STRETCH_1_1:
+ xfunc = X_Stretch_1_1_32Bits;
+ break;
+
+ case STRETCH_1_2:
+ case STRETCH_1_4:
+ case STRETCH_1_N:
+ xfunc = X_Stretch_1_N_32Bits;
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ xfunc = X_Stretch_N_1_32Bits;
+ break;
+
+ }
+ break;
+
+ }
+
+
+ /*
+ * now call appropriate stretching function depending
+ * on the y stretch factor
+ */
+ switch (y_factor) {
+ case STRETCH_1_1:
+ case STRETCH_1_2:
+ case STRETCH_1_4:
+ case STRETCH_1_N:
+ Y_Stretch_1_N(lpSrc, lpDst, SrcXE, SrcYE,
+ DstXE, DstYE, SrcWidth, DstWidth, x_fract, xfunc, nBits);
+ break;
+
+ case STRETCH_N_1:
+ case STRETCH_4_1:
+ case STRETCH_2_1:
+ Y_Stretch_N_1(lpSrc, lpDst, SrcXE, SrcYE,
+ DstXE, DstYE, SrcWidth, DstWidth, x_fract, xfunc);
+ break;
+
+ }
+ return;
}
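One detail worth calling out is the scan-line stride: rows are rounded up to a DWORD boundary, so at 24 bpp a biWidth of 33 gives 33 * 24 = 792 bits, (792 + 31) & ~31 = 800 bits, i.e. 100 bytes per row where the unpadded row would be 99. The same arithmetic as a stand-alone helper (a sketch; DIBStride is not part of the plugin):

    // Bytes per DWORD-aligned row, mirroring the expression used in StretchDIB.
    static inline int DIBStride(int widthPx, int bitsPerPixel)
    {
        return (((widthPx * bitsPerPixel) + 31) & ~31) / 8;
    }
    // DIBStride(33, 24) == 100, DIBStride(640, 24) == 1920, DIBStride(640, 8) == 640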
@@ -456,49 +462,50 @@ StretchDIB(
void
Y_Stretch_1_N(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int SrcYE,
- int DstXE,
- int DstYE,
- int SrcWidth,
- int DstWidth,
- int x_fract,
- X_FUNC x_func,
- int nBits)
+ LPBYTE lpDst,
+ int SrcXE,
+ int SrcYE,
+ int DstXE,
+ int DstYE,
+ int SrcWidth,
+ int DstWidth,
+ int x_fract,
+ X_FUNC x_func,
+ int nBits)
{
- int ydelta;
- register int i;
- LPBYTE lpPrev = NULL;
-
- ydelta = DstYE -1;
-
- for (i = 0; i < DstYE; i++) {
-
- /* have we already stretched this scanline ? */
- if (lpPrev == NULL) {
- /* no - copy one scanline */
- (*x_func)(lpSrc, lpDst, SrcXE, DstXE, x_fract);
- lpPrev = lpDst;
- } else {
- /* yes - this is a duplicate scanline. do
- * a straight copy of one that has already
- * been stretched/shrunk
- */
- X_CopyScanline(lpPrev, lpDst, DstXE * nBits / 8);
- }
-
- /* advance dest pointer */
- lpDst += DstWidth;
-
- /* should we advance source pointer this time ? */
- if ( (ydelta -= SrcYE) < 0) {
- ydelta += DstYE;
- lpSrc += SrcWidth;
- lpPrev = NULL;
- }
- }
+ int ydelta;
+ register int i;
+ LPBYTE lpPrev = NULL;
+
+ ydelta = DstYE -1;
+
+ for (i = 0; i < DstYE; i++) {
+
+ /* have we already stretched this scanline ? */
+ if (lpPrev == NULL) {
+ /* no - copy one scanline */
+ (*x_func)(lpSrc, lpDst, SrcXE, DstXE, x_fract);
+ lpPrev = lpDst;
+ }
+ else {
+ /* yes - this is a duplicate scanline. do
+ * a straight copy of one that has already
+ * been stretched/shrunk
+ */
+ X_CopyScanline(lpPrev, lpDst, DstXE * nBits / 8);
+ }
+
+ /* advance dest pointer */
+ lpDst += DstWidth;
+
+ /* should we advance source pointer this time ? */
+ if ( (ydelta -= SrcYE) < 0) {
+ ydelta += DstYE;
+ lpSrc += SrcWidth;
+ lpPrev = NULL;
+ }
+ }
}
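The ydelta accumulator is an integer DDA: it starts at DstYE - 1, loses SrcYE per destination row, and whenever it goes negative the source row advances, DstYE is added back, and the cached lpPrev is invalidated. For SrcYE = 2 and DstYE = 5 the loop therefore emits: row 0 stretched, row 0 copied, row 0 copied, row 1 stretched, row 1 copied, i.e. three output rows from the first source row and two from the second, with the (possibly expensive) x_func run only twice.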
@@ -510,38 +517,39 @@ Y_Stretch_1_N(LPBYTE lpSrc,
*/
void
Y_Stretch_N_1(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int SrcYE,
- int DstXE,
- int DstYE,
- int SrcWidth,
- int DstWidth,
- int x_fract,
- X_FUNC x_func)
+ LPBYTE lpDst,
+ int SrcXE,
+ int SrcYE,
+ int DstXE,
+ int DstYE,
+ int SrcWidth,
+ int DstWidth,
+ int x_fract,
+ X_FUNC x_func)
{
- int ydelta;
- register int i;
+ int ydelta;
+ register int i;
- ydelta = SrcYE -1;
+ ydelta = SrcYE -1;
- for (i = 0; i < DstYE; i++) {
+ for (i = 0; i < DstYE; i++) {
- /* copy one scanline */
- (*x_func)(lpSrc, lpDst, SrcXE, DstXE, x_fract);
+ /* copy one scanline */
+ (*x_func)(lpSrc, lpDst, SrcXE, DstXE, x_fract);
- /* advance dest pointer */
- lpDst += DstWidth;
+ /* advance dest pointer */
+ lpDst += DstWidth;
- /* how many times do we advance source pointer this time ? */
- do {
- lpSrc += SrcWidth;
- ydelta -= DstYE;
- } while (ydelta >= 0);
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc += SrcWidth;
+ ydelta -= DstYE;
+ }
+ while (ydelta >= 0);
- ydelta += SrcYE;
- }
+ ydelta += SrcYE;
+ }
}
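In the shrinking direction the same accumulator simply skips source rows: for SrcYE = 5 and DstYE = 2 it copies source rows 0 and 3 and drops rows 1, 2 and 4, i.e. nearest-row decimation with no filtering or averaging.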
/* ---8-bit X stretching -------------------------------------------------- */
@@ -553,27 +561,27 @@ Y_Stretch_N_1(LPBYTE lpSrc,
*/
void
X_Stretch_1_N_8Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- int xdelta;
- register int i;
+ int xdelta;
+ register int i;
- xdelta = DstXE -1;
+ xdelta = DstXE -1;
- for (i = 0; i < DstXE; i++) {
+ for (i = 0; i < DstXE; i++) {
- /* copy one byte and advance dest */
- *lpDst++ = *lpSrc;
+ /* copy one byte and advance dest */
+ *lpDst++ = *lpSrc;
- /* should we advance source pointer this time ? */
- if ( (xdelta -= SrcXE) < 0) {
- xdelta += DstXE;
- lpSrc++;
- }
- }
+ /* should we advance source pointer this time ? */
+ if ( (xdelta -= SrcXE) < 0) {
+ xdelta += DstXE;
+ lpSrc++;
+ }
+ }
}
@@ -584,29 +592,30 @@ X_Stretch_1_N_8Bits(LPBYTE lpSrc,
*/
void
X_Stretch_N_1_8Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- int xdelta;
- register int i;
+ int xdelta;
+ register int i;
- xdelta = SrcXE -1;
+ xdelta = SrcXE -1;
- for (i = 0; i < DstXE; i++) {
+ for (i = 0; i < DstXE; i++) {
- /* copy one byte and advance dest */
- *lpDst++ = *lpSrc;
+ /* copy one byte and advance dest */
+ *lpDst++ = *lpSrc;
- /* how many times do we advance source pointer this time ? */
- do {
- lpSrc++;
- xdelta -= DstXE;
- } while (xdelta >= 0);
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc++;
+ xdelta -= DstXE;
+ }
+ while (xdelta >= 0);
- xdelta += SrcXE;
- }
+ xdelta += SrcXE;
+ }
}
/*
@@ -616,51 +625,52 @@ X_Stretch_N_1_8Bits(LPBYTE lpSrc,
void
X_CopyScanline(LPBYTE lpSrc, LPBYTE lpDst, int count)
{
- register int i;
-
- /*
- * if the alignment of lpSrc and lpDst is the same, then
- * we can get them aligned and do a faster copy
- */
- if (((DWORD_PTR) lpSrc & 0x3) == ( (DWORD_PTR) lpDst & 0x3)) {
-
- /* align on WORD boundary */
- if ( (DWORD_PTR) lpSrc & 0x1) {
- *lpDst++ = *lpSrc++;
- count--;
- }
-
- /* align on DWORD boundary */
- if ((DWORD_PTR) lpSrc & 0x2) {
- * ((LPWORD) lpDst) = *((LPWORD) lpSrc);
- lpDst += sizeof(WORD);
- lpSrc += sizeof(WORD);
- count -= sizeof(WORD);
- }
-
- /* copy whole DWORDS */
- for ( i = (count / 4); i > 0; i--) {
- *((LPDWORD) lpDst) = *((LPDWORD) lpSrc);
- lpSrc += sizeof(DWORD);
- lpDst += sizeof(DWORD);
- }
- } else {
- /* the lpSrc and lpDst pointers are different
- * alignment, so leave them unaligned and
- * copy all the whole DWORDs
- */
- for (i = (count / 4); i> 0; i--) {
- *( (DWORD UNALIGNED FAR *) lpDst) =
- *((DWORD UNALIGNED FAR *) lpSrc);
- lpSrc += sizeof(DWORD);
- lpDst += sizeof(DWORD);
- }
- }
-
- /* in either case, copy last (up to 3) bytes. */
- for ( i = count % 4; i > 0; i--) {
- *lpDst++ = *lpSrc++;
- }
+ register int i;
+
+ /*
+ * if the alignment of lpSrc and lpDst is the same, then
+ * we can get them aligned and do a faster copy
+ */
+ if (((DWORD_PTR) lpSrc & 0x3) == ( (DWORD_PTR) lpDst & 0x3)) {
+
+ /* align on WORD boundary */
+ if ( (DWORD_PTR) lpSrc & 0x1) {
+ *lpDst++ = *lpSrc++;
+ count--;
+ }
+
+ /* align on DWORD boundary */
+ if ((DWORD_PTR) lpSrc & 0x2) {
+ * ((LPWORD) lpDst) = *((LPWORD) lpSrc);
+ lpDst += sizeof(WORD);
+ lpSrc += sizeof(WORD);
+ count -= sizeof(WORD);
+ }
+
+ /* copy whole DWORDS */
+ for ( i = (count / 4); i > 0; i--) {
+ *((LPDWORD) lpDst) = *((LPDWORD) lpSrc);
+ lpSrc += sizeof(DWORD);
+ lpDst += sizeof(DWORD);
+ }
+ }
+ else {
+ /* the lpSrc and lpDst pointers are different
+ * alignment, so leave them unaligned and
+ * copy all the whole DWORDs
+ */
+ for (i = (count / 4); i> 0; i--) {
+ *( (DWORD UNALIGNED FAR *) lpDst) =
+ *((DWORD UNALIGNED FAR *) lpSrc);
+ lpSrc += sizeof(DWORD);
+ lpDst += sizeof(DWORD);
+ }
+ }
+
+ /* in either case, copy last (up to 3) bytes. */
+ for ( i = count % 4; i > 0; i--) {
+ *lpDst++ = *lpSrc++;
+ }
}
/*
@@ -670,13 +680,13 @@ X_CopyScanline(LPBYTE lpSrc, LPBYTE lpDst, int count)
*/
void
X_Stretch_1_1_8Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- X_CopyScanline(lpSrc, lpDst, DstXE);
+ X_CopyScanline(lpSrc, lpDst, DstXE);
}
@@ -687,22 +697,22 @@ X_Stretch_1_1_8Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_2_8Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- WORD wPix;
- register int i;
+ WORD wPix;
+ register int i;
- for (i = 0; i < SrcXE; i++) {
+ for (i = 0; i < SrcXE; i++) {
- /* get a pixel and double it */
- wPix = *lpSrc++;
- wPix |= (wPix << 8);
- * ((WORD UNALIGNED *) lpDst) = wPix;
- lpDst += sizeof(WORD);
- }
+ /* get a pixel and double it */
+ wPix = *lpSrc++;
+ wPix |= (wPix << 8);
+ * ((WORD UNALIGNED *) lpDst) = wPix;
+ lpDst += sizeof(WORD);
+ }
}
@@ -713,23 +723,23 @@ X_Stretch_1_2_8Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_4_8Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- DWORD dwPix;
- register int i;
-
- for (i = 0; i < SrcXE; i++) {
-
- /* get a pixel and make four copies of it */
- dwPix = *lpSrc++;
- dwPix |= (dwPix <<8);
- dwPix |= (dwPix << 16);
- * ((DWORD UNALIGNED *) lpDst) = dwPix;
- lpDst += sizeof(DWORD);
- }
+ DWORD dwPix;
+ register int i;
+
+ for (i = 0; i < SrcXE; i++) {
+
+ /* get a pixel and make four copies of it */
+ dwPix = *lpSrc++;
+ dwPix |= (dwPix <<8);
+ dwPix |= (dwPix << 16);
+ * ((DWORD UNALIGNED *) lpDst) = dwPix;
+ lpDst += sizeof(DWORD);
+ }
}
@@ -740,13 +750,13 @@ X_Stretch_1_4_8Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_1_16Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- X_CopyScanline(lpSrc, lpDst, DstXE * sizeof(WORD));
+ X_CopyScanline(lpSrc, lpDst, DstXE * sizeof(WORD));
}
@@ -756,25 +766,25 @@ X_Stretch_1_1_16Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_2_16Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- DWORD dwPix;
- register int i;
+ DWORD dwPix;
+ register int i;
- for (i = 0; i < SrcXE; i++) {
+ for (i = 0; i < SrcXE; i++) {
- /* get a pixel and double it */
- dwPix = * ((WORD *)lpSrc);
- dwPix |= (dwPix << 16);
- * ((DWORD UNALIGNED *) lpDst) = dwPix;
+ /* get a pixel and double it */
+ dwPix = * ((WORD *)lpSrc);
+ dwPix |= (dwPix << 16);
+ * ((DWORD UNALIGNED *) lpDst) = dwPix;
- lpDst += sizeof(DWORD);
- lpSrc += sizeof(WORD);
- }
+ lpDst += sizeof(DWORD);
+ lpSrc += sizeof(WORD);
+ }
}
@@ -783,29 +793,29 @@ X_Stretch_1_2_16Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_N_16Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- int xdelta;
- register int i;
+ int xdelta;
+ register int i;
- xdelta = DstXE -1;
+ xdelta = DstXE -1;
- for (i = 0; i < DstXE; i++) {
+ for (i = 0; i < DstXE; i++) {
- /* copy one pixel and advance dest */
- *((WORD *) lpDst) = *((WORD *) lpSrc);
+ /* copy one pixel and advance dest */
+ *((WORD *) lpDst) = *((WORD *) lpSrc);
- lpDst += sizeof(WORD);
+ lpDst += sizeof(WORD);
- /* should we advance source pointer this time ? */
- if ( (xdelta -= SrcXE) < 0) {
- xdelta += DstXE;
- lpSrc += sizeof(WORD);
- }
- }
+ /* should we advance source pointer this time ? */
+ if ( (xdelta -= SrcXE) < 0) {
+ xdelta += DstXE;
+ lpSrc += sizeof(WORD);
+ }
+ }
}
/*
@@ -813,32 +823,33 @@ X_Stretch_1_N_16Bits(LPBYTE lpSrc,
*/
void
X_Stretch_N_1_16Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- int xdelta;
- register int i;
+ int xdelta;
+ register int i;
- xdelta = SrcXE -1;
+ xdelta = SrcXE -1;
- for (i = 0; i < DstXE; i++) {
+ for (i = 0; i < DstXE; i++) {
- /* copy one pixel and advance dest */
- *((WORD *) lpDst) = *((WORD *)lpSrc);
+ /* copy one pixel and advance dest */
+ *((WORD *) lpDst) = *((WORD *)lpSrc);
- lpDst += sizeof(WORD);
+ lpDst += sizeof(WORD);
- /* how many times do we advance source pointer this time ? */
- do {
- lpSrc += sizeof(WORD);
- xdelta -= DstXE;
- } while (xdelta >= 0);
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc += sizeof(WORD);
+ xdelta -= DstXE;
+ }
+ while (xdelta >= 0);
- xdelta += SrcXE;
- }
+ xdelta += SrcXE;
+ }
}
@@ -850,12 +861,12 @@ X_Stretch_N_1_16Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_1_24Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- X_CopyScanline(lpSrc, lpDst, DstXE * 3);
+ X_CopyScanline(lpSrc, lpDst, DstXE * 3);
}
/*
@@ -863,32 +874,32 @@ X_Stretch_1_1_24Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_N_24Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- int xdelta;
- register int i;
+ int xdelta;
+ register int i;
- xdelta = DstXE -1;
+ xdelta = DstXE -1;
- for (i = 0; i < DstXE; i++) {
- /* copy first word of pixel and advance dest */
- *((WORD UNALIGNED *) lpDst) = *((WORD UNALIGNED *) lpSrc);
+ for (i = 0; i < DstXE; i++) {
+ /* copy first word of pixel and advance dest */
+ *((WORD UNALIGNED *) lpDst) = *((WORD UNALIGNED *) lpSrc);
- lpDst += sizeof(WORD);
+ lpDst += sizeof(WORD);
- /* copy third byte and advance dest */
- *lpDst++ = lpSrc[sizeof(WORD)];
+ /* copy third byte and advance dest */
+ *lpDst++ = lpSrc[sizeof(WORD)];
- /* should we advance source pointer this time ? */
- if ( (xdelta -= SrcXE) < 0) {
- xdelta += DstXE;
- lpSrc += 3;
- }
- }
+ /* should we advance source pointer this time ? */
+ if ( (xdelta -= SrcXE) < 0) {
+ xdelta += DstXE;
+ lpSrc += 3;
+ }
+ }
}
/*
@@ -896,36 +907,37 @@ X_Stretch_1_N_24Bits(LPBYTE lpSrc,
*/
void
X_Stretch_N_1_24Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- int xdelta;
- register int i;
+ int xdelta;
+ register int i;
- xdelta = SrcXE -1;
+ xdelta = SrcXE -1;
- for (i = 0; i < DstXE; i++) {
+ for (i = 0; i < DstXE; i++) {
- /* copy first word of pixel and advance dest */
- *((WORD UNALIGNED *) lpDst) = *((WORD UNALIGNED *) lpSrc);
+ /* copy first word of pixel and advance dest */
+ *((WORD UNALIGNED *) lpDst) = *((WORD UNALIGNED *) lpSrc);
- lpDst += sizeof(WORD);
+ lpDst += sizeof(WORD);
- /* copy third byte and advance dest */
- *lpDst++ = lpSrc[sizeof(WORD)];
+ /* copy third byte and advance dest */
+ *lpDst++ = lpSrc[sizeof(WORD)];
- /* how many times do we advance source pointer this time ? */
- do {
- lpSrc += 3;
- xdelta -= DstXE;
- } while (xdelta >= 0);
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc += 3;
+ xdelta -= DstXE;
+ }
+ while (xdelta >= 0);
- xdelta += SrcXE;
- }
-}
+ xdelta += SrcXE;
+ }
+}
/* 32-bits ---------------------------------------------------------*/
@@ -935,12 +947,12 @@ X_Stretch_N_1_24Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_1_32Bits(LPBYTE lpSrc,
- LPBYTE lpDst,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- X_CopyScanline((BYTE*) lpSrc, (BYTE*) lpDst, DstXE * sizeof( RGBQUAD ) );
+ X_CopyScanline((BYTE*) lpSrc, (BYTE*) lpDst, DstXE * sizeof( RGBQUAD ) );
}
/*
@@ -948,34 +960,32 @@ X_Stretch_1_1_32Bits(LPBYTE lpSrc,
*/
void
X_Stretch_1_N_32Bits(LPBYTE lpSrc0,
- LPBYTE lpDst0,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst0,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- int xdelta;
- register int i;
+ int xdelta;
+ register int i;
- RGBQUAD *lpSrc=(RGBQUAD *)lpSrc0;
- RGBQUAD *lpDst=(RGBQUAD *)lpDst0;
+ RGBQUAD *lpSrc=(RGBQUAD *)lpSrc0;
+ RGBQUAD *lpDst=(RGBQUAD *)lpDst0;
- xdelta = DstXE -1;
+ xdelta = DstXE -1;
- for (i = 0; i < DstXE; i++)
- {
- /* copy first word of pixel and advance dest */
- *lpDst = *lpSrc;
- lpDst++;
+ for (i = 0; i < DstXE; i++) {
+ /* copy first word of pixel and advance dest */
+ *lpDst = *lpSrc;
+ lpDst++;
- /* should we advance source pointer this time ? */
- if ( (xdelta -= SrcXE) < 0)
- {
- xdelta += DstXE;
- lpSrc++;
- }
- }
+ /* should we advance source pointer this time ? */
+ if ( (xdelta -= SrcXE) < 0) {
+ xdelta += DstXE;
+ lpSrc++;
+ }
+ }
}
/*
@@ -983,34 +993,33 @@ X_Stretch_1_N_32Bits(LPBYTE lpSrc0,
*/
void
X_Stretch_N_1_32Bits(LPBYTE lpSrc0,
- LPBYTE lpDst0,
- int SrcXE,
- int DstXE,
- int x_fract)
+ LPBYTE lpDst0,
+ int SrcXE,
+ int DstXE,
+ int x_fract)
{
- int xdelta;
- register int i;
+ int xdelta;
+ register int i;
- RGBQUAD *lpSrc=(RGBQUAD *)lpSrc0;
- RGBQUAD *lpDst=(RGBQUAD *)lpDst0;
+ RGBQUAD *lpSrc=(RGBQUAD *)lpSrc0;
+ RGBQUAD *lpDst=(RGBQUAD *)lpDst0;
- xdelta = SrcXE -1;
+ xdelta = SrcXE -1;
- for (i = 0; i < DstXE; i++)
- {
- *lpDst = *lpSrc;
- lpDst++;
+ for (i = 0; i < DstXE; i++) {
+ *lpDst = *lpSrc;
+ lpDst++;
- /* how many times do we advance source pointer this time ? */
- do
- {
- lpSrc++;
- xdelta -= DstXE;
- } while (xdelta >= 0);
+ /* how many times do we advance source pointer this time ? */
+ do {
+ lpSrc++;
+ xdelta -= DstXE;
+ }
+ while (xdelta >= 0);
- xdelta += SrcXE;
- }
-}
+ xdelta += SrcXE;
+ }
+}
@@ -1026,116 +1035,116 @@ X_Stretch_N_1_32Bits(LPBYTE lpSrc0,
*/
void
Stretch_1_2_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
- int DstYE, int SrcWidth, int DstWidth, int x_fract)
+ int DstYE, int SrcWidth, int DstWidth, int x_fract)
{
- int SrcInc, DstInc;
- register int i, j;
- WORD wPix;
- DWORD dwPix4;
+ int SrcInc, DstInc;
+ register int i, j;
+ WORD wPix;
+ DWORD dwPix4;
- /* amount to advance source by at the end of each scan */
- SrcInc = SrcWidth - SrcXE;
+ /* amount to advance source by at the end of each scan */
+ SrcInc = SrcWidth - SrcXE;
- /* amount to advance dest by at the end of each scan - note
- * that we write two scans at once, so advance past the next
- * scan line
- */
- DstInc = (DstWidth * 2) - DstXE;
+ /* amount to advance dest by at the end of each scan - note
+ * that we write two scans at once, so advance past the next
+ * scan line
+ */
+ DstInc = (DstWidth * 2) - DstXE;
- /*
- * we would like to copy the pixels DWORD at a time. this means
- * being aligned. if we are currently aligned on a WORD boundary,
- * then copy one pixel to get aligned. If we are on a byte
- * boundary, we can never get aligned, so use the slower loop.
- */
- if ( ((DWORD_PTR)lpDst) & 1) {
+ /*
+ * we would like to copy the pixels DWORD at a time. this means
+ * being aligned. if we are currently aligned on a WORD boundary,
+ * then copy one pixel to get aligned. If we are on a byte
+ * boundary, we can never get aligned, so use the slower loop.
+ */
+ if ( ((DWORD_PTR)lpDst) & 1) {
- /*
- * dest is byte aligned - so we can never align it
- * by writing WORDs - use slow loop.
- */
- for (i = 0; i < SrcYE; i++) {
+ /*
+ * dest is byte aligned - so we can never align it
+ * by writing WORDs - use slow loop.
+ */
+ for (i = 0; i < SrcYE; i++) {
- for (j = 0; j < SrcXE; j++) {
+ for (j = 0; j < SrcXE; j++) {
- /* get a pixel and double it */
+ /* get a pixel and double it */
- wPix = *lpSrc++;
- wPix |= (wPix<<8);
+ wPix = *lpSrc++;
+ wPix |= (wPix<<8);
- /* write doubled pixel to this scanline */
+ /* write doubled pixel to this scanline */
- *( (WORD UNALIGNED *) lpDst) = wPix;
+ *( (WORD UNALIGNED *) lpDst) = wPix;
- /* write double pixel to next scanline */
- *( (WORD UNALIGNED *) (lpDst + DstWidth)) = wPix;
+ /* write double pixel to next scanline */
+ *( (WORD UNALIGNED *) (lpDst + DstWidth)) = wPix;
- lpDst += sizeof(WORD);
- }
- lpSrc += SrcInc;
- lpDst += DstInc;
- }
- return;
- }
+ lpDst += sizeof(WORD);
+ }
+ lpSrc += SrcInc;
+ lpDst += DstInc;
+ }
+ return;
+ }
- /*
- * this will be the aligned version. align each scan line
- */
- for ( i = 0; i < SrcYE; i++) {
+ /*
+ * this will be the aligned version. align each scan line
+ */
+ for ( i = 0; i < SrcYE; i++) {
- /* count of pixels remaining */
- j = SrcXE;
+ /* count of pixels remaining */
+ j = SrcXE;
- /* align this scan line */
- if (((DWORD_PTR)lpDst) & 2) {
+ /* align this scan line */
+ if (((DWORD_PTR)lpDst) & 2) {
- /* word aligned - copy one doubled pixel and we are ok */
- wPix = *lpSrc++;
- wPix |= (wPix << 8);
+ /* word aligned - copy one doubled pixel and we are ok */
+ wPix = *lpSrc++;
+ wPix |= (wPix << 8);
- *( (WORD *) lpDst) = wPix;
- *( (WORD *) (lpDst + DstWidth)) = wPix;
- lpDst += sizeof(WORD);
+ *( (WORD *) lpDst) = wPix;
+ *( (WORD *) (lpDst + DstWidth)) = wPix;
+ lpDst += sizeof(WORD);
- j -= 1;
- }
+ j -= 1;
+ }
- /* now dest is aligned - so loop eating two pixels at a time
- * until there is at most one left
- */
- for ( ; j > 1; j -= 2) {
+ /* now dest is aligned - so loop eating two pixels at a time
+ * until there is at most one left
+ */
+ for ( ; j > 1; j -= 2) {
- /* read two pixels and double them */
- wPix = * ((WORD UNALIGNED *) lpSrc);
- lpSrc += sizeof(WORD);
+ /* read two pixels and double them */
+ wPix = * ((WORD UNALIGNED *) lpSrc);
+ lpSrc += sizeof(WORD);
- dwPix4 = (wPix & 0xff) | ((wPix & 0xff) << 8);
- dwPix4 |= ((wPix & 0xff00) << 8) | ((wPix & 0xff00) << 16);
- *((DWORD *) lpDst) = dwPix4;
- *((DWORD *) (lpDst + DstWidth)) = dwPix4;
+ dwPix4 = (wPix & 0xff) | ((wPix & 0xff) << 8);
+ dwPix4 |= ((wPix & 0xff00) << 8) | ((wPix & 0xff00) << 16);
+ *((DWORD *) lpDst) = dwPix4;
+ *((DWORD *) (lpDst + DstWidth)) = dwPix4;
- lpDst += sizeof(DWORD);
- }
+ lpDst += sizeof(DWORD);
+ }
- /* odd byte remaining ? */
- if (j > 0) {
- /* word aligned - copy one doubled pixel and we are ok */
- wPix = *lpSrc++;
- wPix |= (wPix << 8);
+ /* odd byte remaining ? */
+ if (j > 0) {
+ /* word aligned - copy one doubled pixel and we are ok */
+ wPix = *lpSrc++;
+ wPix |= (wPix << 8);
- *( (WORD *) lpDst) = wPix;
- *( (WORD *) (lpDst + DstWidth)) = wPix;
- lpDst += sizeof(WORD);
+ *( (WORD *) lpDst) = wPix;
+ *( (WORD *) (lpDst + DstWidth)) = wPix;
+ lpDst += sizeof(WORD);
- j -= 1;
- }
- lpSrc += SrcInc;
- lpDst += DstInc;
- }
+ j -= 1;
+ }
+ lpSrc += SrcInc;
+ lpDst += DstInc;
+ }
}
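A quick trace of the aligned fast path: for a source WORD holding pixels 0xAA (low byte) and 0xBB (high byte), wPix = 0xBBAA, so dwPix4 = 0xBBBBAAAA, and the little-endian DWORD store writes the bytes AA AA BB BB to both the current and the next destination scanline, which is exactly the 2x2 pixel duplication the 1:2 stretch needs, two source pixels per inner iteration.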
@@ -1148,45 +1157,45 @@ Stretch_1_2_8Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
void
Stretch_1_2_16Bits(LPBYTE lpSrc, LPBYTE lpDst, int SrcXE,int SrcYE, int DstXE,
- int DstYE, int SrcWidth, int DstWidth, int x_fract)
+ int DstYE, int SrcWidth, int DstWidth, int x_fract)
{
- int SrcInc, DstInc;
- register int i, j;
- DWORD dwPix;
+ int SrcInc, DstInc;
+ register int i, j;
+ DWORD dwPix;
- /* amount to advance source by at the end of each scan */
- SrcInc = SrcWidth - (SrcXE * sizeof(WORD));
+ /* amount to advance source by at the end of each scan */
+ SrcInc = SrcWidth - (SrcXE * sizeof(WORD));
- /* amount to advance dest by at the end of each scan - note
- * that we write two scans at once, so advance past the next
- * scan line
- */
- DstInc = (DstWidth * 2) - (DstXE * sizeof(WORD));
+ /* amount to advance dest by at the end of each scan - note
+ * that we write two scans at once, so advance past the next
+ * scan line
+ */
+ DstInc = (DstWidth * 2) - (DstXE * sizeof(WORD));
- for (i = 0; i < SrcYE; i++) {
+ for (i = 0; i < SrcYE; i++) {
- for (j = 0; j < SrcXE; j++) {
+ for (j = 0; j < SrcXE; j++) {
- /* get a pixel and double it */
+ /* get a pixel and double it */
- dwPix = *((WORD *)lpSrc);
- dwPix |= (dwPix<<16);
+ dwPix = *((WORD *)lpSrc);
+ dwPix |= (dwPix<<16);
- lpSrc += sizeof(WORD);
+ lpSrc += sizeof(WORD);
- /* write doubled pixel to this scanline */
+ /* write doubled pixel to this scanline */
- *( (DWORD UNALIGNED *) lpDst) = dwPix;
+ *( (DWORD UNALIGNED *) lpDst) = dwPix;
- /* write double pixel to next scanline */
- *( (DWORD UNALIGNED *) (lpDst + DstWidth)) = dwPix;
+ /* write double pixel to next scanline */
+ *( (DWORD UNALIGNED *) (lpDst + DstWidth)) = dwPix;
- lpDst += sizeof(DWORD);
- }
- lpSrc += SrcInc;
- lpDst += DstInc;
+ lpDst += sizeof(DWORD);
+ }
+ lpSrc += SrcInc;
+ lpDst += DstInc;
- }
+ }
}
diff --git a/plugins/pluginDirectShow/internals/Resizer.h b/plugins/pluginDirectShow/internals/Resizer.h
index 6c76970..e91b980 100755
--- a/plugins/pluginDirectShow/internals/Resizer.h
+++ b/plugins/pluginDirectShow/internals/Resizer.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -67,10 +67,10 @@
void ResizeRGB( BITMAPINFOHEADER *pbiIn, //Src's BitMapInFoHeader
- const unsigned char * dibBits, //Src bits
- BITMAPINFOHEADER *pbiOut,
- unsigned char *pFrame, //Dst bits
- int iNewWidth, //new W in pixel
- int iNewHeight); //new H in pixel
+ const unsigned char * dibBits, //Src bits
+ BITMAPINFOHEADER *pbiOut,
+ unsigned char *pFrame, //Dst bits
+ int iNewWidth, //new W in pixel
+ int iNewHeight); //new H in pixel
#endif //RESIZER_H
diff --git a/plugins/pluginDirectShow/internals/VideoDisplayName.cxx b/plugins/pluginDirectShow/internals/VideoDisplayName.cxx
index 6b4d0b0..ba71428 100755
--- a/plugins/pluginDirectShow/internals/VideoDisplayName.cxx
+++ b/plugins/pluginDirectShow/internals/VideoDisplayName.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -23,15 +23,15 @@ VideoDisplayName::VideoDisplayName(std::string name_, std::string descr) : name(
std::string VideoDisplayName::getName() const
{
- return this->name;
+ return this->name;
}
std::string VideoDisplayName::getDescription() const
{
- return this->description;
+ return this->description;
}
int VideoDisplayName::operator==(const VideoDisplayName &dev) const
{
- return this->name == dev.name;
+ return this->name == dev.name;
}
diff --git a/plugins/pluginDirectShow/internals/VideoDisplayName.h b/plugins/pluginDirectShow/internals/VideoDisplayName.h
index 82dc0d0..34914b7 100755
--- a/plugins/pluginDirectShow/internals/VideoDisplayName.h
+++ b/plugins/pluginDirectShow/internals/VideoDisplayName.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -25,19 +25,19 @@
class VideoDisplayName
{
public:
- VideoDisplayName() {}
+ VideoDisplayName() {}
- VideoDisplayName(std::string name, std::string description);
+ VideoDisplayName(std::string name, std::string description);
- std::string getName() const;
+ std::string getName() const;
- std::string getDescription() const;
+ std::string getDescription() const;
- int operator==( const VideoDisplayName &dev ) const;
+ int operator==( const VideoDisplayName &dev ) const;
private:
- std::string name;
- std::string description;
+ std::string name;
+ std::string description;
};
#endif /* PLUGIN_DSHOW_VIDEODISPLAYNAME_H */
diff --git a/plugins/pluginDirectShow/internals/VideoFrame.h b/plugins/pluginDirectShow/internals/VideoFrame.h
index 2c910a6..d9a7a9f 100755
--- a/plugins/pluginDirectShow/internals/VideoFrame.h
+++ b/plugins/pluginDirectShow/internals/VideoFrame.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -19,20 +19,19 @@
#define PLUGIN_DSHOW_VIDEOFRAME_H
// Define supported video formats
-typedef enum _VIDEOFORMAT
-{
- VIDEOFORMAT_NULL = 0, // 0 x 0 : Null
- VIDEOFORMAT_SQCIF, // 128 x 96 : SQCIF
- VIDEOFORMAT_QCIF, // 176 x 144 : QCIF
- VIDEOFORMAT_QVGA, // 320 x 240 : QVGA
- VIDEOFORMAT_CIF, // 352 x 288 : CIF
- VIDEOFORMAT_IOS_MEDIUM, // 480 x 360 : IOS_MEDIUM
- VIDEOFORMAT_VGA, // 640 x 480 : VGA
- VIDEOFORMAT_4CIF, // 704 x 576 : 4CIF
- VIDEOFORMAT_SVGA, // 800 x 600 : SVGA
- VIDEOFORMAT_XGA, // 1024 x 768 : XGA
- VIDEOFORMAT_SXGA, // 1280 x 1024 : SXGA
- VIDEOFORMAT_16CIF, // 1408 x 1152 : 16CIF
+typedef enum _VIDEOFORMAT {
+ VIDEOFORMAT_NULL = 0, // 0 x 0 : Null
+ VIDEOFORMAT_SQCIF, // 128 x 96 : SQCIF
+ VIDEOFORMAT_QCIF, // 176 x 144 : QCIF
+ VIDEOFORMAT_QVGA, // 320 x 240 : QVGA
+ VIDEOFORMAT_CIF, // 352 x 288 : CIF
+ VIDEOFORMAT_IOS_MEDIUM, // 480 x 360 : IOS_MEDIUM
+ VIDEOFORMAT_VGA, // 640 x 480 : VGA
+ VIDEOFORMAT_4CIF, // 704 x 576 : 4CIF
+ VIDEOFORMAT_SVGA, // 800 x 600 : SVGA
+ VIDEOFORMAT_XGA, // 1024 x 768 : XGA
+ VIDEOFORMAT_SXGA, // 1280 x 1024 : SXGA
+ VIDEOFORMAT_16CIF, // 1408 x 1152 : 16CIF
} VIDEOFORMAT;
@@ -54,7 +53,7 @@ typedef enum _VIDEOFORMAT
case VIDEOFORMAT_NULL: \
default: width = 0; height = 0; break; \
} \
-
+
// Macro to get a video format from its size
#define SIZE_TO_VIDEOFORMAT(width, height, format) \
@@ -70,7 +69,7 @@ typedef enum _VIDEOFORMAT
else if ((width == 1280) && (height = 1024)) format = VIDEOFORMAT_SXGA; \
else if ((width == 1408) && (height = 1152)) format = VIDEOFORMAT_16CIF; \
else format = VIDEOFORMAT_NULL; \
-
+
// Constants for consumer and producer Ids
#define GRABBER_VIDEO_ID 0x1FFFFFFF
@@ -80,28 +79,52 @@ typedef enum _VIDEOFORMAT
class VideoFrame
{
public:
- VideoFrame() { this->data = NULL; };
- virtual ~VideoFrame() { if(this->data) { this->data = NULL;} };
+ VideoFrame() {
+ this->data = NULL;
+ };
+ virtual ~VideoFrame() {
+ if(this->data) {
+ this->data = NULL;
+ }
+ };
- int getWidth() { return this->width; };
- int getHeight() { return this->height; };
- int getBitsPerPixel() { return this->bpp; };
- int getTotalBits () { return this->width * this->height * (this->bpp/8); };
- void* getData() { return this->data; };
+ int getWidth() {
+ return this->width;
+ };
+ int getHeight() {
+ return this->height;
+ };
+ int getBitsPerPixel() {
+ return this->bpp;
+ };
+ int getTotalBits () {
+ return this->width * this->height * (this->bpp/8);
+ };
+ void* getData() {
+ return this->data;
+ };
- void setWidth(int width_) { this->width = width_; };
- void setHeight(int height_) { this->height = height_; };
- void setBitsPerPixel( int bpp_) { this->bpp = bpp_; };
- void setData( void* data_) { this->data = data_; };
+ void setWidth(int width_) {
+ this->width = width_;
+ };
+ void setHeight(int height_) {
+ this->height = height_;
+ };
+ void setBitsPerPixel( int bpp_) {
+ this->bpp = bpp_;
+ };
+ void setData( void* data_) {
+ this->data = data_;
+ };
- VIDEOFORMAT getSize();
- void setSize(VIDEOFORMAT format);
+ VIDEOFORMAT getSize();
+ void setSize(VIDEOFORMAT format);
private:
- void *data;
- int width;
- int height;
- int bpp;
+ void *data;
+ int width;
+ int height;
+ int bpp;
};
#endif /* VIDEOFRAME_H */
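A note on the SIZE_TO_VIDEOFORMAT macro left as context above: in the visible lines the SXGA and 16CIF branches use a single = in the height test ((height = 1024) and (height = 1152)), so when the width matches they overwrite height and always take the branch. This commit only reflows whitespace there; a comparison-based mapping would look roughly like the sketch below, which assumes the VIDEOFORMAT enum defined earlier in this header and uses the resolutions listed in its comments (the function name is illustrative):

/* Illustrative alternative to the macro: map a frame size to a VIDEOFORMAT
 * value using equality tests only. */
static VIDEOFORMAT size_to_videoformat_sketch(int width, int height)
{
    if (width == 128  && height == 96)   return VIDEOFORMAT_SQCIF;
    if (width == 176  && height == 144)  return VIDEOFORMAT_QCIF;
    if (width == 320  && height == 240)  return VIDEOFORMAT_QVGA;
    if (width == 352  && height == 288)  return VIDEOFORMAT_CIF;
    if (width == 480  && height == 360)  return VIDEOFORMAT_IOS_MEDIUM;
    if (width == 640  && height == 480)  return VIDEOFORMAT_VGA;
    if (width == 704  && height == 576)  return VIDEOFORMAT_4CIF;
    if (width == 800  && height == 600)  return VIDEOFORMAT_SVGA;
    if (width == 1024 && height == 768)  return VIDEOFORMAT_XGA;
    if (width == 1280 && height == 1024) return VIDEOFORMAT_SXGA;
    if (width == 1408 && height == 1152) return VIDEOFORMAT_16CIF;
    return VIDEOFORMAT_NULL;
}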
diff --git a/plugins/pluginDirectShow/internals/VideoGrabberName.cxx b/plugins/pluginDirectShow/internals/VideoGrabberName.cxx
index 4b418cf..dd681c2 100755
--- a/plugins/pluginDirectShow/internals/VideoGrabberName.cxx
+++ b/plugins/pluginDirectShow/internals/VideoGrabberName.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -23,15 +23,15 @@ VideoGrabberName::VideoGrabberName(std::string name_, std::string descr) : name(
std::string VideoGrabberName::getName() const
{
- return this->name;
+ return this->name;
}
std::string VideoGrabberName::getDescription() const
{
- return this->description;
+ return this->description;
}
int VideoGrabberName::operator==(const VideoGrabberName &dev) const
{
- return this->name == dev.name;
+ return this->name == dev.name;
}
\ No newline at end of file
diff --git a/plugins/pluginDirectShow/internals/VideoGrabberName.h b/plugins/pluginDirectShow/internals/VideoGrabberName.h
index 0bb45b5..aaf7d75 100755
--- a/plugins/pluginDirectShow/internals/VideoGrabberName.h
+++ b/plugins/pluginDirectShow/internals/VideoGrabberName.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -25,19 +25,19 @@
class VideoGrabberName
{
public:
- VideoGrabberName() {}
+ VideoGrabberName() {}
- VideoGrabberName(std::string name, std::string description);
+ VideoGrabberName(std::string name, std::string description);
- std::string getName() const;
+ std::string getName() const;
- std::string getDescription() const;
+ std::string getDescription() const;
- int operator==( const VideoGrabberName &dev ) const;
+ int operator==( const VideoGrabberName &dev ) const;
private:
- std::string name;
- std::string description;
+ std::string name;
+ std::string description;
};
#endif /* PLUGIN_DSHOW_VIDEOGRABBERNAME_H */
diff --git a/plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx b/plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx
index a6b436a..045530d 100755
--- a/plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx
+++ b/plugins/pluginDirectShow/internals/wince/CPropertyBag.cxx
@@ -16,14 +16,14 @@
#include "internals/wince/CPropertyBag.h"
CPropertyBag::CPropertyBag() : _refCount(1), pVar(0)
-{
+{
}
CPropertyBag::~CPropertyBag()
{
VAR_LIST *pTemp = pVar;
HRESULT hr = S_OK;
-
+
while(pTemp) {
VAR_LIST *pDel = pTemp;
VariantClear(&pTemp->var);
@@ -35,13 +35,13 @@ CPropertyBag::~CPropertyBag()
}
HRESULT STDMETHODCALLTYPE
-CPropertyBag::Read(LPCOLESTR pszPropName,
- VARIANT *_pVar,
- IErrorLog *pErrorLog)
+CPropertyBag::Read(LPCOLESTR pszPropName,
+ VARIANT *_pVar,
+ IErrorLog *pErrorLog)
{
VAR_LIST *pTemp = pVar;
HRESULT hr = S_OK;
-
+
while (pTemp) {
if (0 == wcscmp(pszPropName, pTemp->pBSTRName)) {
hr = VariantCopy(_pVar, &pTemp->var);
@@ -54,8 +54,8 @@ CPropertyBag::Read(LPCOLESTR pszPropName,
HRESULT STDMETHODCALLTYPE
-CPropertyBag::Write(LPCOLESTR pszPropName,
- VARIANT *_pVar)
+CPropertyBag::Write(LPCOLESTR pszPropName,
+ VARIANT *_pVar)
{
HRESULT hr = S_OK;
VAR_LIST *pTemp = new VAR_LIST();
@@ -72,37 +72,37 @@ CPropertyBag::Write(LPCOLESTR pszPropName,
return VariantCopy(&pTemp->var, _pVar);
}
-ULONG STDMETHODCALLTYPE
-CPropertyBag::AddRef()
+ULONG STDMETHODCALLTYPE
+CPropertyBag::AddRef()
{
return InterlockedIncrement((LONG *)&_refCount);
}
-ULONG STDMETHODCALLTYPE
-CPropertyBag::Release()
+ULONG STDMETHODCALLTYPE
+CPropertyBag::Release()
{
ASSERT(_refCount != 0xFFFFFFFF);
- ULONG ret = InterlockedDecrement((LONG *)&_refCount);
- if (!ret) {
- delete this;
- }
+ ULONG ret = InterlockedDecrement((LONG *)&_refCount);
+ if (!ret) {
+ delete this;
+ }
return ret;
}
-HRESULT STDMETHODCALLTYPE
-CPropertyBag::QueryInterface(REFIID riid, void** ppv)
+HRESULT STDMETHODCALLTYPE
+CPropertyBag::QueryInterface(REFIID riid, void** ppv)
{
- if (!ppv) {
+ if (!ppv) {
return E_POINTER;
- }
- if (riid == IID_IPropertyBag) {
+ }
+ if (riid == IID_IPropertyBag) {
*ppv = static_cast<IPropertyBag*>(this);
- }
- else {
+ }
+ else {
return *ppv = 0, E_NOINTERFACE;
- }
-
- return AddRef(), S_OK;
+ }
+
+ return AddRef(), S_OK;
}
#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/CPropertyBag.h b/plugins/pluginDirectShow/internals/wince/CPropertyBag.h
index 20ce779..8abb728 100755
--- a/plugins/pluginDirectShow/internals/wince/CPropertyBag.h
+++ b/plugins/pluginDirectShow/internals/wince/CPropertyBag.h
@@ -4,40 +4,39 @@
#include "plugin_dshow_config.h"
-struct VAR_LIST
-{
+struct VAR_LIST {
VARIANT var;
VAR_LIST *pNext;
BSTR pBSTRName;
};
class CPropertyBag : public IPropertyBag
-{
+{
public:
CPropertyBag();
~CPropertyBag();
-
+
HRESULT STDMETHODCALLTYPE
Read(
- LPCOLESTR pszPropName,
- VARIANT *pVar,
+ LPCOLESTR pszPropName,
+ VARIANT *pVar,
IErrorLog *pErrorLog
- );
-
-
+ );
+
+
HRESULT STDMETHODCALLTYPE
Write(
- LPCOLESTR pszPropName,
+ LPCOLESTR pszPropName,
VARIANT *pVar
- );
-
- ULONG STDMETHODCALLTYPE AddRef();
- ULONG STDMETHODCALLTYPE Release();
- HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void** ppv);
+ );
+
+ ULONG STDMETHODCALLTYPE AddRef();
+ ULONG STDMETHODCALLTYPE Release();
+ HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void** ppv);
private:
- ULONG _refCount;
- VAR_LIST *pVar;
+ ULONG _refCount;
+ VAR_LIST *pVar;
};
#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h b/plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h
index 89d8909..ec983d7 100755
--- a/plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h
+++ b/plugins/pluginDirectShow/internals/wince/DSISampleGrabberCB.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2014-2015 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -21,9 +21,8 @@
#include "plugin_dshow_config.h"
-interface DSISampleGrabberCB
-{
- virtual HRESULT STDMETHODCALLTYPE SampleCB(double SampleTime, IMediaSample *pSample) = 0;
+interface DSISampleGrabberCB {
+ virtual HRESULT STDMETHODCALLTYPE SampleCB(double SampleTime, IMediaSample *pSample) = 0;
virtual HRESULT STDMETHODCALLTYPE BufferCB(double SampleTime, BYTE *pBuffer, long BufferLen) = 0;
};
diff --git a/plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx b/plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx
index 76d713c..8b43d83 100755
--- a/plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx
+++ b/plugins/pluginDirectShow/internals/wince/DSNullFilter.cxx
@@ -1,19 +1,19 @@
#if defined(_WIN32_WCE)
/* Copyright (C) 2014-2015 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -21,10 +21,10 @@
#include "internals/wince/DSNullFilter.h"
// {7F9F08CF-139F-40b2-A283-01C4EC26A452}
-TDSHOW_DEFINE_GUID(CLSID_DSNullFilter,
-0x7f9f08cf, 0x139f, 0x40b2, 0xa2, 0x83, 0x1, 0xc4, 0xec, 0x26, 0xa4, 0x52);
+TDSHOW_DEFINE_GUID(CLSID_DSNullFilter,
+ 0x7f9f08cf, 0x139f, 0x40b2, 0xa2, 0x83, 0x1, 0xc4, 0xec, 0x26, 0xa4, 0x52);
-DSNullFilter::DSNullFilter(LPUNKNOWN punk,HRESULT *phr)
+DSNullFilter::DSNullFilter(LPUNKNOWN punk,HRESULT *phr)
: CTransInPlaceFilter(TEXT("NullRenderer"), punk, CLSID_DSNullFilter, phr)
{
}
diff --git a/plugins/pluginDirectShow/internals/wince/DSNullFilter.h b/plugins/pluginDirectShow/internals/wince/DSNullFilter.h
index fc9b76a..608a2b6 100755
--- a/plugins/pluginDirectShow/internals/wince/DSNullFilter.h
+++ b/plugins/pluginDirectShow/internals/wince/DSNullFilter.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2014-2015 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx b/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx
index d33d105..920111f 100755
--- a/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx
+++ b/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.cxx
@@ -6,192 +6,191 @@
// {38589364-71FD-4641-B426-E443DB023568}
-TDSHOW_DEFINE_GUID(CLSID_SampleGrabber,
-0x38589364, 0x71fd, 0x4641, 0xb4, 0x26, 0xe4, 0x43, 0xdb, 0x2, 0x35, 0x68);
+TDSHOW_DEFINE_GUID(CLSID_SampleGrabber,
+ 0x38589364, 0x71fd, 0x4641, 0xb4, 0x26, 0xe4, 0x43, 0xdb, 0x2, 0x35, 0x68);
#define RGB565_MASK_RED 0xF800
#define RGB565_MASK_GREEN 0x07E0
#define RGB565_MASK_BLUE 0x001F
-DSSampleGrabber::DSSampleGrabber(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr)
-:CTransInPlaceFilter (tszName, punk, CLSID_SampleGrabber, phr)
-{
+DSSampleGrabber::DSSampleGrabber(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr)
+ :CTransInPlaceFilter (tszName, punk, CLSID_SampleGrabber, phr)
+{
#define FPS_INPUT 30
#define FPS_OUTPUT 5
- this->m_rtFrameLength = (10000000)/FPS_OUTPUT;
+ this->m_rtFrameLength = (10000000)/FPS_OUTPUT;
- this->m_inputFps = FPS_INPUT;
- this->m_outputFps = FPS_OUTPUT;
+ this->m_inputFps = FPS_INPUT;
+ this->m_outputFps = FPS_OUTPUT;
- this->m_iFrameNumber = 0;
- this->m_progress = 0;
- this->m_bProcessFrame = true;
+ this->m_iFrameNumber = 0;
+ this->m_progress = 0;
+ this->m_bProcessFrame = true;
- this->callback = NULL;
- this->m_rgb24 = NULL;
+ this->callback = NULL;
+ this->m_rgb24 = NULL;
- m_cRef = 0;
+ m_cRef = 0;
}
-DSSampleGrabber::~DSSampleGrabber() {
- this->callback = NULL;
- if(this->m_rgb24)
- {
- delete[]this->m_rgb24;
- this->m_rgb24 = NULL;
- }
+DSSampleGrabber::~DSSampleGrabber()
+{
+ this->callback = NULL;
+ if(this->m_rgb24) {
+ delete[]this->m_rgb24;
+ this->m_rgb24 = NULL;
+ }
}
HRESULT DSSampleGrabber::SetFps(int inputFps, int outputFps)
{
- if (inputFps <= 0 || outputFps <= 0) {
- return E_FAIL;
- }
-
- // Stop prcessing
- this->m_bProcessFrame = false;
-
- if (inputFps < outputFps) {
- this->m_inputFps = this->m_outputFps = inputFps;
- }
- else {
- this->m_outputFps = outputFps;
- this->m_inputFps = inputFps;
- }
-
- // Restart processing
- this->m_iFrameNumber = 0;
- this->m_progress = 0;
- this->m_bProcessFrame = true;
-
- return S_OK;
+ if (inputFps <= 0 || outputFps <= 0) {
+ return E_FAIL;
+ }
+
+    // Stop processing
+ this->m_bProcessFrame = false;
+
+ if (inputFps < outputFps) {
+ this->m_inputFps = this->m_outputFps = inputFps;
+ }
+ else {
+ this->m_outputFps = outputFps;
+ this->m_inputFps = inputFps;
+ }
+
+ // Restart processing
+ this->m_iFrameNumber = 0;
+ this->m_progress = 0;
+ this->m_bProcessFrame = true;
+
+ return S_OK;
}
HRESULT DSSampleGrabber::Transform(IMediaSample *pSample)
-{
- BYTE *pData = NULL;
- HRESULT hr = S_OK;
- HRESULT ret = S_FALSE;
-
- if (!this->m_bProcessFrame) {
- return S_FALSE;
- }
-
- // Get pointer to the video buffer data
- if ( FAILED(pSample->GetPointer(&pData)) ) {
- ret = E_FAIL;
- goto bail;
- }
-
- pSample->SetTime(NULL, NULL);
-
- // Drop frame?
- if (this->m_iFrameNumber == 0) {
- ret = S_OK;
- }
- else if (this->m_progress >= this->m_inputFps) {
- this->m_progress -= this->m_inputFps;
- ret = S_OK;
- }
-
- // Mark frame as accepted
- if (ret == S_OK) {
- // Set TRUE on every sample for uncompressed frames
- pSample->SetSyncPoint(TRUE);
-
- long Size = pSample->GetSize();
- if ( this->callback ) {
- LONGLONG start, end;
- WORD *rgb565 = (WORD*)pData;
-
- for(int i = 0, i24 = 0, i565 = 0; i< (Size/2); i++, i24+=3, i565+=1) {
- BYTE *p24 = (this->m_rgb24+i24);
- WORD val565 = *(rgb565 + i565);
-
- // extract RGB
- p24[2] = (val565 & RGB565_MASK_RED) >> 11;
- p24[1] = (val565 & RGB565_MASK_GREEN) >> 5;
- p24[0] = (val565 & RGB565_MASK_BLUE);
-
- // amplify the image
- p24[2] <<= 3;
- p24[1] <<= 2;
- p24[0] <<= 3;
- }
-
- pSample->GetMediaTime(&start, &end);
- this->callback->BufferCB( (double)start, this->m_rgb24, ((Size >> 1) * 3));
- }
- }
-
- this->m_progress += this->m_outputFps;
- this->m_iFrameNumber++;
+{
+ BYTE *pData = NULL;
+ HRESULT hr = S_OK;
+ HRESULT ret = S_FALSE;
+
+ if (!this->m_bProcessFrame) {
+ return S_FALSE;
+ }
+
+ // Get pointer to the video buffer data
+ if ( FAILED(pSample->GetPointer(&pData)) ) {
+ ret = E_FAIL;
+ goto bail;
+ }
+
+ pSample->SetTime(NULL, NULL);
+
+ // Drop frame?
+ if (this->m_iFrameNumber == 0) {
+ ret = S_OK;
+ }
+ else if (this->m_progress >= this->m_inputFps) {
+ this->m_progress -= this->m_inputFps;
+ ret = S_OK;
+ }
+
+ // Mark frame as accepted
+ if (ret == S_OK) {
+ // Set TRUE on every sample for uncompressed frames
+ pSample->SetSyncPoint(TRUE);
+
+ long Size = pSample->GetSize();
+ if ( this->callback ) {
+ LONGLONG start, end;
+ WORD *rgb565 = (WORD*)pData;
+
+ for(int i = 0, i24 = 0, i565 = 0; i< (Size/2); i++, i24+=3, i565+=1) {
+ BYTE *p24 = (this->m_rgb24+i24);
+ WORD val565 = *(rgb565 + i565);
+
+ // extract RGB
+ p24[2] = (val565 & RGB565_MASK_RED) >> 11;
+ p24[1] = (val565 & RGB565_MASK_GREEN) >> 5;
+ p24[0] = (val565 & RGB565_MASK_BLUE);
+
+ // amplify the image
+ p24[2] <<= 3;
+ p24[1] <<= 2;
+ p24[0] <<= 3;
+ }
+
+ pSample->GetMediaTime(&start, &end);
+ this->callback->BufferCB( (double)start, this->m_rgb24, ((Size >> 1) * 3));
+ }
+ }
+
+ this->m_progress += this->m_outputFps;
+ this->m_iFrameNumber++;
bail:
- SAFE_DELETE_ARRAY( pData );
- SAFE_RELEASE(pSample);
+ SAFE_DELETE_ARRAY( pData );
+ SAFE_RELEASE(pSample);
- return ret;
+ return ret;
}
HRESULT DSSampleGrabber::CheckInputType(const CMediaType* mtIn)
-{
- VIDEOINFO *video;
- if ( !IsEqualGUID( *mtIn->Subtype(), MEDIASUBTYPE_RGB565 ) || !(video=(VIDEOINFO *)mtIn->Format()) ) {
- return E_FAIL;
- }
+{
+ VIDEOINFO *video;
+ if ( !IsEqualGUID( *mtIn->Subtype(), MEDIASUBTYPE_RGB565 ) || !(video=(VIDEOINFO *)mtIn->Format()) ) {
+ return E_FAIL;
+ }
- return S_OK;
+ return S_OK;
}
-STDMETHODIMP DSSampleGrabber::SetCallback( DSISampleGrabberCB* callback_ )
+STDMETHODIMP DSSampleGrabber::SetCallback( DSISampleGrabberCB* callback_ )
{
- if (!callback_) {
- return E_FAIL;
- }
+ if (!callback_) {
+ return E_FAIL;
+ }
- this->callback = callback_;
- return S_OK;
+ this->callback = callback_;
+ return S_OK;
}
HRESULT DSSampleGrabber::SetSize(int width, int height)
{
- ZeroMemory(&this->mt, sizeof(CMediaType));
-
- VIDEOINFO *pvi = (VIDEOINFO *)this->mt.AllocFormatBuffer(sizeof(VIDEOINFO));
- if (NULL == pvi)
- {
- return E_OUTOFMEMORY;
- }
-
- ZeroMemory(pvi, sizeof(VIDEOINFO));
-
- pvi->bmiHeader.biCompression = BI_RGB;
- pvi->bmiHeader.biBitCount = 24;
- pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
- pvi->bmiHeader.biWidth = width;
- pvi->bmiHeader.biHeight = height;
- pvi->bmiHeader.biPlanes = 1;
- pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
- pvi->bmiHeader.biClrImportant = 0;
-
- // Frame rate
- pvi->AvgTimePerFrame = 10000000/this->m_outputFps;
-
- SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
- SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
-
- this->mt.SetType(&MEDIATYPE_Video);
- this->mt.SetFormatType(&FORMAT_VideoInfo);
- this->mt.SetTemporalCompression(FALSE);
-
- this->mt.SetSubtype(&MEDIASUBTYPE_RGB24);
- this->mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
-
- this->m_rgb24 = new BYTE[pvi->bmiHeader.biSizeImage];
-
- return S_OK;
+ ZeroMemory(&this->mt, sizeof(CMediaType));
+
+ VIDEOINFO *pvi = (VIDEOINFO *)this->mt.AllocFormatBuffer(sizeof(VIDEOINFO));
+ if (NULL == pvi) {
+ return E_OUTOFMEMORY;
+ }
+
+ ZeroMemory(pvi, sizeof(VIDEOINFO));
+
+ pvi->bmiHeader.biCompression = BI_RGB;
+ pvi->bmiHeader.biBitCount = 24;
+ pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+ pvi->bmiHeader.biWidth = width;
+ pvi->bmiHeader.biHeight = height;
+ pvi->bmiHeader.biPlanes = 1;
+ pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
+ pvi->bmiHeader.biClrImportant = 0;
+
+ // Frame rate
+ pvi->AvgTimePerFrame = 10000000/this->m_outputFps;
+
+ SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
+ SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
+
+ this->mt.SetType(&MEDIATYPE_Video);
+ this->mt.SetFormatType(&FORMAT_VideoInfo);
+ this->mt.SetTemporalCompression(FALSE);
+
+ this->mt.SetSubtype(&MEDIASUBTYPE_RGB24);
+ this->mt.SetSampleSize(pvi->bmiHeader.biSizeImage);
+
+ this->m_rgb24 = new BYTE[pvi->bmiHeader.biSizeImage];
+
+ return S_OK;
}
#endif /* _WIN32_WCE */
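DSSampleGrabber::Transform above drops frames with an fps accumulator (m_progress stepped by m_outputFps against m_inputFps) and converts each accepted RGB565 sample to RGB24 using the RGB565_MASK_* constants, shifting the 5/6/5-bit channels back up to 8 bits. A self-contained sketch of that per-pixel conversion, with the output stored B, G, R to match the p24[0..2] writes in Transform (the function name is illustrative):

/* Expand one RGB565 pixel into three 8-bit channels (blue, green, red order). */
static void rgb565_to_rgb24_sketch(unsigned short px, unsigned char out[3])
{
    out[2] = (unsigned char)(((px & 0xF800) >> 11) << 3); /* red,   5 bits -> 8 */
    out[1] = (unsigned char)(((px & 0x07E0) >> 5)  << 2); /* green, 6 bits -> 8 */
    out[0] = (unsigned char)( (px & 0x001F)        << 3); /* blue,  5 bits -> 8 */
}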
diff --git a/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h b/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h
index 39ee5c6..9ae11cd 100755
--- a/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h
+++ b/plugins/pluginDirectShow/internals/wince/DSSampleGrabber.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2014-2015 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -32,42 +32,44 @@
class DSSampleGrabber : public CTransInPlaceFilter
{
public:
- // instantiation
- DSSampleGrabber( TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr );
- ~DSSampleGrabber(void);
-
+ // instantiation
+ DSSampleGrabber( TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr );
+ ~DSSampleGrabber(void);
+
public:
- HRESULT Transform(IMediaSample *pSample);
+ HRESULT Transform(IMediaSample *pSample);
HRESULT CheckInputType(const CMediaType* mtIn);
-
- HRESULT SetFps(int inputFps, int outputFps);
+
+ HRESULT SetFps(int inputFps, int outputFps);
// DECLARE_IUNKNOWN;
- STDMETHODIMP QueryInterface(REFIID riid, void **ppv) {
- return GetOwner()->QueryInterface(riid,ppv);
- };
- STDMETHODIMP_(ULONG) AddRef() {
- return InterlockedIncrement(&m_cRef);
- };
+ STDMETHODIMP QueryInterface(REFIID riid, void **ppv) {
+ return GetOwner()->QueryInterface(riid,ppv);
+ };
+ STDMETHODIMP_(ULONG) AddRef() {
+ return InterlockedIncrement(&m_cRef);
+ };
STDMETHODIMP_(ULONG) Release() {
- return GetOwner()->Release();
+ return GetOwner()->Release();
};
- STDMETHODIMP SetCallback(DSISampleGrabberCB* callback_);
- HRESULT SetSize(int width, int height);
+ STDMETHODIMP SetCallback(DSISampleGrabberCB* callback_);
+ HRESULT SetSize(int width, int height);
- inline AM_MEDIA_TYPE GetMediaType() { return (AM_MEDIA_TYPE)this->mt; }
+ inline AM_MEDIA_TYPE GetMediaType() {
+ return (AM_MEDIA_TYPE)this->mt;
+ }
private:
- int m_progress;
- int m_inputFps, m_outputFps;
- bool m_bProcessFrame;
- REFERENCE_TIME m_rtFrameLength; // UNITS/fps
- LONGLONG m_iFrameNumber;
+ int m_progress;
+ int m_inputFps, m_outputFps;
+ bool m_bProcessFrame;
+ REFERENCE_TIME m_rtFrameLength; // UNITS/fps
+ LONGLONG m_iFrameNumber;
- DSISampleGrabberCB* callback;
- CMediaType mt;
- BYTE *m_rgb24;
+ DSISampleGrabberCB* callback;
+ CMediaType mt;
+ BYTE *m_rgb24;
};
#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h b/plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h
index 01e1728..e22be83 100755
--- a/plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h
+++ b/plugins/pluginDirectShow/internals/wince/DSSampleGrabberUtils.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2014-2015 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -25,14 +25,16 @@ typedef void (CALLBACK *MANAGEDCALLBACKPROC)(BYTE* pdata, long len);
// ISampleGrabber interface definition
// {04951BFF-696A-4ade-828D-42A5F1EDB631}
-DEFINE_GUID(IID_ISampleGrabber,
- 0x4951bff, 0x696a, 0x4ade, 0x82, 0x8d, 0x42, 0xa5, 0xf1, 0xed, 0xb6, 0x31);
+DEFINE_GUID(IID_ISampleGrabber,
+ 0x4951bff, 0x696a, 0x4ade, 0x82, 0x8d, 0x42, 0xa5, 0xf1, 0xed, 0xb6, 0x31);
-DECLARE_INTERFACE_(ISampleGrabber, IUnknown) {
- STDMETHOD(SetCallback)(MANAGEDCALLBACKPROC callback) PURE;};
+DECLARE_INTERFACE_(ISampleGrabber, IUnknown)
+{
+ STDMETHOD(SetCallback)(MANAGEDCALLBACKPROC callback) PURE;
+};
- // {D11DFE19-8864-4a60-B26C-552F9AA472E1}
+// {D11DFE19-8864-4a60-B26C-552F9AA472E1}
DEFINE_GUID(CLSID_NullRenderer,
- 0xd11dfe19, 0x8864, 0x4a60, 0xb2, 0x6c, 0x55, 0x2f, 0x9a, 0xa4, 0x72, 0xe1);
+ 0xd11dfe19, 0x8864, 0x4a60, 0xb2, 0x6c, 0x55, 0x2f, 0x9a, 0xa4, 0x72, 0xe1);
#endif /* _WIN32_WCE */
diff --git a/plugins/pluginDirectShow/plugin_dshow_config.h b/plugins/pluginDirectShow/plugin_dshow_config.h
index f58e4b2..df01cda 100755
--- a/plugins/pluginDirectShow/plugin_dshow_config.h
+++ b/plugins/pluginDirectShow/plugin_dshow_config.h
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -42,13 +42,13 @@
# define PLUGIN_DSHOW_GEXTERN extern
#endif
-/* Guards against C++ name mangling
+/* Guards against C++ name mangling
*/
#ifdef __cplusplus
# define PLUGIN_DSHOW_BEGIN_DECLS extern "C" {
# define PLUGIN_DSHOW_END_DECLS }
#else
-# define PLUGIN_DSHOW_BEGIN_DECLS
+# define PLUGIN_DSHOW_BEGIN_DECLS
# define PLUGIN_DSHOW_END_DECLS
#endif
@@ -79,7 +79,7 @@
#endif
#if HAVE_CONFIG_H
- #include <config.h>
+#include <config.h>
#endif
#if !defined(TDSHOW_DEFINE_GUID) && !defined(_WIN32_WCE)
diff --git a/plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx b/plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx
index af09c4e..789c7b9 100755
--- a/plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx
+++ b/plugins/pluginDirectShow/plugin_screencast_dshow_producer.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2014 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -26,181 +26,184 @@
#define DSPRODUCER(self) ((plugin_screencast_dshow_producer_t*)(self))
-typedef struct plugin_screencast_dshow_producer_s
-{
- TMEDIA_DECLARE_PRODUCER;
-
- DSGrabber* grabber;
- INT64 previewHwnd;
-
- tsk_bool_t started;
- tsk_bool_t mute;
- tsk_bool_t create_on_ui_thread;
+typedef struct plugin_screencast_dshow_producer_s {
+ TMEDIA_DECLARE_PRODUCER;
+
+ DSGrabber* grabber;
+ INT64 previewHwnd;
+
+ tsk_bool_t started;
+ tsk_bool_t mute;
+ tsk_bool_t create_on_ui_thread;
}
plugin_screencast_dshow_producer_t;
// Producer callback (From DirectShow Grabber to our plugin)
static int plugin_video_dshow_plugin_cb(const void* callback_data, const void* buffer, tsk_size_t size)
{
- const plugin_screencast_dshow_producer_t* producer = (const plugin_screencast_dshow_producer_t*)callback_data;
+ const plugin_screencast_dshow_producer_t* producer = (const plugin_screencast_dshow_producer_t*)callback_data;
- if (producer && TMEDIA_PRODUCER(producer)->enc_cb.callback) {
- TMEDIA_PRODUCER(producer)->enc_cb.callback(TMEDIA_PRODUCER(producer)->enc_cb.callback_data, buffer, size);
- }
+ if (producer && TMEDIA_PRODUCER(producer)->enc_cb.callback) {
+ TMEDIA_PRODUCER(producer)->enc_cb.callback(TMEDIA_PRODUCER(producer)->enc_cb.callback_data, buffer, size);
+ }
- return 0;
+ return 0;
}
/* ============ Media Producer Interface ================= */
static int plugin_screencast_dshow_producer_set(tmedia_producer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
-
- if(!producer || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(param->value_type == tmedia_pvt_int64){
- if(tsk_striequals(param->key, "local-hwnd")){
- DSPRODUCER(producer)->previewHwnd = (INT64)*((int64_t*)param->value);
- if(DSPRODUCER(producer)->grabber && DSPRODUCER(self)->grabber->preview){
- DSPRODUCER(producer)->grabber->preview->attach(DSPRODUCER(producer)->previewHwnd);
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "mute")){
- producer->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
- if(producer->started){
- if(producer->mute){
- producer->grabber->pause();
- }
- else{
- producer->grabber->start();
- }
- }
- }
- else if(tsk_striequals(param->key, "create-on-current-thead")){
- producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if(tsk_striequals(param->key, "plugin-firefox")){
- TSK_DEBUG_INFO("'plugin-firefox' ignored for screencast");
- }
- }
-
- return ret;
+ int ret = 0;
+ plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
+
+ if(!producer || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(param->value_type == tmedia_pvt_int64) {
+ if(tsk_striequals(param->key, "local-hwnd")) {
+ DSPRODUCER(producer)->previewHwnd = (INT64)*((int64_t*)param->value);
+ if(DSPRODUCER(producer)->grabber && DSPRODUCER(self)->grabber->preview) {
+ DSPRODUCER(producer)->grabber->preview->attach(DSPRODUCER(producer)->previewHwnd);
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "mute")) {
+ producer->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+ if(producer->started) {
+ if(producer->mute) {
+ producer->grabber->pause();
+ }
+ else {
+ producer->grabber->start();
+ }
+ }
+ }
+ else if(tsk_striequals(param->key, "create-on-current-thead")) {
+ producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if(tsk_striequals(param->key, "plugin-firefox")) {
+ TSK_DEBUG_INFO("'plugin-firefox' ignored for screencast");
+ }
+ }
+
+ return ret;
}
static int plugin_screencast_dshow_producer_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
-
- if(!producer || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- TMEDIA_PRODUCER(producer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
- TMEDIA_PRODUCER(producer)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
- TMEDIA_PRODUCER(producer)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
-
- return 0;
+ plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
+
+ if(!producer || !codec && codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_PRODUCER(producer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+ TMEDIA_PRODUCER(producer)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+ TMEDIA_PRODUCER(producer)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+
+ return 0;
}
static int plugin_screencast_dshow_producer_start(tmedia_producer_t* self)
{
- plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
- HRESULT hr = S_OK;
-
- if (!producer) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (producer->started) {
- return 0;
- }
-
- // create grabber on ALWAYS current thread
- if (!producer->grabber) {
- static BOOL __isDisplayFalse = FALSE;
- static BOOL __isScreenCastTrue = TRUE;
- if(producer->create_on_ui_thread) createOnUIThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastTrue);
- else createOnCurrentThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastTrue);
- if (!producer->grabber) {
- TSK_DEBUG_ERROR("Failed to create grabber");
- return -2;
- }
- }
-
- // set parameters
- producer->grabber->setCaptureParameters((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height, TMEDIA_PRODUCER(producer)->video.fps);
-
- // set callback function
- producer->grabber->setCallback(plugin_video_dshow_plugin_cb, producer);
-
- // attach preview
- if (producer->grabber->preview) {
- if (producer->previewHwnd) {
- producer->grabber->preview->attach(producer->previewHwnd);
- }
- producer->grabber->preview->setSize((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height);
- }
-
- // start grabber
- if (!producer->mute) {
- producer->grabber->start();
- }
-
- producer->started = tsk_true;
-
- return 0;
+ plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
+ HRESULT hr = S_OK;
+
+ if (!producer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (producer->started) {
+ return 0;
+ }
+
+ // create grabber on ALWAYS current thread
+ if (!producer->grabber) {
+ static BOOL __isDisplayFalse = FALSE;
+ static BOOL __isScreenCastTrue = TRUE;
+ if(producer->create_on_ui_thread) {
+ createOnUIThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastTrue);
+ }
+ else {
+ createOnCurrentThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastTrue);
+ }
+ if (!producer->grabber) {
+ TSK_DEBUG_ERROR("Failed to create grabber");
+ return -2;
+ }
+ }
+
+ // set parameters
+ producer->grabber->setCaptureParameters((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height, TMEDIA_PRODUCER(producer)->video.fps);
+
+ // set callback function
+ producer->grabber->setCallback(plugin_video_dshow_plugin_cb, producer);
+
+ // attach preview
+ if (producer->grabber->preview) {
+ if (producer->previewHwnd) {
+ producer->grabber->preview->attach(producer->previewHwnd);
+ }
+ producer->grabber->preview->setSize((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height);
+ }
+
+ // start grabber
+ if (!producer->mute) {
+ producer->grabber->start();
+ }
+
+ producer->started = tsk_true;
+
+ return 0;
}
static int plugin_screencast_dshow_producer_pause(tmedia_producer_t* self)
{
- plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
+ plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
- if(!producer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!producer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!producer->grabber){
- TSK_DEBUG_ERROR("Invalid internal grabber");
- return -2;
- }
+ if(!producer->grabber) {
+ TSK_DEBUG_ERROR("Invalid internal grabber");
+ return -2;
+ }
- producer->grabber->pause();
+ producer->grabber->pause();
- return 0;
+ return 0;
}
static int plugin_screencast_dshow_producer_stop(tmedia_producer_t* self)
{
- plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
+ plugin_screencast_dshow_producer_t* producer = (plugin_screencast_dshow_producer_t*)self;
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!producer->started){
- return 0;
- }
+ if(!producer->started) {
+ return 0;
+ }
- if(!producer->grabber){
- TSK_DEBUG_ERROR("Invalid internal grabber");
- return -2;
- }
+ if(!producer->grabber) {
+ TSK_DEBUG_ERROR("Invalid internal grabber");
+ return -2;
+ }
- producer->grabber->stop();
- producer->started = tsk_false;
+ producer->grabber->stop();
+ producer->started = tsk_false;
- return 0;
+ return 0;
}
@@ -210,64 +213,62 @@ static int plugin_screencast_dshow_producer_stop(tmedia_producer_t* self)
/* constructor */
static tsk_object_t* plugin_screencast_dshow_producer_ctor(tsk_object_t * self, va_list * app)
{
- CoInitializeEx(NULL, COINIT_MULTITHREADED);
-
- plugin_screencast_dshow_producer_t *producer = (plugin_screencast_dshow_producer_t *)self;
- if(producer){
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(producer));
- TMEDIA_PRODUCER(producer)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
- /* init self with default values*/
- producer->create_on_ui_thread = tsk_true;
- TMEDIA_PRODUCER(producer)->video.fps = 15;
- TMEDIA_PRODUCER(producer)->video.width = 352;
- TMEDIA_PRODUCER(producer)->video.height = 288;
- }
- return self;
+ CoInitializeEx(NULL, COINIT_MULTITHREADED);
+
+ plugin_screencast_dshow_producer_t *producer = (plugin_screencast_dshow_producer_t *)self;
+ if(producer) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(producer));
+ TMEDIA_PRODUCER(producer)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
+ /* init self with default values*/
+ producer->create_on_ui_thread = tsk_true;
+ TMEDIA_PRODUCER(producer)->video.fps = 15;
+ TMEDIA_PRODUCER(producer)->video.width = 352;
+ TMEDIA_PRODUCER(producer)->video.height = 288;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_screencast_dshow_producer_dtor(tsk_object_t * self)
-{
- plugin_screencast_dshow_producer_t *producer = (plugin_screencast_dshow_producer_t *)self;
- if(producer){
- /* stop */
- if(producer->started){
- plugin_screencast_dshow_producer_stop((tmedia_producer_t*)self);
- }
-
- /* for safety */
- if(producer->grabber){
- producer->grabber->setCallback(tsk_null, tsk_null);
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(producer));
- /* deinit self */
- SAFE_DELETE_PTR(producer->grabber);
- }
-
- return self;
+{
+ plugin_screencast_dshow_producer_t *producer = (plugin_screencast_dshow_producer_t *)self;
+ if(producer) {
+ /* stop */
+ if(producer->started) {
+ plugin_screencast_dshow_producer_stop((tmedia_producer_t*)self);
+ }
+
+ /* for safety */
+ if(producer->grabber) {
+ producer->grabber->setCallback(tsk_null, tsk_null);
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(producer));
+ /* deinit self */
+ SAFE_DELETE_PTR(producer->grabber);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_screencast_dshow_producer_def_s =
-{
- sizeof(plugin_screencast_dshow_producer_t),
- plugin_screencast_dshow_producer_ctor,
- plugin_screencast_dshow_producer_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_screencast_dshow_producer_def_s = {
+ sizeof(plugin_screencast_dshow_producer_t),
+ plugin_screencast_dshow_producer_ctor,
+ plugin_screencast_dshow_producer_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t plugin_screencast_dshow_producer_plugin_def_s =
-{
- &plugin_screencast_dshow_producer_def_s,
-
- tmedia_bfcp_video,
- "Microsoft DirectShow producer (ScrenCast)",
-
- plugin_screencast_dshow_producer_set,
- plugin_screencast_dshow_producer_prepare,
- plugin_screencast_dshow_producer_start,
- plugin_screencast_dshow_producer_pause,
- plugin_screencast_dshow_producer_stop
+static const tmedia_producer_plugin_def_t plugin_screencast_dshow_producer_plugin_def_s = {
+ &plugin_screencast_dshow_producer_def_s,
+
+ tmedia_bfcp_video,
+    "Microsoft DirectShow producer (ScreenCast)",
+
+ plugin_screencast_dshow_producer_set,
+ plugin_screencast_dshow_producer_prepare,
+ plugin_screencast_dshow_producer_start,
+ plugin_screencast_dshow_producer_pause,
+ plugin_screencast_dshow_producer_stop
};
const tmedia_producer_plugin_def_t *plugin_screencast_dshow_producer_plugin_def_t = &plugin_screencast_dshow_producer_plugin_def_s;
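One reading note on the guards reformatted in this file and in plugin_video_dshow_consumer.cxx below: the prepare functions keep the test if(!producer || !codec && codec->plugin). Since && binds tighter than ||, the second operand groups as (!codec) && (codec->plugin); it is always false when codec is non-NULL and dereferences a null codec otherwise, so the check effectively validates only the first pointer. The presumed intent, shown here purely as a hypothetical sketch and not as part of this commit, would be:

if (!producer || !codec || !codec->plugin) {
    TSK_DEBUG_ERROR("Invalid parameter");
    return -1;
}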
diff --git a/plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx b/plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx
index dfd7e72..3d47f4c 100755
--- a/plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx
+++ b/plugins/pluginDirectShow/plugin_video_dshow_consumer.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -55,33 +55,32 @@ const DWORD NUM_BACK_BUFFERS = 2;
(*ppT)->Release(); \
*ppT = NULL; \
} \
-}
+}
#undef CHECK_HR
// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
-typedef struct _DSRatio
- {
+typedef struct _DSRatio {
DWORD Numerator;
DWORD Denominator;
- } DSRatio;
+} DSRatio;
static HRESULT CreateDeviceD3D9(
- HWND hWnd,
- IDirect3DDevice9** ppDevice,
- IDirect3D9 **ppD3D,
- D3DPRESENT_PARAMETERS &d3dpp
- );
+ HWND hWnd,
+ IDirect3DDevice9** ppDevice,
+ IDirect3D9 **ppD3D,
+ D3DPRESENT_PARAMETERS &d3dpp
+);
static HRESULT TestCooperativeLevel(
- struct plugin_video_dshow_consumer_s *pSelf
- );
+ struct plugin_video_dshow_consumer_s *pSelf
+);
static HRESULT CreateSwapChain(
- HWND hWnd,
- UINT32 nFrameWidth,
- UINT32 nFrameHeight,
- IDirect3DDevice9* pDevice,
- IDirect3DSwapChain9 **ppSwapChain);
+ HWND hWnd,
+ UINT32 nFrameWidth,
+ UINT32 nFrameHeight,
+ IDirect3DDevice9* pDevice,
+ IDirect3DSwapChain9 **ppSwapChain);
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
@@ -98,30 +97,29 @@ static HRESULT HookWindow(struct plugin_video_dshow_consumer_s *pSelf, HWND hWnd
static HRESULT UnhookWindow(struct plugin_video_dshow_consumer_s *pSelf);
-typedef struct plugin_video_dshow_consumer_s
-{
- TMEDIA_DECLARE_CONSUMER;
-
- BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked;
- BOOL bPluginFireFox, bPluginWebRTC4All;
- HWND hWindow;
- WNDPROC wndProc;
- HWND hWindowFullScreen;
- RECT rcWindow;
- RECT rcDest;
- DSRatio pixelAR;
-
- UINT32 nNegWidth;
- UINT32 nNegHeight;
- UINT32 nNegFps;
-
- D3DLOCKED_RECT rcLock;
- IDirect3DDevice9* pDevice;
- IDirect3D9 *pD3D;
- IDirect3DSwapChain9 *pSwapChain;
- D3DPRESENT_PARAMETERS d3dpp;
-
- TSK_DECLARE_SAFEOBJ;
+typedef struct plugin_video_dshow_consumer_s {
+ TMEDIA_DECLARE_CONSUMER;
+
+ BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked;
+ BOOL bPluginFireFox, bPluginWebRTC4All;
+ HWND hWindow;
+ WNDPROC wndProc;
+ HWND hWindowFullScreen;
+ RECT rcWindow;
+ RECT rcDest;
+ DSRatio pixelAR;
+
+ UINT32 nNegWidth;
+ UINT32 nNegHeight;
+ UINT32 nNegFps;
+
+ D3DLOCKED_RECT rcLock;
+ IDirect3DDevice9* pDevice;
+ IDirect3D9 *pD3D;
+ IDirect3DSwapChain9 *pSwapChain;
+ D3DPRESENT_PARAMETERS d3dpp;
+
+ TSK_DECLARE_SAFEOBJ;
}
plugin_video_dshow_consumer_t;
@@ -130,377 +128,345 @@ static int _plugin_video_dshow_consumer_unprepare(plugin_video_dshow_consumer_t*
/* ============ Media Consumer Interface ================= */
static int plugin_video_dshow_consumer_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- HRESULT hr = S_OK;
- plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
-
- if(!self || !param)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_POINTER);
- }
-
- if(param->value_type == tmedia_pvt_int64)
- {
- if(tsk_striequals(param->key, "remote-hwnd"))
- {
- HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
- if(hWnd != pSelf->hWindow)
- {
- tsk_safeobj_lock(pSelf); // block consumer thread
- pSelf->hWindow = hWnd;
- if(pSelf->bPrepared)
- {
- hr = ResetDevice(pSelf);
- }
- tsk_safeobj_unlock(pSelf); // unblock consumer thread
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32)
- {
- if(tsk_striequals(param->key, "fullscreen"))
- {
- BOOL bFullScreen = !!*((int32_t*)param->value);
- TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
- CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
- }
- else if(tsk_striequals(param->key, "create-on-current-thead"))
- {
- // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if(tsk_striequals(param->key, "plugin-firefox"))
- {
- pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
- }
- else if(tsk_striequals(param->key, "plugin-webrtc4all"))
- {
- pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
- }
- }
-
- CHECK_HR(hr);
+ int ret = 0;
+ HRESULT hr = S_OK;
+ plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(param->value_type == tmedia_pvt_int64) {
+ if(tsk_striequals(param->key, "remote-hwnd")) {
+ HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+ if(hWnd != pSelf->hWindow) {
+ tsk_safeobj_lock(pSelf); // block consumer thread
+ pSelf->hWindow = hWnd;
+ if(pSelf->bPrepared) {
+ hr = ResetDevice(pSelf);
+ }
+ tsk_safeobj_unlock(pSelf); // unblock consumer thread
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "fullscreen")) {
+ BOOL bFullScreen = !!*((int32_t*)param->value);
+ TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
+ CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
+ }
+ else if(tsk_striequals(param->key, "create-on-current-thead")) {
+ // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if(tsk_striequals(param->key, "plugin-firefox")) {
+ pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
+ }
+ else if(tsk_striequals(param->key, "plugin-webrtc4all")) {
+ pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
+ }
+ }
+
+ CHECK_HR(hr);
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_video_dshow_consumer_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
-
- if(!pSelf || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(pSelf->bPrepared){
- TSK_DEBUG_WARN("D3D9 video consumer already prepared");
- return -1;
- }
-
- HRESULT hr = S_OK;
- HWND hWnd = Window(pSelf);
-
- TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
-
- if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
- TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
- }
- if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
- TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
- }
-
- pSelf->nNegFps = (UINT32)TMEDIA_CONSUMER(pSelf)->video.fps;
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
-
- TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
- pSelf->nNegFps,
- pSelf->nNegWidth,
- pSelf->nNegHeight);
-
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- // The window handle is not created until the call is connect (incoming only) - At least on Internet Explorer 10
- if(hWnd && !pSelf->bPluginWebRTC4All)
- {
- CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
- else
- {
- if(hWnd && pSelf->bPluginWebRTC4All)
- {
- TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creating until session get connected");
- }
- else
- {
- TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
- }
- }
+ plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+
+ if(!pSelf || !codec && codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(pSelf->bPrepared) {
+ TSK_DEBUG_WARN("D3D9 video consumer already prepared");
+ return -1;
+ }
+
+ HRESULT hr = S_OK;
+ HWND hWnd = Window(pSelf);
+
+ TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.width) {
+ TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.height) {
+ TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+ }
+
+ pSelf->nNegFps = (UINT32)TMEDIA_CONSUMER(pSelf)->video.fps;
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+ TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+ TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+ // The window handle is not created until the call is connected (incoming only) - at least on Internet Explorer 10
+ if(hWnd && !pSelf->bPluginWebRTC4All) {
+ CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
+ else {
+ if(hWnd && pSelf->bPluginWebRTC4All) {
+ TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creating until session get connected");
+ }
+ else {
+ TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
+ }
+ }
bail:
- pSelf->bPrepared = SUCCEEDED(hr);
- return pSelf->bPrepared ? 0 : -1;
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return pSelf->bPrepared ? 0 : -1;
}
static int plugin_video_dshow_consumer_start(tmedia_consumer_t* self)
{
- plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+ plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(pSelf->bStarted){
- TSK_DEBUG_INFO("D3D9 video consumer already started");
- return 0;
- }
- if(!pSelf->bPrepared){
- TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
- return -1;
- }
+ if(pSelf->bStarted) {
+ TSK_DEBUG_INFO("D3D9 video consumer already started");
+ return 0;
+ }
+ if(!pSelf->bPrepared) {
+ TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
+ return -1;
+ }
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- pSelf->bPaused = false;
- pSelf->bStarted = true;
+ pSelf->bPaused = false;
+ pSelf->bStarted = true;
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_video_dshow_consumer_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+ plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
- HRESULT hr = S_OK;
- HWND hWnd = Window(pSelf);
+ HRESULT hr = S_OK;
+ HWND hWnd = Window(pSelf);
- IDirect3DSurface9 *pSurf = NULL;
+ IDirect3DSurface9 *pSurf = NULL;
IDirect3DSurface9 *pBB = NULL;
- if(!pSelf)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1; // because of the mutex lock do it here
- }
-
- tsk_safeobj_lock(pSelf);
-
- if(!buffer || !size)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_INVALIDARG);
- }
-
- if(!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("D3D9 video consumer not started");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(!hWnd)
- {
- TSK_DEBUG_INFO("Do not draw frame because HWND not set");
- goto bail; // not an error as the application can decide to set the HWND at any time
- }
-
- if (!pSelf->bWindowHooked)
- {
- // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
- CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
- }
-
- if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain)
- {
- if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain)
- {
- CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
- }
-
- if(hWnd)
- {
- // means HWND was not set but defined now
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
-
- CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
- }
-
- if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
- TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
- pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
- pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
- // Update media type
-
- SafeRelease(&pSelf->pSwapChain);
- CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
-
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
-
- // Update Destination will do noting if the window size haven't changed.
- // Force updating the destination rect if negotiated size change
- CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
- }
-
- if(((pSelf->nNegWidth * pSelf->nNegHeight) << 2) != size)
- {
- TSK_DEBUG_ERROR("%u not valid as input size", size);
- CHECK_HR(hr = E_FAIL);
- }
-
- CHECK_HR(hr = TestCooperativeLevel(pSelf));
-
- CHECK_HR(hr = UpdateDestinationRect(pSelf));
-
- CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
- CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
-
- // Fast copy() using MMX, SSE, or SSE2
- // Only available on Vista or later: Use LoadLibrary() to get a pointer to the function
- /*hr = MFCopyImage(
- (BYTE*)pSelf->rcLock.pBits,
- pSelf->rcLock.Pitch,
- (BYTE*)buffer,
- (pSelf->nNegWidth << 2),
- (pSelf->nNegWidth << 2),
- pSelf->nNegHeight
- );*/
-
- if(pSelf->rcLock.Pitch == (pSelf->nNegWidth << 2))
- {
- memcpy(pSelf->rcLock.pBits, buffer, size);
- }
- else
- {
- const BYTE* pSrcPtr = (const BYTE*)buffer;
- BYTE* pDstPtr = (BYTE*)pSelf->rcLock.pBits;
- UINT32 nDstPitch = pSelf->rcLock.Pitch;
- UINT32 nSrcPitch = (pSelf->nNegWidth << 2);
- for(UINT32 i = 0; i < pSelf->nNegHeight; ++i)
- {
- memcpy(pDstPtr, pSrcPtr, nSrcPitch);
- pDstPtr += nDstPitch;
- pSrcPtr += nSrcPitch;
- }
- }
-
- if(FAILED(hr))
- {
- // unlock() before leaving
- pSurf->UnlockRect();
- CHECK_HR(hr);
- }
-
- CHECK_HR(hr = pSurf->UnlockRect());
-
- // Color fill the back buffer
- CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
- CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
-
- // Resize keeping aspect ratio and Blit the frame (required)
- hr = pSelf->pDevice->StretchRect(
- pSurf,
- NULL,
- pBB,
- &pSelf->rcDest/*NULL*/,
- D3DTEXF_LINEAR
- ); // could fail when display is being resized
- if(SUCCEEDED(hr))
- {
- // Present the frame
- CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
- }
- else
- {
- TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1; // do this check before acquiring the mutex lock
+ }
+
+ tsk_safeobj_lock(pSelf);
+
+ if(!buffer || !size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("D3D9 video consumer not started");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(!hWnd) {
+ TSK_DEBUG_INFO("Do not draw frame because HWND not set");
+ goto bail; // not an error as the application can decide to set the HWND at any time
+ }
+
+ if (!pSelf->bWindowHooked) {
+ // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
+ CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
+ }
+
+ if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain) {
+ if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain) {
+ CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
+ }
+
+ if(hWnd) {
+ // HWND was not set before but is defined now
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+ CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
+ }
+
+ if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height) {
+ TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+ pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+ pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+ // Update media type
+
+ SafeRelease(&pSelf->pSwapChain);
+ CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
+
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+ // UpdateDestinationRect() does nothing if the window size hasn't changed,
+ // so force the update when the negotiated size changes
+ CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
+ }
+
+ if(((pSelf->nNegWidth * pSelf->nNegHeight) << 2) != size) {
+ TSK_DEBUG_ERROR("%u not valid as input size", size);
+ CHECK_HR(hr = E_FAIL);
+ }
+
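+ // Check the device cooperative level; reset the device if it was lost and can be reset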
+ CHECK_HR(hr = TestCooperativeLevel(pSelf));
+
+ CHECK_HR(hr = UpdateDestinationRect(pSelf));
+
+ CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
+ CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
+
+ // Fast copy() using MMX, SSE, or SSE2
+ // Only available on Vista or later: Use LoadLibrary() to get a pointer to the function
+ /*hr = MFCopyImage(
+ (BYTE*)pSelf->rcLock.pBits,
+ pSelf->rcLock.Pitch,
+ (BYTE*)buffer,
+ (pSelf->nNegWidth << 2),
+ (pSelf->nNegWidth << 2),
+ pSelf->nNegHeight
+ );*/
+
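+ // Copy the decoded RGB32 frame into the locked back-buffer surface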
+ if(pSelf->rcLock.Pitch == (pSelf->nNegWidth << 2)) {
+ memcpy(pSelf->rcLock.pBits, buffer, size);
+ }
+ else {
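+ // The surface pitch differs from the frame stride: copy the frame row by row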
+ const BYTE* pSrcPtr = (const BYTE*)buffer;
+ BYTE* pDstPtr = (BYTE*)pSelf->rcLock.pBits;
+ UINT32 nDstPitch = pSelf->rcLock.Pitch;
+ UINT32 nSrcPitch = (pSelf->nNegWidth << 2);
+ for(UINT32 i = 0; i < pSelf->nNegHeight; ++i) {
+ memcpy(pDstPtr, pSrcPtr, nSrcPitch);
+ pDstPtr += nDstPitch;
+ pSrcPtr += nSrcPitch;
+ }
+ }
+
+ if(FAILED(hr)) {
+ // unlock() before leaving
+ pSurf->UnlockRect();
+ CHECK_HR(hr);
+ }
+
+ CHECK_HR(hr = pSurf->UnlockRect());
+
+ // Color fill the back buffer
+ CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
+ CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
+
+ // Resize keeping the aspect ratio and blit the frame (required)
+ hr = pSelf->pDevice->StretchRect(
+ pSurf,
+ NULL,
+ pBB,
+ &pSelf->rcDest/*NULL*/,
+ D3DTEXF_LINEAR
+ ); // could fail when display is being resized
+ if(SUCCEEDED(hr)) {
+ // Present the frame
+ CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
+ }
+ else {
+ TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
+ }
bail:
- SafeRelease(&pSurf);
- SafeRelease(&pBB);
+ SafeRelease(&pSurf);
+ SafeRelease(&pBB);
- tsk_safeobj_unlock(pSelf);
+ tsk_safeobj_unlock(pSelf);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_video_dshow_consumer_pause(tmedia_consumer_t* self)
{
- plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+ plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("MF video producer not started");
- return 0;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video producer not started");
+ return 0;
+ }
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- pSelf->bPaused = true;
+ pSelf->bPaused = true;
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_video_dshow_consumer_stop(tmedia_consumer_t* self)
{
- plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
+ plugin_video_dshow_consumer_t* pSelf = (plugin_video_dshow_consumer_t*)self;
- if(!pSelf){
+ if(!pSelf) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
pSelf->bStarted = false;
- pSelf->bPaused = false;
+ pSelf->bPaused = false;
- if(pSelf->hWindowFullScreen)
- {
- ::InvalidateRect(pSelf->hWindowFullScreen, NULL, FALSE);
- ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
- }
+ if(pSelf->hWindowFullScreen) {
+ ::InvalidateRect(pSelf->hWindowFullScreen, NULL, FALSE);
+ ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+ }
- // next start() will be called after prepare()
- return _plugin_video_dshow_consumer_unprepare(pSelf);
+ // next start() will be called after prepare()
+ return _plugin_video_dshow_consumer_unprepare(pSelf);
}
static int _plugin_video_dshow_consumer_unprepare(plugin_video_dshow_consumer_t* pSelf)
{
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(pSelf->bStarted)
- {
- // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
- return -1;
- }
+ if(pSelf->bStarted) {
+ // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+ return -1;
+ }
- UnhookWindow(pSelf);
+ UnhookWindow(pSelf);
- SafeRelease(&pSelf->pDevice);
- SafeRelease(&pSelf->pD3D);
- SafeRelease(&pSelf->pSwapChain);
+ SafeRelease(&pSelf->pDevice);
+ SafeRelease(&pSelf->pD3D);
+ SafeRelease(&pSelf->pSwapChain);
- pSelf->bPrepared = false;
+ pSelf->bPrepared = false;
- return 0;
+ return 0;
}
@@ -510,191 +476,177 @@ static int _plugin_video_dshow_consumer_unprepare(plugin_video_dshow_consumer_t*
/* constructor */
static tsk_object_t* plugin_video_dshow_consumer_ctor(tsk_object_t * self, va_list * app)
{
- plugin_video_dshow_consumer_t *pSelf = (plugin_video_dshow_consumer_t *)self;
- if(pSelf)
- {
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- /* init self */
- tsk_safeobj_init(pSelf);
- TMEDIA_CONSUMER(pSelf)->video.fps = 15;
- TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
-
- pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1;
- }
- return self;
+ plugin_video_dshow_consumer_t *pSelf = (plugin_video_dshow_consumer_t *)self;
+ if(pSelf) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+ TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+ /* init self */
+ tsk_safeobj_init(pSelf);
+ TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+ TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+ pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_video_dshow_consumer_dtor(tsk_object_t * self)
-{
- plugin_video_dshow_consumer_t *pSelf = (plugin_video_dshow_consumer_t *)self;
- if (pSelf) {
- /* stop */
- if (pSelf->bStarted)
- {
- plugin_video_dshow_consumer_stop(TMEDIA_CONSUMER(pSelf));
- }
-
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
- /* deinit self */
- _plugin_video_dshow_consumer_unprepare(pSelf);
- tsk_safeobj_deinit(pSelf);
- }
-
- return self;
+{
+ plugin_video_dshow_consumer_t *pSelf = (plugin_video_dshow_consumer_t *)self;
+ if (pSelf) {
+ /* stop */
+ if (pSelf->bStarted) {
+ plugin_video_dshow_consumer_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+ /* deinit self */
+ _plugin_video_dshow_consumer_unprepare(pSelf);
+ tsk_safeobj_deinit(pSelf);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_video_dshow_consumer_def_s =
-{
- sizeof(plugin_video_dshow_consumer_t),
- plugin_video_dshow_consumer_ctor,
- plugin_video_dshow_consumer_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_video_dshow_consumer_def_s = {
+ sizeof(plugin_video_dshow_consumer_t),
+ plugin_video_dshow_consumer_ctor,
+ plugin_video_dshow_consumer_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_video_dshow_consumer_plugin_def_s =
-{
- &plugin_video_dshow_consumer_def_s,
-
- tmedia_video,
- "Microsoft DirectShow consumer (D3D9)",
-
- plugin_video_dshow_consumer_set,
- plugin_video_dshow_consumer_prepare,
- plugin_video_dshow_consumer_start,
- plugin_video_dshow_consumer_consume,
- plugin_video_dshow_consumer_pause,
- plugin_video_dshow_consumer_stop
+static const tmedia_consumer_plugin_def_t plugin_video_dshow_consumer_plugin_def_s = {
+ &plugin_video_dshow_consumer_def_s,
+
+ tmedia_video,
+ "Microsoft DirectShow consumer (D3D9)",
+
+ plugin_video_dshow_consumer_set,
+ plugin_video_dshow_consumer_prepare,
+ plugin_video_dshow_consumer_start,
+ plugin_video_dshow_consumer_consume,
+ plugin_video_dshow_consumer_pause,
+ plugin_video_dshow_consumer_stop
};
const tmedia_consumer_plugin_def_t *plugin_video_dshow_consumer_plugin_def_t = &plugin_video_dshow_consumer_plugin_def_s;
// Helper functions
static HRESULT CreateDeviceD3D9(
- HWND hWnd,
- IDirect3DDevice9** ppDevice,
- IDirect3D9 **ppD3D,
- D3DPRESENT_PARAMETERS &d3dpp
- )
+ HWND hWnd,
+ IDirect3DDevice9** ppDevice,
+ IDirect3D9 **ppD3D,
+ D3DPRESENT_PARAMETERS &d3dpp
+)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
D3DDISPLAYMODE mode = { 0 };
- D3DPRESENT_PARAMETERS pp = {0};
-
- if(!ppDevice || *ppDevice || !ppD3D || *ppD3D)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION)))
- {
+ D3DPRESENT_PARAMETERS pp = {0};
+
+ if(!ppDevice || *ppDevice || !ppD3D || *ppD3D) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION))) {
CHECK_HR(hr = E_OUTOFMEMORY);
}
CHECK_HR(hr = (*ppD3D)->GetAdapterDisplayMode(
- D3DADAPTER_DEFAULT,
- &mode
- ));
+ D3DADAPTER_DEFAULT,
+ &mode
+ ));
CHECK_HR(hr = (*ppD3D)->CheckDeviceType(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- mode.Format,
- D3DFMT_X8R8G8B8,
- TRUE // windowed
- ));
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ mode.Format,
+ D3DFMT_X8R8G8B8,
+ TRUE // windowed
+ ));
pp.BackBufferFormat = D3DFMT_X8R8G8B8;
pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
- pp.Windowed = TRUE;
+ pp.Windowed = TRUE;
pp.hDeviceWindow = hWnd;
CHECK_HR(hr = (*ppD3D)->CreateDevice(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- hWnd,
- D3DCREATE_HARDWARE_VERTEXPROCESSING,
- &pp,
- ppDevice
- ));
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ hWnd,
+ D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &pp,
+ ppDevice
+ ));
- d3dpp = pp;
+ d3dpp = pp;
bail:
- if(FAILED(hr))
- {
- SafeRelease(ppD3D);
- SafeRelease(ppDevice);
- }
+ if(FAILED(hr)) {
+ SafeRelease(ppD3D);
+ SafeRelease(ppDevice);
+ }
return hr;
}
static HRESULT TestCooperativeLevel(
- struct plugin_video_dshow_consumer_s *pSelf
- )
+ struct plugin_video_dshow_consumer_s *pSelf
+)
{
- HRESULT hr = S_OK;
-
- if (!pSelf || !pSelf->pDevice)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- switch((hr = pSelf->pDevice->TestCooperativeLevel()))
- {
- case D3D_OK:
- {
- break;
- }
-
- case D3DERR_DEVICELOST:
- {
- hr = S_OK;
- break;
- }
-
- case D3DERR_DEVICENOTRESET:
- {
- hr = ResetDevice(pSelf, TRUE);
- break;
- }
-
- default:
- {
- break;
- }
- }
-
- CHECK_HR(hr);
+ HRESULT hr = S_OK;
+
+ if (!pSelf || !pSelf->pDevice) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ switch((hr = pSelf->pDevice->TestCooperativeLevel())) {
+ case D3D_OK: {
+ break;
+ }
+
+ case D3DERR_DEVICELOST: {
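+ // Device lost but cannot be reset yet: not treated as an error here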
+ hr = S_OK;
+ break;
+ }
+
+ case D3DERR_DEVICENOTRESET: {
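+ // Device is ready to be reset: recreate it and force-update the destination rect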
+ hr = ResetDevice(pSelf, TRUE);
+ break;
+ }
+
+ default: {
+ break;
+ }
+ }
+
+ CHECK_HR(hr);
bail:
return hr;
}
static HRESULT CreateSwapChain(
- HWND hWnd,
- UINT32 nFrameWidth,
- UINT32 nFrameHeight,
- IDirect3DDevice9* pDevice,
- IDirect3DSwapChain9 **ppSwapChain
- )
+ HWND hWnd,
+ UINT32 nFrameWidth,
+ UINT32 nFrameHeight,
+ IDirect3DDevice9* pDevice,
+ IDirect3DSwapChain9 **ppSwapChain
+)
{
HRESULT hr = S_OK;
D3DPRESENT_PARAMETERS pp = { 0 };
- if(!pDevice || !ppSwapChain || *ppSwapChain)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- pp.BackBufferWidth = nFrameWidth;
+ if(!pDevice || !ppSwapChain || *ppSwapChain) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ pp.BackBufferWidth = nFrameWidth;
pp.BackBufferHeight = nFrameHeight;
pp.Windowed = TRUE;
pp.SwapEffect = D3DSWAPEFFECT_FLIP;
@@ -706,7 +658,7 @@ static HRESULT CreateSwapChain(
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
pp.BackBufferCount = NUM_BACK_BUFFERS;
- CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
+ CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
bail:
return hr;
@@ -714,7 +666,7 @@ bail:
static inline HWND Window(struct plugin_video_dshow_consumer_s *pSelf)
{
- return pSelf ? (pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow) : NULL;
+ return pSelf ? (pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow) : NULL;
}
static inline LONG Width(const RECT& r)
@@ -743,17 +695,14 @@ static inline RECT CorrectAspectRatio(const RECT& src, const DSRatio& srcPAR)
// Start with a rectangle the same size as src, but offset to the origin (0,0).
RECT rc = {0, 0, src.right - src.left, src.bottom - src.top};
- if ((srcPAR.Numerator != 1) || (srcPAR.Denominator != 1))
- {
+ if ((srcPAR.Numerator != 1) || (srcPAR.Denominator != 1)) {
// Correct for the source's PAR.
- if (srcPAR.Numerator > srcPAR.Denominator)
- {
+ if (srcPAR.Numerator > srcPAR.Denominator) {
// The source has "wide" pixels, so stretch the width.
rc.right = MulDiv(rc.right, srcPAR.Numerator, srcPAR.Denominator);
}
- else if (srcPAR.Numerator < srcPAR.Denominator)
- {
+ else if (srcPAR.Numerator < srcPAR.Denominator) {
// The source has "tall" pixels, so stretch the height.
rc.bottom = MulDiv(rc.bottom, srcPAR.Denominator, srcPAR.Numerator);
}
@@ -816,266 +765,242 @@ static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst)
static inline HRESULT UpdateDestinationRect(plugin_video_dshow_consumer_t *pSelf, BOOL bForce /*= FALSE*/)
{
- HRESULT hr = S_OK;
- HWND hwnd = Window(pSelf);
-
- if(!pSelf)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!hwnd)
- {
- CHECK_HR(hr = E_HANDLE);
- }
+ HRESULT hr = S_OK;
+ HWND hwnd = Window(pSelf);
+
+ if(!pSelf) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!hwnd) {
+ CHECK_HR(hr = E_HANDLE);
+ }
RECT rcClient;
- GetClientRect(hwnd, &rcClient);
+ GetClientRect(hwnd, &rcClient);
- // only update destination if window size changed
- if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top))
- {
- CHECK_HR(hr = ResetDevice(pSelf));
+ // only update destination if window size changed
+ if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top)) {
+ CHECK_HR(hr = ResetDevice(pSelf));
- pSelf->rcWindow = rcClient;
+ pSelf->rcWindow = rcClient;
#if 1
- RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
- rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
- pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
+ RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
+ rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
+ pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
#else
- long w = rcClient.right - rcClient.left;
- long h = rcClient.bottom - rcClient.top;
- float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
- // (w/h)=ratio =>
- // 1) h=w/ratio
- // and
- // 2) w=h*ratio
- pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
- pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
- pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
- pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
+ long w = rcClient.right - rcClient.left;
+ long h = rcClient.bottom - rcClient.top;
+ float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
+ // (w/h)=ratio =>
+ // 1) h=w/ratio
+ // and
+ // 2) w=h*ratio
+ pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
+ pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
+ pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
+ pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
#endif
- //::InvalidateRect(hwnd, NULL, FALSE);
- }
+ //::InvalidateRect(hwnd, NULL, FALSE);
+ }
bail:
- return hr;
+ return hr;
}
static HRESULT ResetDevice(plugin_video_dshow_consumer_t *pSelf, BOOL bUpdateDestinationRect /*= FALSE*/)
{
HRESULT hr = S_OK;
- tsk_safeobj_lock(pSelf);
+ tsk_safeobj_lock(pSelf);
- HWND hWnd = Window(pSelf);
+ HWND hWnd = Window(pSelf);
- if (pSelf->pDevice)
- {
+ if (pSelf->pDevice) {
D3DPRESENT_PARAMETERS d3dpp = pSelf->d3dpp;
hr = pSelf->pDevice->Reset(&d3dpp);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
SafeRelease(&pSelf->pDevice);
- SafeRelease(&pSelf->pD3D);
- SafeRelease(&pSelf->pSwapChain);
+ SafeRelease(&pSelf->pD3D);
+ SafeRelease(&pSelf->pSwapChain);
}
}
- if (pSelf->pDevice == NULL && hWnd)
- {
+ if (pSelf->pDevice == NULL && hWnd) {
CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
- if(bUpdateDestinationRect) // endless loop guard
- {
- CHECK_HR(hr = UpdateDestinationRect(pSelf));
- }
+ if(bUpdateDestinationRect) { // endless loop guard
+ CHECK_HR(hr = UpdateDestinationRect(pSelf));
+ }
bail:
- tsk_safeobj_unlock(pSelf);
+ tsk_safeobj_unlock(pSelf);
- return hr;
+ return hr;
}
static HRESULT SetFullscreen(struct plugin_video_dshow_consumer_s *pSelf, BOOL bFullScreen)
{
- HRESULT hr = S_OK;
- if(!pSelf)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(pSelf->bFullScreen != bFullScreen)
- {
- tsk_safeobj_lock(pSelf);
- if(bFullScreen)
- {
- HWND hWnd = CreateFullScreenWindow(pSelf);
- if(hWnd)
- {
- ::ShowWindow(hWnd, SW_SHOWDEFAULT);
- ::UpdateWindow(hWnd);
- }
- }
- else if(pSelf->hWindowFullScreen)
- {
- ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
- }
- pSelf->bFullScreen = bFullScreen;
- if(pSelf->bPrepared)
- {
- hr = ResetDevice(pSelf);
- }
- tsk_safeobj_unlock(pSelf);
-
- CHECK_HR(hr);
- }
+ HRESULT hr = S_OK;
+ if(!pSelf) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(pSelf->bFullScreen != bFullScreen) {
+ tsk_safeobj_lock(pSelf);
+ if(bFullScreen) {
+ HWND hWnd = CreateFullScreenWindow(pSelf);
+ if(hWnd) {
+ ::ShowWindow(hWnd, SW_SHOWDEFAULT);
+ ::UpdateWindow(hWnd);
+ }
+ }
+ else if(pSelf->hWindowFullScreen) {
+ ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+ }
+ pSelf->bFullScreen = bFullScreen;
+ if(pSelf->bPrepared) {
+ hr = ResetDevice(pSelf);
+ }
+ tsk_safeobj_unlock(pSelf);
+
+ CHECK_HR(hr);
+ }
bail:
- return hr;
+ return hr;
}
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- switch(uMsg)
- {
- case WM_CREATE:
- case WM_SIZE:
- case WM_MOVE:
- {
- struct plugin_video_dshow_consumer_s* pSelf = dynamic_cast<struct plugin_video_dshow_consumer_s*>((struct plugin_video_dshow_consumer_s*)GetPropA(hWnd, "Self"));
- if (pSelf)
- {
-
- }
- break;
- }
+ switch(uMsg) {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE: {
+ struct plugin_video_dshow_consumer_s* pSelf = dynamic_cast<struct plugin_video_dshow_consumer_s*>((struct plugin_video_dshow_consumer_s*)GetPropA(hWnd, "Self"));
+ if (pSelf) {
+
+ }
+ break;
+ }
#if 0
- case WM_PAINT:
- {
- PAINTSTRUCT ps;
- HDC hdc = BeginPaint(hWnd, &ps);
- ps.fErase = FALSE;
-
- RECT rc;
- GetBoundsRect(hdc, &rc, 0);
- FillRect(hdc, &rc, (HBRUSH)GetStockObject(BLACK_BRUSH));
-
- EndPaint(hWnd, &ps);
- break;
- }
+ case WM_PAINT: {
+ PAINTSTRUCT ps;
+ HDC hdc = BeginPaint(hWnd, &ps);
+ ps.fErase = FALSE;
+
+ RECT rc;
+ GetBoundsRect(hdc, &rc, 0);
+ FillRect(hdc, &rc, (HBRUSH)GetStockObject(BLACK_BRUSH));
+
+ EndPaint(hWnd, &ps);
+ break;
+ }
#endif
- case WM_ERASEBKGND:
- {
- return TRUE; // avoid background erasing.
- }
-
-
- case WM_CHAR:
- case WM_KEYUP:
- {
- struct plugin_video_dshow_consumer_s* pSelf = dynamic_cast<struct plugin_video_dshow_consumer_s*>((struct plugin_video_dshow_consumer_s*)GetPropA(hWnd, "Self"));
- if (pSelf)
- {
- SetFullscreen(pSelf, FALSE);
- }
-
- break;
- }
- }
-
- return DefWindowProc(hWnd, uMsg, wParam, lParam);
+ case WM_ERASEBKGND: {
+ return TRUE; // avoid background erasing.
+ }
+
+
+ case WM_CHAR:
+ case WM_KEYUP: {
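+ // Any key press exits fullscreen mode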
+ struct plugin_video_dshow_consumer_s* pSelf = dynamic_cast<struct plugin_video_dshow_consumer_s*>((struct plugin_video_dshow_consumer_s*)GetPropA(hWnd, "Self"));
+ if (pSelf) {
+ SetFullscreen(pSelf, FALSE);
+ }
+
+ break;
+ }
+ }
+
+ return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
static HWND CreateFullScreenWindow(struct plugin_video_dshow_consumer_s *pSelf)
{
- HRESULT hr = S_OK;
-
- if(!pSelf)
- {
- return NULL;
- }
-
- if(!pSelf->hWindowFullScreen)
- {
- WNDCLASS wc = {0};
-
- wc.lpfnWndProc = WndProc;
- wc.hInstance = GetModuleHandle(NULL);
- wc.hCursor = LoadCursor(NULL, IDC_ARROW);
- wc.lpszClassName = L"WindowClass";
- RegisterClass(&wc);
- pSelf->hWindowFullScreen = ::CreateWindowEx(
- NULL,
- wc.lpszClassName,
- L"Doubango's Video Consumer Fullscreen",
- WS_EX_TOPMOST | WS_POPUP,
- 0, 0,
- GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
- NULL,
- NULL,
- GetModuleHandle(NULL),
- NULL);
-
- SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
- }
- return pSelf->hWindowFullScreen;
+ HRESULT hr = S_OK;
+
+ if(!pSelf) {
+ return NULL;
+ }
+
+ if(!pSelf->hWindowFullScreen) {
+ WNDCLASS wc = {0};
+
+ wc.lpfnWndProc = WndProc;
+ wc.hInstance = GetModuleHandle(NULL);
+ wc.hCursor = LoadCursor(NULL, IDC_ARROW);
+ wc.lpszClassName = L"WindowClass";
+ RegisterClass(&wc);
+ pSelf->hWindowFullScreen = ::CreateWindowEx(
+ NULL,
+ wc.lpszClassName,
+ L"Doubango's Video Consumer Fullscreen",
+ WS_EX_TOPMOST | WS_POPUP,
+ 0, 0,
+ GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
+ NULL,
+ NULL,
+ GetModuleHandle(NULL),
+ NULL);
+
+ SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
+ }
+ return pSelf->hWindowFullScreen;
}
static HRESULT HookWindow(plugin_video_dshow_consumer_t *pSelf, HWND hWnd)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- tsk_safeobj_lock(pSelf);
+ tsk_safeobj_lock(pSelf);
- CHECK_HR(hr = UnhookWindow(pSelf));
+ CHECK_HR(hr = UnhookWindow(pSelf));
- if ((pSelf->hWindow = hWnd)) {
- pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
- if (!pSelf->wndProc) {
- TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
- CHECK_HR(hr = E_FAIL);
- }
- pSelf->bWindowHooked = TRUE;
- }
+ if ((pSelf->hWindow = hWnd)) {
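+ // Subclass the window so WndProc intercepts its size/key messages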
+ pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
+ if (!pSelf->wndProc) {
+ TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
+ CHECK_HR(hr = E_FAIL);
+ }
+ pSelf->bWindowHooked = TRUE;
+ }
bail:
- tsk_safeobj_unlock(pSelf);
- return S_OK;
+ tsk_safeobj_unlock(pSelf);
+ return hr;
}
static HRESULT UnhookWindow(struct plugin_video_dshow_consumer_s *pSelf)
{
- tsk_safeobj_lock(pSelf);
- if (pSelf->hWindow && pSelf->wndProc) {
- SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
- pSelf->wndProc = NULL;
- }
- if(pSelf->hWindow)
- {
- ::InvalidateRect(pSelf->hWindow, NULL, FALSE);
- }
- pSelf->bWindowHooked = FALSE;
- tsk_safeobj_unlock(pSelf);
- return S_OK;
+ tsk_safeobj_lock(pSelf);
+ if (pSelf->hWindow && pSelf->wndProc) {
+ SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
+ pSelf->wndProc = NULL;
+ }
+ if(pSelf->hWindow) {
+ ::InvalidateRect(pSelf->hWindow, NULL, FALSE);
+ }
+ pSelf->bWindowHooked = FALSE;
+ tsk_safeobj_unlock(pSelf);
+ return S_OK;
}
#else /* !PLUGIN_DS_CV_USE_D3D9 */
-typedef struct plugin_video_dshow_consumer_s
-{
- TMEDIA_DECLARE_CONSUMER;
-
- DSDisplay* display;
- INT64 window;
-
- tsk_bool_t plugin_firefox;
- tsk_bool_t started;
- tsk_bool_t create_on_ui_thread;
+typedef struct plugin_video_dshow_consumer_s {
+ TMEDIA_DECLARE_CONSUMER;
+
+ DSDisplay* display;
+ INT64 window;
+
+ tsk_bool_t plugin_firefox;
+ tsk_bool_t started;
+ tsk_bool_t create_on_ui_thread;
}
plugin_video_dshow_consumer_t;
@@ -1084,166 +1009,170 @@ plugin_video_dshow_consumer_t;
/* ============ Media Consumer Interface ================= */
static int plugin_video_dshow_consumer_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
-
- if(!self || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(param->value_type == tmedia_pvt_int64){
- if(tsk_striequals(param->key, "remote-hwnd")){
- DSCONSUMER(self)->window = (INT64)*((int64_t*)param->value);
- if(DSCONSUMER(self)->display){
- if(DSCONSUMER(self)->window){
- DSCONSUMER(self)->display->attach(DSCONSUMER(self)->window);
- }
- else{
- DSCONSUMER(self)->display->detach();
- }
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "fullscreen")){
- if(DSCONSUMER(self)->display){
- DSCONSUMER(self)->display->setFullscreen(*((int32_t*)param->value) != 0);
- }
- }
- else if(tsk_striequals(param->key, "create-on-current-thead")){
- DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if(tsk_striequals(param->key, "plugin-firefox")){
- DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
- if(DSCONSUMER(self)->display){
- DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
- }
- }
- }
-
- return ret;
+ int ret = 0;
+
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(param->value_type == tmedia_pvt_int64) {
+ if(tsk_striequals(param->key, "remote-hwnd")) {
+ DSCONSUMER(self)->window = (INT64)*((int64_t*)param->value);
+ if(DSCONSUMER(self)->display) {
+ if(DSCONSUMER(self)->window) {
+ DSCONSUMER(self)->display->attach(DSCONSUMER(self)->window);
+ }
+ else {
+ DSCONSUMER(self)->display->detach();
+ }
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "fullscreen")) {
+ if(DSCONSUMER(self)->display) {
+ DSCONSUMER(self)->display->setFullscreen(*((int32_t*)param->value) != 0);
+ }
+ }
+ else if(tsk_striequals(param->key, "create-on-current-thead")) {
+ DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if(tsk_striequals(param->key, "plugin-firefox")) {
+ DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
+ if(DSCONSUMER(self)->display) {
+ DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
+ }
+ }
+ }
+
+ return ret;
}
static int plugin_video_dshow_consumer_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
-
- if(!consumer || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- TMEDIA_CONSUMER(consumer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(consumer)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(consumer)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
-
- if(!TMEDIA_CONSUMER(consumer)->video.display.width){
- TMEDIA_CONSUMER(consumer)->video.display.width = TMEDIA_CONSUMER(consumer)->video.in.width;
- }
- if(!TMEDIA_CONSUMER(consumer)->video.display.height){
- TMEDIA_CONSUMER(consumer)->video.display.height = TMEDIA_CONSUMER(consumer)->video.in.height;
- }
-
- return 0;
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+
+ if(!consumer || !codec && codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_CONSUMER(consumer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(consumer)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(consumer)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(consumer)->video.display.width) {
+ TMEDIA_CONSUMER(consumer)->video.display.width = TMEDIA_CONSUMER(consumer)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(consumer)->video.display.height) {
+ TMEDIA_CONSUMER(consumer)->video.display.height = TMEDIA_CONSUMER(consumer)->video.in.height;
+ }
+
+ return 0;
}
static int plugin_video_dshow_consumer_start(tmedia_consumer_t* self)
{
- plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
-
- if(!consumer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(consumer->started){
- return 0;
- }
-
- // create display on UI thread
- if(!consumer->display){
- if (consumer->create_on_ui_thread) createOnUIThead(reinterpret_cast<HWND>((void*)consumer->window), (void**)&consumer->display, true, false);
- else createOnCurrentThead(reinterpret_cast<HWND>((void*)consumer->window), (void**)&consumer->display, true, false);
-
- if(!consumer->display){
- TSK_DEBUG_ERROR("Failed to create display");
- return -2;
- }
- }
-
- // Set parameters
- consumer->display->setPluginFirefox((consumer->plugin_firefox == tsk_true));
- consumer->display->setFps(TMEDIA_CONSUMER(consumer)->video.fps);
- // do not change the display size: see hook()
- // consumer->display->setSize(TMEDIA_CONSUMER(consumer)->video.display.width, TMEDIA_CONSUMER(consumer)->video.display.height);
- if(consumer->window){
- consumer->display->attach(consumer->window);
- }
-
- // Start display
- consumer->display->start();
- consumer->started = tsk_true;
-
- return 0;
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+
+ if(!consumer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(consumer->started) {
+ return 0;
+ }
+
+ // create display on UI thread
+ if(!consumer->display) {
+ if (consumer->create_on_ui_thread) {
+ createOnUIThead(reinterpret_cast<HWND>((void*)consumer->window), (void**)&consumer->display, true, false);
+ }
+ else {
+ createOnCurrentThead(reinterpret_cast<HWND>((void*)consumer->window), (void**)&consumer->display, true, false);
+ }
+
+ if(!consumer->display) {
+ TSK_DEBUG_ERROR("Failed to create display");
+ return -2;
+ }
+ }
+
+ // Set parameters
+ consumer->display->setPluginFirefox((consumer->plugin_firefox == tsk_true));
+ consumer->display->setFps(TMEDIA_CONSUMER(consumer)->video.fps);
+ // do not change the display size: see hook()
+ // consumer->display->setSize(TMEDIA_CONSUMER(consumer)->video.display.width, TMEDIA_CONSUMER(consumer)->video.display.height);
+ if(consumer->window) {
+ consumer->display->attach(consumer->window);
+ }
+
+ // Start display
+ consumer->display->start();
+ consumer->started = tsk_true;
+
+ return 0;
}
static int plugin_video_dshow_consumer_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
- if(consumer && consumer->display && buffer){
- consumer->display->handleVideoFrame(buffer, TMEDIA_CONSUMER(consumer)->video.display.width, TMEDIA_CONSUMER(consumer)->video.display.height);
- return 0;
- }
- else{
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+ if(consumer && consumer->display && buffer) {
+ consumer->display->handleVideoFrame(buffer, TMEDIA_CONSUMER(consumer)->video.display.width, TMEDIA_CONSUMER(consumer)->video.display.height);
+ return 0;
+ }
+ else {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
}
static int plugin_video_dshow_consumer_pause(tmedia_consumer_t* self)
{
- plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
- if(!consumer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!consumer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!consumer->display){
- TSK_DEBUG_ERROR("Invalid internal grabber");
- return -2;
- }
+ if(!consumer->display) {
+ TSK_DEBUG_ERROR("Invalid internal grabber");
+ return -2;
+ }
- //consumer->display->pause();
+ //consumer->display->pause();
- return 0;
+ return 0;
}
static int plugin_video_dshow_consumer_stop(tmedia_consumer_t* self)
{
- plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
+ plugin_video_dshow_consumer_t* consumer = (plugin_video_dshow_consumer_t*)self;
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!consumer->started){
- return 0;
- }
+ if(!consumer->started) {
+ return 0;
+ }
- if(!consumer->display){
- TSK_DEBUG_ERROR("Invalid internal display");
- return -2;
- }
+ if(!consumer->display) {
+ TSK_DEBUG_ERROR("Invalid internal display");
+ return -2;
+ }
- TSK_DEBUG_INFO("Before stopping DirectShow consumer");
- consumer->display->stop();
- consumer->started = tsk_false;
- TSK_DEBUG_INFO("After stopping DirectShow consumer");
+ TSK_DEBUG_INFO("Before stopping DirectShow consumer");
+ consumer->display->stop();
+ consumer->started = tsk_false;
+ TSK_DEBUG_INFO("After stopping DirectShow consumer");
- return 0;
+ return 0;
}
@@ -1253,65 +1182,63 @@ static int plugin_video_dshow_consumer_stop(tmedia_consumer_t* self)
/* constructor */
static tsk_object_t* plugin_video_dshow_consumer_ctor(tsk_object_t * self, va_list * app)
{
- CoInitializeEx(NULL, COINIT_MULTITHREADED);
-
- plugin_video_dshow_consumer_t *consumer = (plugin_video_dshow_consumer_t *)self;
- if(consumer){
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(consumer));
- TMEDIA_CONSUMER(consumer)->video.display.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
-
- /* init self */
- consumer->create_on_ui_thread = tsk_true;
- TMEDIA_CONSUMER(consumer)->video.fps = 15;
- TMEDIA_CONSUMER(consumer)->video.display.width = 352;
- TMEDIA_CONSUMER(consumer)->video.display.height = 288;
- TMEDIA_CONSUMER(consumer)->video.display.auto_resize = tsk_true;
- }
- return self;
+ CoInitializeEx(NULL, COINIT_MULTITHREADED);
+
+ plugin_video_dshow_consumer_t *consumer = (plugin_video_dshow_consumer_t *)self;
+ if(consumer) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(consumer));
+ TMEDIA_CONSUMER(consumer)->video.display.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endian) stored as BGR24
+
+ /* init self */
+ consumer->create_on_ui_thread = tsk_true;
+ TMEDIA_CONSUMER(consumer)->video.fps = 15;
+ TMEDIA_CONSUMER(consumer)->video.display.width = 352;
+ TMEDIA_CONSUMER(consumer)->video.display.height = 288;
+ TMEDIA_CONSUMER(consumer)->video.display.auto_resize = tsk_true;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_video_dshow_consumer_dtor(tsk_object_t * self)
-{
- plugin_video_dshow_consumer_t *consumer = (plugin_video_dshow_consumer_t *)self;
- if(consumer){
-
- /* stop */
- if(consumer->started){
- plugin_video_dshow_consumer_stop((tmedia_consumer_t*)self);
- }
-
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(consumer));
- /* deinit self */
- SAFE_DELETE_PTR(consumer->display);
-
- }
-
- return self;
+{
+ plugin_video_dshow_consumer_t *consumer = (plugin_video_dshow_consumer_t *)self;
+ if(consumer) {
+
+ /* stop */
+ if(consumer->started) {
+ plugin_video_dshow_consumer_stop((tmedia_consumer_t*)self);
+ }
+
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(consumer));
+ /* deinit self */
+ SAFE_DELETE_PTR(consumer->display);
+
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_video_dshow_consumer_def_s =
-{
- sizeof(plugin_video_dshow_consumer_t),
- plugin_video_dshow_consumer_ctor,
- plugin_video_dshow_consumer_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_video_dshow_consumer_def_s = {
+ sizeof(plugin_video_dshow_consumer_t),
+ plugin_video_dshow_consumer_ctor,
+ plugin_video_dshow_consumer_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_video_dshow_consumer_plugin_def_s =
-{
- &plugin_video_dshow_consumer_def_s,
-
- tmedia_video,
- "Microsoft DirectShow consumer (using custom source)",
-
- plugin_video_dshow_consumer_set,
- plugin_video_dshow_consumer_prepare,
- plugin_video_dshow_consumer_start,
- plugin_video_dshow_consumer_consume,
- plugin_video_dshow_consumer_pause,
- plugin_video_dshow_consumer_stop
+static const tmedia_consumer_plugin_def_t plugin_video_dshow_consumer_plugin_def_s = {
+ &plugin_video_dshow_consumer_def_s,
+
+ tmedia_video,
+ "Microsoft DirectShow consumer (using custom source)",
+
+ plugin_video_dshow_consumer_set,
+ plugin_video_dshow_consumer_prepare,
+ plugin_video_dshow_consumer_start,
+ plugin_video_dshow_consumer_consume,
+ plugin_video_dshow_consumer_pause,
+ plugin_video_dshow_consumer_stop
};
const tmedia_consumer_plugin_def_t *plugin_video_dshow_consumer_plugin_def_t = &plugin_video_dshow_consumer_plugin_def_s;
diff --git a/plugins/pluginDirectShow/plugin_video_dshow_producer.cxx b/plugins/pluginDirectShow/plugin_video_dshow_producer.cxx
index 61aa929..77f6bfb 100755
--- a/plugins/pluginDirectShow/plugin_video_dshow_producer.cxx
+++ b/plugins/pluginDirectShow/plugin_video_dshow_producer.cxx
@@ -1,17 +1,17 @@
/* Copyright (C) 2011-2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -26,187 +26,190 @@
#define DSPRODUCER(self) ((plugin_video_dshow_producer_t*)(self))
-typedef struct plugin_video_dshow_producer_s
-{
- TMEDIA_DECLARE_PRODUCER;
-
- DSGrabber* grabber;
- INT64 previewHwnd;
-
- tsk_bool_t plugin_firefox;
- tsk_bool_t started;
- tsk_bool_t mute;
- tsk_bool_t create_on_ui_thread;
+typedef struct plugin_video_dshow_producer_s {
+ TMEDIA_DECLARE_PRODUCER;
+
+ DSGrabber* grabber;
+ INT64 previewHwnd;
+
+ tsk_bool_t plugin_firefox;
+ tsk_bool_t started;
+ tsk_bool_t mute;
+ tsk_bool_t create_on_ui_thread;
}
plugin_video_dshow_producer_t;
// Producer callback (From DirectShow Grabber to our plugin)
static int plugin_video_dshow_plugin_cb(const void* callback_data, const void* buffer, tsk_size_t size)
{
- const plugin_video_dshow_producer_t* producer = (const plugin_video_dshow_producer_t*)callback_data;
+ const plugin_video_dshow_producer_t* producer = (const plugin_video_dshow_producer_t*)callback_data;
- if(producer && TMEDIA_PRODUCER(producer)->enc_cb.callback){
- TMEDIA_PRODUCER(producer)->enc_cb.callback(TMEDIA_PRODUCER(producer)->enc_cb.callback_data, buffer, size);
- }
+ if(producer && TMEDIA_PRODUCER(producer)->enc_cb.callback) {
+ TMEDIA_PRODUCER(producer)->enc_cb.callback(TMEDIA_PRODUCER(producer)->enc_cb.callback_data, buffer, size);
+ }
- return 0;
+ return 0;
}
/* ============ Media Producer Interface ================= */
static int plugin_video_dshow_producer_set(tmedia_producer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
-
- if(!producer || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(param->value_type == tmedia_pvt_int64){
- if(tsk_striequals(param->key, "local-hwnd")){
- DSPRODUCER(producer)->previewHwnd = (INT64)*((int64_t*)param->value);
- if(DSPRODUCER(producer)->grabber && DSPRODUCER(self)->grabber->preview){
- DSPRODUCER(producer)->grabber->preview->attach(DSPRODUCER(producer)->previewHwnd);
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "mute")){
- producer->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
- if(producer->started){
- if (producer->mute) {
- producer->grabber->pause();
- }
- else{
- producer->grabber->start();
- }
- }
- }
- else if(tsk_striequals(param->key, "create-on-current-thead")){
- producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if(tsk_striequals(param->key, "plugin-firefox")){
- producer->plugin_firefox = (*((int32_t*)param->value) != 0);
- if(producer->grabber){
- producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
- }
- }
- }
-
- return ret;
+ int ret = 0;
+ plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
+
+ if(!producer || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(param->value_type == tmedia_pvt_int64) {
+ if(tsk_striequals(param->key, "local-hwnd")) {
+ DSPRODUCER(producer)->previewHwnd = (INT64)*((int64_t*)param->value);
+ if(DSPRODUCER(producer)->grabber && DSPRODUCER(self)->grabber->preview) {
+ DSPRODUCER(producer)->grabber->preview->attach(DSPRODUCER(producer)->previewHwnd);
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "mute")) {
+ producer->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+ if(producer->started) {
+ if (producer->mute) {
+ producer->grabber->pause();
+ }
+ else {
+ producer->grabber->start();
+ }
+ }
+ }
+ else if(tsk_striequals(param->key, "create-on-current-thead")) {
+ producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if(tsk_striequals(param->key, "plugin-firefox")) {
+ producer->plugin_firefox = (*((int32_t*)param->value) != 0);
+ if(producer->grabber) {
+ producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
+ }
+ }
+ }
+
+ return ret;
}
static int plugin_video_dshow_producer_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
-
- if(!producer || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- TMEDIA_PRODUCER(producer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
- TMEDIA_PRODUCER(producer)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
- TMEDIA_PRODUCER(producer)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
-
- return 0;
+ plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
+
+ if(!producer || !codec && codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_PRODUCER(producer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+ TMEDIA_PRODUCER(producer)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+ TMEDIA_PRODUCER(producer)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+
+ return 0;
}
static int plugin_video_dshow_producer_start(tmedia_producer_t* self)
{
- plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
-
- if(!producer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (producer->started) {
- return 0;
- }
-
- // create grabber on UI thread
- if (!producer->grabber) {
- static BOOL __isDisplayFalse = FALSE;
- static BOOL __isScreenCastFalse = FALSE;
- if(producer->create_on_ui_thread) createOnUIThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastFalse);
- else createOnCurrentThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastFalse);
- if (!producer->grabber) {
- TSK_DEBUG_ERROR("Failed to create grabber");
- return -2;
- }
- }
- producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
-
- //set Source device
- producer->grabber->setCaptureDevice("Null");
-
- // set parameters
- producer->grabber->setCaptureParameters((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height, TMEDIA_PRODUCER(producer)->video.fps);
-
- // set callback function
- producer->grabber->setCallback(plugin_video_dshow_plugin_cb, producer);
-
- // attach preview
- if(producer->grabber->preview){
- if(producer->previewHwnd){
- producer->grabber->preview->attach(producer->previewHwnd);
- }
- producer->grabber->preview->setSize((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height);
- }
-
- // start grabber
- if(!producer->mute){
- producer->grabber->start();
- }
- producer->started = tsk_true;
-
- return 0;
+ plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
+
+ if(!producer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (producer->started) {
+ return 0;
+ }
+
+ // create grabber on UI thread
+ if (!producer->grabber) {
+ static BOOL __isDisplayFalse = FALSE;
+ static BOOL __isScreenCastFalse = FALSE;
+ if(producer->create_on_ui_thread) {
+ createOnUIThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastFalse);
+ }
+ else {
+ createOnCurrentThead(reinterpret_cast<HWND>((void*)DSPRODUCER(producer)->previewHwnd), (void**)&producer->grabber, __isDisplayFalse, __isScreenCastFalse);
+ }
+ if (!producer->grabber) {
+ TSK_DEBUG_ERROR("Failed to create grabber");
+ return -2;
+ }
+ }
+ producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
+
+ // set source device
+ producer->grabber->setCaptureDevice("Null");
+
+ // set parameters
+ producer->grabber->setCaptureParameters((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height, TMEDIA_PRODUCER(producer)->video.fps);
+
+ // set callback function
+ producer->grabber->setCallback(plugin_video_dshow_plugin_cb, producer);
+
+ // attach preview
+ if(producer->grabber->preview) {
+ if(producer->previewHwnd) {
+ producer->grabber->preview->attach(producer->previewHwnd);
+ }
+ producer->grabber->preview->setSize((int)TMEDIA_PRODUCER(producer)->video.width, (int)TMEDIA_PRODUCER(producer)->video.height);
+ }
+
+ // start grabber
+ if(!producer->mute) {
+ producer->grabber->start();
+ }
+ producer->started = tsk_true;
+
+ return 0;
}
static int plugin_video_dshow_producer_pause(tmedia_producer_t* self)
{
- plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
+ plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
- if(!producer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!producer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!producer->grabber){
- TSK_DEBUG_ERROR("Invalid internal grabber");
- return -2;
- }
+ if(!producer->grabber) {
+ TSK_DEBUG_ERROR("Invalid internal grabber");
+ return -2;
+ }
- producer->grabber->pause();
+ producer->grabber->pause();
- return 0;
+ return 0;
}
static int plugin_video_dshow_producer_stop(tmedia_producer_t* self)
{
- plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
+ plugin_video_dshow_producer_t* producer = (plugin_video_dshow_producer_t*)self;
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!producer->started){
- return 0;
- }
+ if(!producer->started) {
+ return 0;
+ }
- if(!producer->grabber){
- TSK_DEBUG_ERROR("Invalid internal grabber");
- return -2;
- }
+ if(!producer->grabber) {
+ TSK_DEBUG_ERROR("Invalid internal grabber");
+ return -2;
+ }
- producer->grabber->stop();
- producer->started = tsk_false;
+ producer->grabber->stop();
+ producer->started = tsk_false;
- return 0;
+ return 0;
}
@@ -215,62 +218,61 @@ static int plugin_video_dshow_producer_stop(tmedia_producer_t* self)
//
/* constructor */
static tsk_object_t* plugin_video_dshow_producer_ctor(tsk_object_t * self, va_list * app)
-{ plugin_video_dshow_producer_t *producer = (plugin_video_dshow_producer_t *)self;
- if (producer) {
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(producer));
- TMEDIA_PRODUCER(producer)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
- /* init self with default values*/
- producer->create_on_ui_thread = tsk_true;
- TMEDIA_PRODUCER(producer)->video.fps = 15;
- TMEDIA_PRODUCER(producer)->video.width = 352;
- TMEDIA_PRODUCER(producer)->video.height = 288;
- }
- return self;
+{
+ plugin_video_dshow_producer_t *producer = (plugin_video_dshow_producer_t *)self;
+ if (producer) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(producer));
+        TMEDIA_PRODUCER(producer)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little-endian) is stored as BGR24
+        /* init self with default values */
+ producer->create_on_ui_thread = tsk_true;
+ TMEDIA_PRODUCER(producer)->video.fps = 15;
+ TMEDIA_PRODUCER(producer)->video.width = 352;
+ TMEDIA_PRODUCER(producer)->video.height = 288;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_video_dshow_producer_dtor(tsk_object_t * self)
-{
- plugin_video_dshow_producer_t *producer = (plugin_video_dshow_producer_t *)self;
- if(producer){
- /* stop */
- if(producer->started){
- plugin_video_dshow_producer_stop((tmedia_producer_t*)self);
- }
-
- /* for safety */
- if(producer->grabber){
- producer->grabber->setCallback(tsk_null, tsk_null);
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(producer));
- /* deinit self */
- SAFE_DELETE_PTR(producer->grabber);
- }
-
- return self;
+{
+ plugin_video_dshow_producer_t *producer = (plugin_video_dshow_producer_t *)self;
+ if(producer) {
+ /* stop */
+ if(producer->started) {
+ plugin_video_dshow_producer_stop((tmedia_producer_t*)self);
+ }
+
+ /* for safety */
+ if(producer->grabber) {
+ producer->grabber->setCallback(tsk_null, tsk_null);
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(producer));
+ /* deinit self */
+ SAFE_DELETE_PTR(producer->grabber);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_video_dshow_producer_def_s =
-{
- sizeof(plugin_video_dshow_producer_t),
- plugin_video_dshow_producer_ctor,
- plugin_video_dshow_producer_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_video_dshow_producer_def_s = {
+ sizeof(plugin_video_dshow_producer_t),
+ plugin_video_dshow_producer_ctor,
+ plugin_video_dshow_producer_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t plugin_video_dshow_producer_plugin_def_s =
-{
- &plugin_video_dshow_producer_def_s,
-
- tmedia_video,
- "Microsoft DirectShow producer",
-
- plugin_video_dshow_producer_set,
- plugin_video_dshow_producer_prepare,
- plugin_video_dshow_producer_start,
- plugin_video_dshow_producer_pause,
- plugin_video_dshow_producer_stop
+static const tmedia_producer_plugin_def_t plugin_video_dshow_producer_plugin_def_s = {
+ &plugin_video_dshow_producer_def_s,
+
+ tmedia_video,
+ "Microsoft DirectShow producer",
+
+ plugin_video_dshow_producer_set,
+ plugin_video_dshow_producer_prepare,
+ plugin_video_dshow_producer_start,
+ plugin_video_dshow_producer_pause,
+ plugin_video_dshow_producer_stop
};
const tmedia_producer_plugin_def_t *plugin_video_dshow_producer_plugin_def_t = &plugin_video_dshow_producer_plugin_def_s;
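The grabber pushes captured frames through the callback registered in start(); below is a minimal sketch of such a callback, assuming the usual convention of forwarding raw frames to TMEDIA_PRODUCER(self)->enc_cb. The signature and header path are illustrative, not the exact prototype of plugin_video_dshow_plugin_cb.

#include "tinymedia/tmedia_producer.h" /* assumed header path */

/* Illustrative callback: forwards a captured BGR24 frame to the encoder/session layer. */
static int example_grabber_cb(const void* callback_data, const void* buffer, tsk_size_t size)
{
    tmedia_producer_t* producer = (tmedia_producer_t*)callback_data;
    if (!producer || !buffer || !size) {
        return -1;
    }
    if (producer->enc_cb.callback) {
        /* same hand-off the WASAPI producer stores in m_callback.fn / m_callback.pcData */
        return producer->enc_cb.callback(producer->enc_cb.callback_data, buffer, size);
    }
    return 0;
}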
diff --git a/plugins/pluginWASAPI/dllmain_wasapi.cxx b/plugins/pluginWASAPI/dllmain_wasapi.cxx
index ff13977..1ba8313 100755
--- a/plugins/pluginWASAPI/dllmain_wasapi.cxx
+++ b/plugins/pluginWASAPI/dllmain_wasapi.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -48,87 +48,80 @@ PLUGIN_WASAPI_END_DECLS /* END */
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
- )
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- break;
- case DLL_THREAD_ATTACH:
- break;
- case DLL_THREAD_DETACH:
- break;
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
-typedef enum PLUGIN_INDEX_E
-{
+typedef enum PLUGIN_INDEX_E {
#if PLUGIN_WASAPI_ENABLE
- PLUGIN_INDEX_CONSUMER,
- PLUGIN_INDEX_PRODUCER,
+ PLUGIN_INDEX_CONSUMER,
+ PLUGIN_INDEX_PRODUCER,
#endif
-
- PLUGIN_INDEX_COUNT
+
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- return PLUGIN_INDEX_COUNT;
+ return PLUGIN_INDEX_COUNT;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
#if PLUGIN_WASAPI_ENABLE
- switch(index){
- case PLUGIN_INDEX_CONSUMER:
- {
- return tsk_plugin_def_type_consumer;
- }
- case PLUGIN_INDEX_PRODUCER:
- {
- return tsk_plugin_def_type_producer;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_CONSUMER: {
+ return tsk_plugin_def_type_consumer;
+ }
+ case PLUGIN_INDEX_PRODUCER: {
+ return tsk_plugin_def_type_producer;
+ }
+ }
#endif
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
#if PLUGIN_WASAPI_ENABLE
- switch(index){
- case PLUGIN_INDEX_CONSUMER:
- case PLUGIN_INDEX_PRODUCER:
- {
- return tsk_plugin_def_media_type_audio;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_CONSUMER:
+ case PLUGIN_INDEX_PRODUCER: {
+ return tsk_plugin_def_media_type_audio;
+ }
+ }
#endif
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
#if PLUGIN_WASAPI_ENABLE
- switch(index){
- case PLUGIN_INDEX_CONSUMER:
- {
- return plugin_wasapi_consumer_audio_plugin_def_t;
- }
- case PLUGIN_INDEX_PRODUCER:
- {
- return plugin_wasapi_producer_audio_plugin_def_t;
- }
- }
+ switch(index) {
+ case PLUGIN_INDEX_CONSUMER: {
+ return plugin_wasapi_consumer_audio_plugin_def_t;
+ }
+ case PLUGIN_INDEX_PRODUCER: {
+ return plugin_wasapi_producer_audio_plugin_def_t;
+ }
+ }
#endif
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
}
diff --git a/plugins/pluginWASAPI/plugin_wasapi_config.h b/plugins/pluginWASAPI/plugin_wasapi_config.h
index d5f742f..822c575 100755
--- a/plugins/pluginWASAPI/plugin_wasapi_config.h
+++ b/plugins/pluginWASAPI/plugin_wasapi_config.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -51,12 +51,12 @@
# define PLUGIN_WASAPI_UNDER_X86 1
#endif
-// Guards against C++ name mangling
+// Guards against C++ name mangling
#ifdef __cplusplus
# define PLUGIN_WASAPI_BEGIN_DECLS extern "C" {
# define PLUGIN_WASAPI_END_DECLS }
#else
-# define PLUGIN_WASAPI_BEGIN_DECLS
+# define PLUGIN_WASAPI_BEGIN_DECLS
# define PLUGIN_WASAPI_END_DECLS
#endif
@@ -72,7 +72,7 @@
#endif
#if HAVE_CONFIG_H
- #include <config.h>
+#include <config.h>
#endif
#endif // PLUGIN_WASAPI_CONFIG_H
diff --git a/plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx b/plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx
index 97db2eb..74d0383 100755
--- a/plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx
+++ b/plugins/pluginWASAPI/plugin_wasapi_consumer_audio.cxx
@@ -1,18 +1,18 @@
/*Copyright (C) 2013 Mamadou Diop
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -22,7 +22,7 @@
*/
#include "plugin_wasapi_utils.h"
-#include "tinydav/audio/tdav_consumer_audio.h"
+#include "tinydav/audio/tdav_consumer_audio.h"
#include "tsk_thread.h"
#include "tsk_memory.h"
@@ -52,58 +52,55 @@ struct plugin_wasapi_consumer_audio_s;
class AudioRender sealed
{
public:
- AudioRender();
- virtual ~AudioRender();
-
- int Prepare(struct plugin_wasapi_consumer_audio_s* wasapi, const tmedia_codec_t* codec);
- int UnPrepare();
- int Start();
- int Stop();
- int Pause();
- int Consume(const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr);
+ AudioRender();
+ virtual ~AudioRender();
+
+ int Prepare(struct plugin_wasapi_consumer_audio_s* wasapi, const tmedia_codec_t* codec);
+ int UnPrepare();
+ int Start();
+ int Stop();
+ int Pause();
+ int Consume(const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr);
private:
- tsk_size_t Read(void* data, tsk_size_t size);
- static void* TSK_STDCALL AsyncThread(void *pArg);
+ tsk_size_t Read(void* data, tsk_size_t size);
+ static void* TSK_STDCALL AsyncThread(void *pArg);
private:
- tsk_mutex_handle_t* m_hMutex;
- const struct plugin_wasapi_consumer_audio_s* m_pWrappedConsumer; // Must not take ref() otherwise dtor() will be never called (circular reference)
+ tsk_mutex_handle_t* m_hMutex;
+    const struct plugin_wasapi_consumer_audio_s* m_pWrappedConsumer; // Must not take ref() otherwise dtor() will never be called (circular reference)
#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
- IAudioClient2* m_pDevice;
+ IAudioClient2* m_pDevice;
#else
- IAudioClient* m_pDevice;
+ IAudioClient* m_pDevice;
#endif
- IAudioRenderClient* m_pClient;
- tsk_condwait_handle_t* m_hCondWait;
- tsk_thread_handle_t* m_ppTread[1];
- INT32 m_nBytesPerNotif;
- INT32 m_nSourceFrameSizeInBytes;
- UINT32 m_nMaxFrameCount;
- UINT32 m_nPtime;
- UINT32 m_nChannels;
-
- struct
- {
- struct
- {
- void* buffer;
- tsk_size_t size;
- } chunck;
- tsk_ssize_t leftBytes;
- SpeexBuffer* buffer;
- tsk_size_t size;
- } m_ring;
-
- bool m_bStarted;
- bool m_bPrepared;
- bool m_bPaused;
+ IAudioRenderClient* m_pClient;
+ tsk_condwait_handle_t* m_hCondWait;
+ tsk_thread_handle_t* m_ppTread[1];
+ INT32 m_nBytesPerNotif;
+ INT32 m_nSourceFrameSizeInBytes;
+ UINT32 m_nMaxFrameCount;
+ UINT32 m_nPtime;
+ UINT32 m_nChannels;
+
+ struct {
+ struct {
+ void* buffer;
+ tsk_size_t size;
+ } chunck;
+ tsk_ssize_t leftBytes;
+ SpeexBuffer* buffer;
+ tsk_size_t size;
+ } m_ring;
+
+ bool m_bStarted;
+ bool m_bPrepared;
+ bool m_bPaused;
};
-typedef struct plugin_wasapi_consumer_audio_s
-{
- TDAV_DECLARE_CONSUMER_AUDIO;
+typedef struct plugin_wasapi_consumer_audio_s {
+ TDAV_DECLARE_CONSUMER_AUDIO;
- AudioRender* pAudioRender;
+ AudioRender* pAudioRender;
}
plugin_wasapi_consumer_audio_t;
@@ -111,88 +108,83 @@ plugin_wasapi_consumer_audio_t;
/* ============ Media consumer Interface ================= */
static int plugin_wasapi_consumer_audio_set(tmedia_consumer_t* self, const tmedia_param_t* param)
-{
- return tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+{
+ return tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
}
static int plugin_wasapi_consumer_audio_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
-
- if(!wasapi || !codec || !wasapi->pAudioRender)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- TMEDIA_CONSUMER(wasapi)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_DECODING(codec);
- TMEDIA_CONSUMER(wasapi)->audio.in.channels = TMEDIA_CODEC_CHANNELS_AUDIO_DECODING(codec);
- TMEDIA_CONSUMER(wasapi)->audio.in.rate = TMEDIA_CODEC_RATE_DECODING(codec);
-
- TSK_DEBUG_INFO("WASAPI consumer: in.channels=%d, out.channles=%d, in.rate=%d, out.rate=%d, ptime=%d",
- TMEDIA_CONSUMER(wasapi)->audio.in.channels,
- TMEDIA_CONSUMER(wasapi)->audio.out.channels,
- TMEDIA_CONSUMER(wasapi)->audio.in.rate,
- TMEDIA_CONSUMER(wasapi)->audio.out.rate,
- TMEDIA_CONSUMER(wasapi)->audio.ptime);
-
- return wasapi->pAudioRender->Prepare(wasapi, codec);
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+
+ if(!wasapi || !codec || !wasapi->pAudioRender) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_CONSUMER(wasapi)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_DECODING(codec);
+ TMEDIA_CONSUMER(wasapi)->audio.in.channels = TMEDIA_CODEC_CHANNELS_AUDIO_DECODING(codec);
+ TMEDIA_CONSUMER(wasapi)->audio.in.rate = TMEDIA_CODEC_RATE_DECODING(codec);
+
+    TSK_DEBUG_INFO("WASAPI consumer: in.channels=%d, out.channels=%d, in.rate=%d, out.rate=%d, ptime=%d",
+ TMEDIA_CONSUMER(wasapi)->audio.in.channels,
+ TMEDIA_CONSUMER(wasapi)->audio.out.channels,
+ TMEDIA_CONSUMER(wasapi)->audio.in.rate,
+ TMEDIA_CONSUMER(wasapi)->audio.out.rate,
+ TMEDIA_CONSUMER(wasapi)->audio.ptime);
+
+ return wasapi->pAudioRender->Prepare(wasapi, codec);
}
static int plugin_wasapi_consumer_audio_start(tmedia_consumer_t* self)
{
- plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
- TSK_DEBUG_INFO("plugin_wasapi_consumer_audio_start()");
+ TSK_DEBUG_INFO("plugin_wasapi_consumer_audio_start()");
- if(!wasapi || !wasapi->pAudioRender)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!wasapi || !wasapi->pAudioRender) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return wasapi->pAudioRender->Start();
+ return wasapi->pAudioRender->Start();
}
static int plugin_wasapi_consumer_audio_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
-{
- plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
- if(!wasapi || !wasapi->pAudioRender || !buffer || !size)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- return wasapi->pAudioRender->Consume(buffer, size, proto_hdr);
+{
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ if(!wasapi || !wasapi->pAudioRender || !buffer || !size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ return wasapi->pAudioRender->Consume(buffer, size, proto_hdr);
}
static int plugin_wasapi_consumer_audio_pause(tmedia_consumer_t* self)
{
- plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
- if(!wasapi || !wasapi->pAudioRender)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!wasapi || !wasapi->pAudioRender) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return wasapi->pAudioRender->Pause();
+ return wasapi->pAudioRender->Pause();
}
static int plugin_wasapi_consumer_audio_stop(tmedia_consumer_t* self)
{
- plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ plugin_wasapi_consumer_audio_t* wasapi = (plugin_wasapi_consumer_audio_t*)self;
- TSK_DEBUG_INFO("plugin_wasapi_consumer_audio_stop()");
+ TSK_DEBUG_INFO("plugin_wasapi_consumer_audio_stop()");
- if(!wasapi || !wasapi->pAudioRender)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!wasapi || !wasapi->pAudioRender) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return wasapi->pAudioRender->Stop();
+ return wasapi->pAudioRender->Stop();
}
@@ -202,425 +194,386 @@ static int plugin_wasapi_consumer_audio_stop(tmedia_consumer_t* self)
AudioRender::AudioRender()
-: m_pDevice(NULL)
-, m_hMutex(NULL)
-, m_pClient(NULL)
-, m_hCondWait(NULL)
-, m_pWrappedConsumer(NULL)
-, m_nBytesPerNotif(0)
-, m_nSourceFrameSizeInBytes(0)
-, m_nMaxFrameCount(0)
-, m_nPtime(0)
-, m_nChannels(1)
-, m_bStarted(false)
-, m_bPrepared(false)
-, m_bPaused(false)
+ : m_pDevice(NULL)
+ , m_hMutex(NULL)
+ , m_pClient(NULL)
+ , m_hCondWait(NULL)
+ , m_pWrappedConsumer(NULL)
+ , m_nBytesPerNotif(0)
+ , m_nSourceFrameSizeInBytes(0)
+ , m_nMaxFrameCount(0)
+ , m_nPtime(0)
+ , m_nChannels(1)
+ , m_bStarted(false)
+ , m_bPrepared(false)
+ , m_bPaused(false)
{
- m_ppTread[0] = NULL;
- memset(&m_ring, 0, sizeof(m_ring));
+ m_ppTread[0] = NULL;
+ memset(&m_ring, 0, sizeof(m_ring));
- if(!(m_hMutex = tsk_mutex_create()))
- {
- TSK_DEBUG_ERROR("Failed to create mutex");
- }
+ if(!(m_hMutex = tsk_mutex_create())) {
+ TSK_DEBUG_ERROR("Failed to create mutex");
+ }
}
AudioRender::~AudioRender()
{
- Stop();
- UnPrepare();
+ Stop();
+ UnPrepare();
- tsk_mutex_destroy(&m_hMutex);
+ tsk_mutex_destroy(&m_hMutex);
}
int AudioRender::Prepare(plugin_wasapi_consumer_audio_t* wasapi, const tmedia_codec_t* codec)
{
- HRESULT hr = E_FAIL;
- int ret = 0;
- WAVEFORMATEX wfx = {0};
+ HRESULT hr = E_FAIL;
+ int ret = 0;
+ WAVEFORMATEX wfx = {0};
#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
- AudioClientProperties properties = {0};
+ AudioClientProperties properties = {0};
#endif
- LPCWSTR pwstrRenderId = NULL;
- IMMDeviceEnumerator *pEnumerator = NULL;
- IMMDevice *pDevice = NULL;
-
- tsk_mutex_lock(m_hMutex);
-
- if(m_bPrepared)
- {
- TSK_DEBUG_INFO("#WASAPI: Audio consumer already prepared");
- goto bail;
- }
-
- if(!wasapi || !codec)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(m_pDevice || m_pClient)
- {
- TSK_DEBUG_ERROR("consumer already prepared");
- CHECK_HR(hr = E_FAIL);
- }
+ LPCWSTR pwstrRenderId = NULL;
+ IMMDeviceEnumerator *pEnumerator = NULL;
+ IMMDevice *pDevice = NULL;
+
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bPrepared) {
+ TSK_DEBUG_INFO("#WASAPI: Audio consumer already prepared");
+ goto bail;
+ }
+
+ if(!wasapi || !codec) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(m_pDevice || m_pClient) {
+ TSK_DEBUG_ERROR("consumer already prepared");
+ CHECK_HR(hr = E_FAIL);
+ }
#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
- pwstrRenderId = GetDefaultAudioRenderId(AudioDeviceRole::Communications);
+ pwstrRenderId = GetDefaultAudioRenderId(AudioDeviceRole::Communications);
- if (NULL == pwstrRenderId)
- {
- PLUGIN_WASAPI_ERROR("GetDefaultAudioRenderId", HRESULT_FROM_WIN32(GetLastError()));
- CHECK_HR(hr = E_FAIL);
- }
+ if (NULL == pwstrRenderId) {
+ PLUGIN_WASAPI_ERROR("GetDefaultAudioRenderId", HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
- CHECK_HR(hr = ActivateAudioInterface(pwstrRenderId, __uuidof(IAudioClient2), (void**)&m_pDevice));
+ CHECK_HR(hr = ActivateAudioInterface(pwstrRenderId, __uuidof(IAudioClient2), (void**)&m_pDevice));
- // Win8 or WP8 only
- properties.cbSize = sizeof AudioClientProperties;
- properties.eCategory = AudioCategory_Communications;
- CHECK_HR(hr = m_pDevice->SetClientProperties(&properties));
+ // Win8 or WP8 only
+ properties.cbSize = sizeof AudioClientProperties;
+ properties.eCategory = AudioCategory_Communications;
+ CHECK_HR(hr = m_pDevice->SetClientProperties(&properties));
#else
- CHECK_HR(hr = CoCreateInstance(
- CLSID_MMDeviceEnumerator, NULL,
- CLSCTX_ALL, IID_IMMDeviceEnumerator,
- (void**)&pEnumerator));
+ CHECK_HR(hr = CoCreateInstance(
+ CLSID_MMDeviceEnumerator, NULL,
+ CLSCTX_ALL, IID_IMMDeviceEnumerator,
+ (void**)&pEnumerator));
CHECK_HR(hr = pEnumerator->GetDefaultAudioEndpoint(
- eRender, eCommunications, &pDevice));
+ eRender, eCommunications, &pDevice));
CHECK_HR(hr = pDevice->Activate(
- IID_IAudioClient, CLSCTX_ALL,
- NULL, (void**)&m_pDevice));
+ IID_IAudioClient, CLSCTX_ALL,
+ NULL, (void**)&m_pDevice));
#endif
-
-
-
-
- /* Set best format */
- {
- wfx.wFormatTag = WAVE_FORMAT_PCM;
- wfx.nChannels = TMEDIA_CONSUMER(wasapi)->audio.in.channels;
- wfx.nSamplesPerSec = TMEDIA_CONSUMER(wasapi)->audio.in.rate;
- wfx.wBitsPerSample = TMEDIA_CONSUMER(wasapi)->audio.bits_per_sample;
- wfx.nBlockAlign = (wfx.nChannels * wfx.wBitsPerSample/8);
- wfx.nAvgBytesPerSec = (wfx.nSamplesPerSec * wfx.nBlockAlign);
-
- PWAVEFORMATEX pwfxClosestMatch = NULL;
- hr = m_pDevice->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &wfx, &pwfxClosestMatch);
- if(hr != S_OK && hr != S_FALSE)
- {
- PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
- CHECK_HR(hr = E_FAIL);
- }
-
- if(hr == S_FALSE)
- {
- if(!pwfxClosestMatch)
- {
- TSK_DEBUG_ERROR("malloc(%d) failed", sizeof(WAVEFORMATEX));
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
-
- wfx.nSamplesPerSec = pwfxClosestMatch->nSamplesPerSec;
- wfx.nChannels = pwfxClosestMatch->nChannels;
+
+
+
+
+ /* Set best format */
+ {
+ wfx.wFormatTag = WAVE_FORMAT_PCM;
+ wfx.nChannels = TMEDIA_CONSUMER(wasapi)->audio.in.channels;
+ wfx.nSamplesPerSec = TMEDIA_CONSUMER(wasapi)->audio.in.rate;
+ wfx.wBitsPerSample = TMEDIA_CONSUMER(wasapi)->audio.bits_per_sample;
+ wfx.nBlockAlign = (wfx.nChannels * wfx.wBitsPerSample/8);
+ wfx.nAvgBytesPerSec = (wfx.nSamplesPerSec * wfx.nBlockAlign);
+
+ PWAVEFORMATEX pwfxClosestMatch = NULL;
+ hr = m_pDevice->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &wfx, &pwfxClosestMatch);
+ if(hr != S_OK && hr != S_FALSE) {
+ PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(hr == S_FALSE) {
+ if(!pwfxClosestMatch) {
+ TSK_DEBUG_ERROR("malloc(%d) failed", sizeof(WAVEFORMATEX));
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ wfx.nSamplesPerSec = pwfxClosestMatch->nSamplesPerSec;
+ wfx.nChannels = pwfxClosestMatch->nChannels;
#if 0
- wfx.wBitsPerSample = pwfxClosestMatch->wBitsPerSample;
+ wfx.wBitsPerSample = pwfxClosestMatch->wBitsPerSample;
#endif
- wfx.nBlockAlign = wfx.nChannels * (wfx.wBitsPerSample / 8);
- wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
- // Request resampler
- TMEDIA_CONSUMER(wasapi)->audio.out.rate = (uint32_t)wfx.nSamplesPerSec;
- TMEDIA_CONSUMER(wasapi)->audio.bits_per_sample = (uint8_t)wfx.wBitsPerSample;
- TMEDIA_CONSUMER(wasapi)->audio.out.channels = (uint8_t)wfx.nChannels;
-
- TSK_DEBUG_INFO("Audio device format fallback: rate=%d, bps=%d, channels=%d", wfx.nSamplesPerSec, wfx.wBitsPerSample, wfx.nChannels);
- }
- if(pwfxClosestMatch)
- {
- CoTaskMemFree(pwfxClosestMatch);
- }
- }
-
- m_nSourceFrameSizeInBytes = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
- m_nBytesPerNotif = ((wfx.nAvgBytesPerSec * TMEDIA_CONSUMER(wasapi)->audio.ptime)/1000) * wfx.nChannels;
-
- // Initialize
- CHECK_HR(hr = m_pDevice->Initialize(
- AUDCLNT_SHAREMODE_SHARED,
- 0x00000000,
- (PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT * WASAPI_MILLIS_TO_100NS(TMEDIA_CONSUMER(wasapi)->audio.ptime)) ,
- 0,
- &wfx,
- NULL));
-
- REFERENCE_TIME DefaultDevicePeriod, MinimumDevicePeriod;
- CHECK_HR(hr = m_pDevice->GetDevicePeriod(&DefaultDevicePeriod, &MinimumDevicePeriod));
-
- CHECK_HR(hr = m_pDevice->GetBufferSize(&m_nMaxFrameCount));
- TSK_DEBUG_INFO("#WASAPI (Playback): BufferSize=%u, DefaultDevicePeriod=%lld ms, MinimumDevicePeriod=%lldms", m_nMaxFrameCount, WASAPI_100NS_TO_MILLIS(DefaultDevicePeriod), WASAPI_100NS_TO_MILLIS(MinimumDevicePeriod));
-
- if(!m_hCondWait)
- {
- if(!(m_hCondWait = tsk_condwait_create()))
- {
- PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
- CHECK_HR(hr = E_FAIL);
- }
- }
-
- CHECK_HR(hr = m_pDevice->GetService(__uuidof(IAudioRenderClient), (void**)&m_pClient));
-
- int packetperbuffer = (1000 / TMEDIA_CONSUMER(wasapi)->audio.ptime);
- m_ring.chunck.size = (wfx.nSamplesPerSec * (wfx.wBitsPerSample >> 3) / packetperbuffer) * wfx.nChannels;
- m_ring.size = PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT * m_ring.chunck.size;
- if(!(m_ring.chunck.buffer = tsk_realloc(m_ring.chunck.buffer, m_ring.chunck.size)))
- {
- m_ring.size = 0;
- TSK_DEBUG_ERROR("Failed to allocate new buffer");
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- if(!m_ring.buffer)
- {
- m_ring.buffer = speex_buffer_init(m_ring.size);
- }
- else
- {
- int sret;
- if((sret = speex_buffer_resize(m_ring.buffer, m_ring.size)) < 0)
- {
- TSK_DEBUG_ERROR("speex_buffer_resize(%d) failed with error code=%d", m_ring.size, sret);
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- }
- if(!m_ring.buffer)
- {
- TSK_DEBUG_ERROR("Failed to create a new ring buffer with size = %d", m_ring.size);
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
+ wfx.nBlockAlign = wfx.nChannels * (wfx.wBitsPerSample / 8);
+ wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
+ // Request resampler
+ TMEDIA_CONSUMER(wasapi)->audio.out.rate = (uint32_t)wfx.nSamplesPerSec;
+ TMEDIA_CONSUMER(wasapi)->audio.bits_per_sample = (uint8_t)wfx.wBitsPerSample;
+ TMEDIA_CONSUMER(wasapi)->audio.out.channels = (uint8_t)wfx.nChannels;
+
+ TSK_DEBUG_INFO("Audio device format fallback: rate=%d, bps=%d, channels=%d", wfx.nSamplesPerSec, wfx.wBitsPerSample, wfx.nChannels);
+ }
+ if(pwfxClosestMatch) {
+ CoTaskMemFree(pwfxClosestMatch);
+ }
+ }
+
+ m_nSourceFrameSizeInBytes = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
+ m_nBytesPerNotif = ((wfx.nAvgBytesPerSec * TMEDIA_CONSUMER(wasapi)->audio.ptime)/1000) * wfx.nChannels;
+
+ // Initialize
+ CHECK_HR(hr = m_pDevice->Initialize(
+ AUDCLNT_SHAREMODE_SHARED,
+ 0x00000000,
+ (PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT * WASAPI_MILLIS_TO_100NS(TMEDIA_CONSUMER(wasapi)->audio.ptime)) ,
+ 0,
+ &wfx,
+ NULL));
+
+ REFERENCE_TIME DefaultDevicePeriod, MinimumDevicePeriod;
+ CHECK_HR(hr = m_pDevice->GetDevicePeriod(&DefaultDevicePeriod, &MinimumDevicePeriod));
+
+ CHECK_HR(hr = m_pDevice->GetBufferSize(&m_nMaxFrameCount));
+ TSK_DEBUG_INFO("#WASAPI (Playback): BufferSize=%u, DefaultDevicePeriod=%lld ms, MinimumDevicePeriod=%lldms", m_nMaxFrameCount, WASAPI_100NS_TO_MILLIS(DefaultDevicePeriod), WASAPI_100NS_TO_MILLIS(MinimumDevicePeriod));
+
+ if(!m_hCondWait) {
+ if(!(m_hCondWait = tsk_condwait_create())) {
+ PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+ }
+
+ CHECK_HR(hr = m_pDevice->GetService(__uuidof(IAudioRenderClient), (void**)&m_pClient));
+
+ int packetperbuffer = (1000 / TMEDIA_CONSUMER(wasapi)->audio.ptime);
+ m_ring.chunck.size = (wfx.nSamplesPerSec * (wfx.wBitsPerSample >> 3) / packetperbuffer) * wfx.nChannels;
+ m_ring.size = PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT * m_ring.chunck.size;
+ if(!(m_ring.chunck.buffer = tsk_realloc(m_ring.chunck.buffer, m_ring.chunck.size))) {
+ m_ring.size = 0;
+ TSK_DEBUG_ERROR("Failed to allocate new buffer");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ if(!m_ring.buffer) {
+ m_ring.buffer = speex_buffer_init(m_ring.size);
+ }
+ else {
+ int sret;
+ if((sret = speex_buffer_resize(m_ring.buffer, m_ring.size)) < 0) {
+ TSK_DEBUG_ERROR("speex_buffer_resize(%d) failed with error code=%d", m_ring.size, sret);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ }
+ if(!m_ring.buffer) {
+ TSK_DEBUG_ERROR("Failed to create a new ring buffer with size = %d", m_ring.size);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
bail:
- ret = SUCCEEDED(hr) ? 0 : -1;
- if (pwstrRenderId)
- {
- CoTaskMemFree((LPVOID)pwstrRenderId);
- }
- if(ret != 0)
- {
- UnPrepare();
- }
-
- if((m_bPrepared = (ret == 0)))
- {
- m_pWrappedConsumer = wasapi;
- m_nPtime = TMEDIA_CONSUMER(wasapi)->audio.ptime;
- m_nChannels = TMEDIA_CONSUMER(wasapi)->audio.out.channels;
- }
-
- tsk_mutex_unlock(m_hMutex);
-
- SafeRelease(&pEnumerator);
- SafeRelease(&pDevice);
-
- return ret;
+ ret = SUCCEEDED(hr) ? 0 : -1;
+ if (pwstrRenderId) {
+ CoTaskMemFree((LPVOID)pwstrRenderId);
+ }
+ if(ret != 0) {
+ UnPrepare();
+ }
+
+ if((m_bPrepared = (ret == 0))) {
+ m_pWrappedConsumer = wasapi;
+ m_nPtime = TMEDIA_CONSUMER(wasapi)->audio.ptime;
+ m_nChannels = TMEDIA_CONSUMER(wasapi)->audio.out.channels;
+ }
+
+ tsk_mutex_unlock(m_hMutex);
+
+ SafeRelease(&pEnumerator);
+ SafeRelease(&pDevice);
+
+ return ret;
}
int AudioRender::UnPrepare()
{
- tsk_mutex_lock(m_hMutex);
-
- if(m_hCondWait)
- {
- tsk_condwait_destroy(&m_hCondWait);
- }
- if(m_pDevice)
- {
- m_pDevice->Release(), m_pDevice = NULL;
- }
- if(m_pClient)
- {
- m_pClient->Release(), m_pClient = NULL;
- }
-
- TSK_FREE(m_ring.chunck.buffer);
- if(m_ring.buffer)
- {
- speex_buffer_destroy(m_ring.buffer);
- m_ring.buffer = NULL;
- }
-
- m_pWrappedConsumer = NULL;
-
- m_bPrepared = false;
-
- tsk_mutex_unlock(m_hMutex);
-
- return 0;
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_hCondWait) {
+ tsk_condwait_destroy(&m_hCondWait);
+ }
+ if(m_pDevice) {
+ m_pDevice->Release(), m_pDevice = NULL;
+ }
+ if(m_pClient) {
+ m_pClient->Release(), m_pClient = NULL;
+ }
+
+ TSK_FREE(m_ring.chunck.buffer);
+ if(m_ring.buffer) {
+ speex_buffer_destroy(m_ring.buffer);
+ m_ring.buffer = NULL;
+ }
+
+ m_pWrappedConsumer = NULL;
+
+ m_bPrepared = false;
+
+ tsk_mutex_unlock(m_hMutex);
+
+ return 0;
}
int AudioRender::Start()
{
- tsk_mutex_lock(m_hMutex);
-
- if(m_bStarted)
- {
- TSK_DEBUG_INFO("#WASAPI: Audio consumer already started");
- goto bail;
- }
- if(!m_bPrepared)
- {
- TSK_DEBUG_ERROR("Audio consumer not prepared");
- goto bail;
- }
-
- m_bStarted = true;
- if(!m_ppTread[0] && tsk_thread_create(m_ppTread, AudioRender::AsyncThread, this) != 0)
- {
- m_bStarted = false;
- goto bail;
- }
-
- HRESULT hr = m_pDevice->Start();
- if(!SUCCEEDED(hr))
- {
- PLUGIN_WASAPI_ERROR(hr);
- Stop();
- }
- m_bPaused = false;
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bStarted) {
+ TSK_DEBUG_INFO("#WASAPI: Audio consumer already started");
+ goto bail;
+ }
+ if(!m_bPrepared) {
+ TSK_DEBUG_ERROR("Audio consumer not prepared");
+ goto bail;
+ }
+
+ m_bStarted = true;
+ if(!m_ppTread[0] && tsk_thread_create(m_ppTread, AudioRender::AsyncThread, this) != 0) {
+ m_bStarted = false;
+ goto bail;
+ }
+
+ HRESULT hr = m_pDevice->Start();
+ if(!SUCCEEDED(hr)) {
+ PLUGIN_WASAPI_ERROR(hr);
+ Stop();
+ }
+ m_bPaused = false;
bail:
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return (m_bStarted ? 0 : -2);
+ return (m_bStarted ? 0 : -2);
}
int AudioRender::Stop()
{
- m_bStarted = false;
+ m_bStarted = false;
- tsk_mutex_lock(m_hMutex);
+ tsk_mutex_lock(m_hMutex);
- if (m_hCondWait)
- {
- tsk_condwait_broadcast(m_hCondWait);
- }
+ if (m_hCondWait) {
+ tsk_condwait_broadcast(m_hCondWait);
+ }
- if (m_ppTread[0])
- {
- tsk_thread_join(m_ppTread);
- }
+ if (m_ppTread[0]) {
+ tsk_thread_join(m_ppTread);
+ }
- if(m_pDevice)
- {
- m_pDevice->Stop();
- }
+ if(m_pDevice) {
+ m_pDevice->Stop();
+ }
- // will be prepared again before next start()
- UnPrepare();
+ // will be prepared again before next start()
+ UnPrepare();
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return 0;
+ return 0;
}
int AudioRender::Pause()
{
- m_bPaused = true;
+ m_bPaused = true;
- return 0;
+ return 0;
}
int AudioRender::Consume(const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer), buffer, size, proto_hdr);
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer), buffer, size, proto_hdr);
}
tsk_size_t AudioRender::Read(void* data, tsk_size_t size)
{
- tsk_ssize_t retSize = 0;
-
- m_ring.leftBytes += size;
- while (m_ring.leftBytes >= (tsk_ssize_t)m_ring.chunck.size)
- {
- m_ring.leftBytes -= m_ring.chunck.size;
- retSize = (tsk_ssize_t)tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer), m_ring.chunck.buffer, m_ring.chunck.size);
- tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer));
- speex_buffer_write(m_ring.buffer, m_ring.chunck.buffer, retSize);
- }
- // IMPORTANT: looks like there is a bug in speex: continously trying to read more than avail
- // many times can corrupt the buffer. At least on OS X 1.5
- int avail = speex_buffer_get_available(m_ring.buffer);
- //if(speex_buffer_get_available(m_ring.buffer) >= (tsk_ssize_t)size)
- //{
- retSize = speex_buffer_read(m_ring.buffer, data, TSK_MIN((int)size,avail));
- //}
- //else
- //{
- //memset(data, 0, size);
- //}
-
- return retSize;
+ tsk_ssize_t retSize = 0;
+
+ m_ring.leftBytes += size;
+ while (m_ring.leftBytes >= (tsk_ssize_t)m_ring.chunck.size) {
+ m_ring.leftBytes -= m_ring.chunck.size;
+ retSize = (tsk_ssize_t)tdav_consumer_audio_get(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer), m_ring.chunck.buffer, m_ring.chunck.size);
+ tdav_consumer_audio_tick(TDAV_CONSUMER_AUDIO(m_pWrappedConsumer));
+ speex_buffer_write(m_ring.buffer, m_ring.chunck.buffer, retSize);
+ }
+    // IMPORTANT: looks like there is a bug in speex: continuously trying to read more than avail
+ // many times can corrupt the buffer. At least on OS X 1.5
+ int avail = speex_buffer_get_available(m_ring.buffer);
+ //if(speex_buffer_get_available(m_ring.buffer) >= (tsk_ssize_t)size)
+ //{
+ retSize = speex_buffer_read(m_ring.buffer, data, TSK_MIN((int)size,avail));
+ //}
+ //else
+ //{
+ //memset(data, 0, size);
+ //}
+
+ return retSize;
}
void* TSK_STDCALL AudioRender::AsyncThread(void *pArg)
{
- HRESULT hr = S_OK;
- INT32 nFramesToWrite;
- UINT32 nPadding, nRead;
- int waitResult = 0;
- AudioRender* This = (AudioRender*)pArg;
+ HRESULT hr = S_OK;
+ INT32 nFramesToWrite;
+ UINT32 nPadding, nRead;
+ int waitResult = 0;
+ AudioRender* This = (AudioRender*)pArg;
- TSK_DEBUG_INFO("#WASAPI: __playback_thread -- START");
+ TSK_DEBUG_INFO("#WASAPI: __playback_thread -- START");
#define BREAK_WHILE tsk_mutex_unlock(This->m_hMutex); break;
- while(This->m_bStarted && SUCCEEDED(hr))
- {
- waitResult = tsk_condwait_timedwait(This->m_hCondWait, This->m_nPtime);
-
- tsk_mutex_lock(This->m_hMutex);
-
- if(!This->m_bStarted)
- {
- BREAK_WHILE;
- }
-
- if(waitResult == 0)
- {
- hr = This->m_pDevice->GetCurrentPadding(&nPadding);
- if (SUCCEEDED(hr))
- {
- BYTE* pRenderBuffer = NULL;
- nFramesToWrite = This->m_nMaxFrameCount - nPadding;
-
- if (nFramesToWrite > 0)
- {
- hr = This->m_pClient->GetBuffer(nFramesToWrite, &pRenderBuffer);
- if (SUCCEEDED(hr))
- {
- nRead = This->Read(pRenderBuffer, (nFramesToWrite * This->m_nSourceFrameSizeInBytes));
-
- // Release the buffer
- hr = This->m_pClient->ReleaseBuffer((nRead / This->m_nSourceFrameSizeInBytes), (nRead == 0) ? AUDCLNT_BUFFERFLAGS_SILENT: 0);
- }
- }
- }
- }
- else
- {
- BREAK_WHILE;
- }
-
- tsk_mutex_lock(This->m_hMutex);
- }// end-of-while
-
- if (!SUCCEEDED(hr))
- {
- PLUGIN_WASAPI_ERROR(hr);
- }
-
- TSK_DEBUG_INFO("WASAPI: __playback_thread(%s) -- STOP", (SUCCEEDED(hr) && waitResult == 0) ? "OK": "NOK");
-
- return NULL;
+ while(This->m_bStarted && SUCCEEDED(hr)) {
+ waitResult = tsk_condwait_timedwait(This->m_hCondWait, This->m_nPtime);
+
+ tsk_mutex_lock(This->m_hMutex);
+
+ if(!This->m_bStarted) {
+ BREAK_WHILE;
+ }
+
+ if(waitResult == 0) {
+ hr = This->m_pDevice->GetCurrentPadding(&nPadding);
+ if (SUCCEEDED(hr)) {
+ BYTE* pRenderBuffer = NULL;
+ nFramesToWrite = This->m_nMaxFrameCount - nPadding;
+
+ if (nFramesToWrite > 0) {
+ hr = This->m_pClient->GetBuffer(nFramesToWrite, &pRenderBuffer);
+ if (SUCCEEDED(hr)) {
+ nRead = This->Read(pRenderBuffer, (nFramesToWrite * This->m_nSourceFrameSizeInBytes));
+
+ // Release the buffer
+ hr = This->m_pClient->ReleaseBuffer((nRead / This->m_nSourceFrameSizeInBytes), (nRead == 0) ? AUDCLNT_BUFFERFLAGS_SILENT: 0);
+ }
+ }
+ }
+ }
+ else {
+ BREAK_WHILE;
+ }
+
+        tsk_mutex_unlock(This->m_hMutex);
+ }// end-of-while
+
+ if (!SUCCEEDED(hr)) {
+ PLUGIN_WASAPI_ERROR(hr);
+ }
+
+ TSK_DEBUG_INFO("WASAPI: __playback_thread(%s) -- STOP", (SUCCEEDED(hr) && waitResult == 0) ? "OK": "NOK");
+
+ return NULL;
}
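The loop above is the standard shared-mode WASAPI render pattern: query the queued padding, then fill the free frames via GetBuffer/ReleaseBuffer. A condensed sketch of one iteration, with a hypothetical ReadPcm() standing in for the consumer's jitter-buffer read:

#include <windows.h>
#include <audioclient.h>

UINT32 ReadPcm(BYTE* dst, UINT32 maxBytes); /* hypothetical: pulls decoded PCM from the jitter buffer */

static HRESULT RenderOnce(IAudioClient* pDevice, IAudioRenderClient* pClient,
                          UINT32 nMaxFrameCount, UINT32 nFrameSizeInBytes)
{
    UINT32 nPadding = 0;
    HRESULT hr = pDevice->GetCurrentPadding(&nPadding); /* frames still queued for playback */
    if (FAILED(hr)) {
        return hr;
    }
    UINT32 nFramesToWrite = nMaxFrameCount - nPadding;  /* free space in the endpoint buffer */
    if (nFramesToWrite == 0) {
        return S_OK;
    }
    BYTE* pRenderBuffer = NULL;
    hr = pClient->GetBuffer(nFramesToWrite, &pRenderBuffer);
    if (FAILED(hr)) {
        return hr;
    }
    UINT32 nRead = ReadPcm(pRenderBuffer, nFramesToWrite * nFrameSizeInBytes);
    /* flag silence when nothing was available, exactly as the thread above does */
    return pClient->ReleaseBuffer(nRead / nFrameSizeInBytes,
                                  (nRead == 0) ? AUDCLNT_BUFFERFLAGS_SILENT : 0);
}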
@@ -635,66 +588,60 @@ void* TSK_STDCALL AudioRender::AsyncThread(void *pArg)
/* constructor */
static tsk_object_t* plugin_wasapi_consumer_audio_ctor(tsk_object_t * self, va_list * app)
{
- plugin_wasapi_consumer_audio_t *wasapi = (plugin_wasapi_consumer_audio_t*)self;
- if(wasapi)
- {
- WASAPIUtils::Startup();
-
- /* init base */
- tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(wasapi));
- /* init self */
-
- wasapi->pAudioRender = new AudioRender();
- if(!wasapi->pAudioRender)
- {
- TSK_DEBUG_ERROR("Failed to create renderer");
- return tsk_null;
- }
- }
- return self;
+ plugin_wasapi_consumer_audio_t *wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ if(wasapi) {
+ WASAPIUtils::Startup();
+
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(wasapi));
+ /* init self */
+
+ wasapi->pAudioRender = new AudioRender();
+ if(!wasapi->pAudioRender) {
+ TSK_DEBUG_ERROR("Failed to create renderer");
+ return tsk_null;
+ }
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_wasapi_consumer_audio_dtor(tsk_object_t * self)
-{
- plugin_wasapi_consumer_audio_t *wasapi = (plugin_wasapi_consumer_audio_t*)self;
- if(wasapi)
- {
- /* stop */
- plugin_wasapi_consumer_audio_stop((tmedia_consumer_t*)self);
- /* deinit base */
- tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(wasapi));
- /* deinit self */
- if(wasapi->pAudioRender)
- {
- delete wasapi->pAudioRender;
- wasapi->pAudioRender = NULL;
- }
- }
-
- return self;
+{
+ plugin_wasapi_consumer_audio_t *wasapi = (plugin_wasapi_consumer_audio_t*)self;
+ if(wasapi) {
+ /* stop */
+ plugin_wasapi_consumer_audio_stop((tmedia_consumer_t*)self);
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(wasapi));
+ /* deinit self */
+ if(wasapi->pAudioRender) {
+ delete wasapi->pAudioRender;
+ wasapi->pAudioRender = NULL;
+ }
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_wasapi_consumer_audio_def_s =
-{
- sizeof(plugin_wasapi_consumer_audio_t),
- plugin_wasapi_consumer_audio_ctor,
- plugin_wasapi_consumer_audio_dtor,
- tdav_consumer_audio_cmp,
+static const tsk_object_def_t plugin_wasapi_consumer_audio_def_s = {
+ sizeof(plugin_wasapi_consumer_audio_t),
+ plugin_wasapi_consumer_audio_ctor,
+ plugin_wasapi_consumer_audio_dtor,
+ tdav_consumer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_wasapi_consumer_audio_plugin_def_s =
-{
- &plugin_wasapi_consumer_audio_def_s,
-
- tmedia_audio,
- "Microsoft Windows Audio Session API (WASAPI) consumer",
-
- plugin_wasapi_consumer_audio_set,
- plugin_wasapi_consumer_audio_prepare,
- plugin_wasapi_consumer_audio_start,
- plugin_wasapi_consumer_audio_consume,
- plugin_wasapi_consumer_audio_pause,
- plugin_wasapi_consumer_audio_stop
+static const tmedia_consumer_plugin_def_t plugin_wasapi_consumer_audio_plugin_def_s = {
+ &plugin_wasapi_consumer_audio_def_s,
+
+ tmedia_audio,
+ "Microsoft Windows Audio Session API (WASAPI) consumer",
+
+ plugin_wasapi_consumer_audio_set,
+ plugin_wasapi_consumer_audio_prepare,
+ plugin_wasapi_consumer_audio_start,
+ plugin_wasapi_consumer_audio_consume,
+ plugin_wasapi_consumer_audio_pause,
+ plugin_wasapi_consumer_audio_stop
};
const tmedia_consumer_plugin_def_t *plugin_wasapi_consumer_audio_plugin_def_t = &plugin_wasapi_consumer_audio_plugin_def_s;
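The ring sizing in Prepare() amounts to one ptime worth of PCM per chunk, with the ring holding one chunk per notification position. A worked example under assumed parameters (8 kHz, 16-bit, mono, 20 ms ptime; the notification count is a placeholder since the macro's value is not shown here):

#include <cstdio>

int main()
{
    /* Assumed negotiated format (illustrative only): 8000 Hz, 16-bit, mono, ptime = 20 ms. */
    const unsigned nSamplesPerSec = 8000;
    const unsigned wBitsPerSample = 16;
    const unsigned nChannels      = 1;
    const unsigned ptime          = 20;  /* ms per packet */
    const unsigned notifPosCount  = 10;  /* placeholder for PLUGIN_WASAPI_CONSUMER_NOTIF_POS_COUNT */

    const unsigned packetsPerSecond = 1000 / ptime;                                                     /* 50 */
    const unsigned chunkSize = (nSamplesPerSec * (wBitsPerSample >> 3) / packetsPerSecond) * nChannels; /* 320 bytes = 20 ms of PCM */
    const unsigned ringSize  = notifPosCount * chunkSize;                                               /* 3200 bytes */

    std::printf("chunk=%u bytes, ring=%u bytes\n", chunkSize, ringSize);
    return 0;
}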
diff --git a/plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx b/plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx
index 6f44ab0..92e03f7 100755
--- a/plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx
+++ b/plugins/pluginWASAPI/plugin_wasapi_producer_audio.cxx
@@ -1,18 +1,18 @@
/*Copyright (C) 2013 Mamadou Diop
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -22,7 +22,7 @@
*/
#include "plugin_wasapi_utils.h"
-#include "tinydav/audio/tdav_producer_audio.h"
+#include "tinydav/audio/tdav_producer_audio.h"
#include "tsk_thread.h"
#include "tsk_memory.h"
@@ -56,156 +56,147 @@ struct plugin_wasapi_producer_audio_s;
class AudioCapture
{
public:
- AudioCapture();
- virtual ~AudioCapture();
+ AudioCapture();
+ virtual ~AudioCapture();
- int Prepare(struct plugin_wasapi_producer_audio_s* wasapi, const tmedia_codec_t* codec);
- int UnPrepare();
- int Start();
- int Stop();
- int Pause();
+ int Prepare(struct plugin_wasapi_producer_audio_s* wasapi, const tmedia_codec_t* codec);
+ int UnPrepare();
+ int Start();
+ int Stop();
+ int Pause();
private:
- static void* TSK_STDCALL AsyncThread(void *pArg);
+ static void* TSK_STDCALL AsyncThread(void *pArg);
private:
- tsk_mutex_handle_t* m_hMutex;
+ tsk_mutex_handle_t* m_hMutex;
#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
- IAudioClient2* m_pDevice;
+ IAudioClient2* m_pDevice;
#else
- IAudioClient* m_pDevice;
+ IAudioClient* m_pDevice;
#endif
- IAudioCaptureClient* m_pClient;
- HANDLE m_hCaptureEvent;
- HANDLE m_hShutdownEvent;
- tsk_thread_handle_t* m_ppTread[1];
- INT32 m_nBytesPerNotif;
- INT32 m_nSourceFrameSizeInBytes;
-
- struct
- {
- tmedia_producer_enc_cb_f fn;
- const void* pcData;
- } m_callback;
-
- struct
- {
- struct
- {
- void* buffer;
- tsk_size_t size;
- } chunck;
- SpeexBuffer* buffer;
- tsk_size_t size;
- } m_ring;
- bool m_bStarted;
- bool m_bPrepared;
- bool m_bPaused;
+ IAudioCaptureClient* m_pClient;
+ HANDLE m_hCaptureEvent;
+ HANDLE m_hShutdownEvent;
+ tsk_thread_handle_t* m_ppTread[1];
+ INT32 m_nBytesPerNotif;
+ INT32 m_nSourceFrameSizeInBytes;
+
+ struct {
+ tmedia_producer_enc_cb_f fn;
+ const void* pcData;
+ } m_callback;
+
+ struct {
+ struct {
+ void* buffer;
+ tsk_size_t size;
+ } chunck;
+ SpeexBuffer* buffer;
+ tsk_size_t size;
+ } m_ring;
+ bool m_bStarted;
+ bool m_bPrepared;
+ bool m_bPaused;
};
-typedef struct plugin_wasapi_producer_audio_s
-{
- TDAV_DECLARE_PRODUCER_AUDIO;
- AudioCapture* pAudioCapture;
+typedef struct plugin_wasapi_producer_audio_s {
+ TDAV_DECLARE_PRODUCER_AUDIO;
+ AudioCapture* pAudioCapture;
}
plugin_wasapi_producer_audio_t;
/* ============ Media Producer Interface ================= */
static int plugin_wasapi_producer_audio_set(tmedia_producer_t* self, const tmedia_param_t* param)
-{
- plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
- if(param->plugin_type == tmedia_ppt_producer)
- {
- if(param->value_type == tmedia_pvt_int32)
- {
- if(tsk_striequals(param->key, "volume"))
- {
- return 0;
- }
- else if(tsk_striequals(param->key, "mute"))
- {
- //wasapi->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+{
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+ if(param->plugin_type == tmedia_ppt_producer) {
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "volume")) {
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "mute")) {
+ //wasapi->mute = (TSK_TO_INT32((uint8_t*)param->value) != 0);
#if !FIXME_SEND_SILENCE_ON_MUTE
- //if(wasapi->started){
- // if(wasapi->mute){
- //IDirectSoundCaptureBuffer_Stop(wasapi->captureBuffer);
- // }
- // else{
- //IDirectSoundCaptureBuffer_Start(wasapi->captureBuffer, DSBPLAY_LOOPING);
- // }
- //}
+ //if(wasapi->started){
+ // if(wasapi->mute){
+ //IDirectSoundCaptureBuffer_Stop(wasapi->captureBuffer);
+ // }
+ // else{
+ //IDirectSoundCaptureBuffer_Start(wasapi->captureBuffer, DSBPLAY_LOOPING);
+ // }
+ //}
#endif
- return 0;
- }
- }
- }
- return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
+ return 0;
+ }
+ }
+ }
+ return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(self), param);
}
static int plugin_wasapi_producer_audio_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
- if(!wasapi || !codec || !wasapi->pAudioCapture)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!wasapi || !codec || !wasapi->pAudioCapture) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- /* codec should have ptime */
- TMEDIA_PRODUCER(wasapi)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
- TMEDIA_PRODUCER(wasapi)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
- TMEDIA_PRODUCER(wasapi)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
+ /* codec should have ptime */
+ TMEDIA_PRODUCER(wasapi)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
+ TMEDIA_PRODUCER(wasapi)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
+ TMEDIA_PRODUCER(wasapi)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
- TSK_DEBUG_INFO("WASAPI producer: channels=%d, rate=%d, ptime=%d",
- TMEDIA_PRODUCER(wasapi)->audio.channels,
- TMEDIA_PRODUCER(wasapi)->audio.rate,
- TMEDIA_PRODUCER(wasapi)->audio.ptime);
+ TSK_DEBUG_INFO("WASAPI producer: channels=%d, rate=%d, ptime=%d",
+ TMEDIA_PRODUCER(wasapi)->audio.channels,
+ TMEDIA_PRODUCER(wasapi)->audio.rate,
+ TMEDIA_PRODUCER(wasapi)->audio.ptime);
- return wasapi->pAudioCapture->Prepare(wasapi, codec);
+ return wasapi->pAudioCapture->Prepare(wasapi, codec);
}
static int plugin_wasapi_producer_audio_start(tmedia_producer_t* self)
{
- plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
- TSK_DEBUG_INFO("plugin_wasapi_producer_audio_start()");
+ TSK_DEBUG_INFO("plugin_wasapi_producer_audio_start()");
- if(!wasapi || !wasapi->pAudioCapture){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!wasapi || !wasapi->pAudioCapture) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return wasapi->pAudioCapture->Start();
+ return wasapi->pAudioCapture->Start();
}
static int plugin_wasapi_producer_audio_pause(tmedia_producer_t* self)
{
- plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
- if(!wasapi || !wasapi->pAudioCapture){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!wasapi || !wasapi->pAudioCapture) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return wasapi->pAudioCapture->Pause();
+ return wasapi->pAudioCapture->Pause();
}
static int plugin_wasapi_producer_audio_stop(tmedia_producer_t* self)
{
- plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
+ plugin_wasapi_producer_audio_t* wasapi = (plugin_wasapi_producer_audio_t*)self;
- TSK_DEBUG_INFO("plugin_wasapi_producer_audio_stop()");
+ TSK_DEBUG_INFO("plugin_wasapi_producer_audio_stop()");
- if(!wasapi || !wasapi->pAudioCapture){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!wasapi || !wasapi->pAudioCapture) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return wasapi->pAudioCapture->Stop();
+ return wasapi->pAudioCapture->Stop();
}
@@ -215,426 +206,383 @@ static int plugin_wasapi_producer_audio_stop(tmedia_producer_t* self)
AudioCapture::AudioCapture()
-: m_pDevice(NULL)
-, m_hMutex(NULL)
-, m_pClient(NULL)
-, m_hCaptureEvent(NULL)
-, m_hShutdownEvent(NULL)
-, m_nBytesPerNotif(0)
-, m_nSourceFrameSizeInBytes(0)
-, m_bStarted(false)
-, m_bPrepared(false)
-, m_bPaused(false)
+ : m_pDevice(NULL)
+ , m_hMutex(NULL)
+ , m_pClient(NULL)
+ , m_hCaptureEvent(NULL)
+ , m_hShutdownEvent(NULL)
+ , m_nBytesPerNotif(0)
+ , m_nSourceFrameSizeInBytes(0)
+ , m_bStarted(false)
+ , m_bPrepared(false)
+ , m_bPaused(false)
{
- m_ppTread[0] = NULL;
- memset(&m_ring, 0, sizeof(m_ring));
+ m_ppTread[0] = NULL;
+ memset(&m_ring, 0, sizeof(m_ring));
- m_callback.fn = NULL, m_callback.pcData = NULL;
+ m_callback.fn = NULL, m_callback.pcData = NULL;
- if(!(m_hMutex = tsk_mutex_create()))
- {
- TSK_DEBUG_ERROR("Failed to create mutex");
- }
+ if(!(m_hMutex = tsk_mutex_create())) {
+ TSK_DEBUG_ERROR("Failed to create mutex");
+ }
}
AudioCapture::~AudioCapture()
{
- Stop();
- UnPrepare();
+ Stop();
+ UnPrepare();
- tsk_mutex_destroy(&m_hMutex);
+ tsk_mutex_destroy(&m_hMutex);
}
int AudioCapture::Prepare(plugin_wasapi_producer_audio_t* wasapi, const tmedia_codec_t* codec)
{
- HRESULT hr = S_OK;
- int ret = 0;
- WAVEFORMATEX wfx = {0};
+ HRESULT hr = S_OK;
+ int ret = 0;
+ WAVEFORMATEX wfx = {0};
#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
- AudioClientProperties properties = {0};
+ AudioClientProperties properties = {0};
#endif
- IMMDeviceEnumerator *pEnumerator = NULL;
- LPCWSTR pwstrCaptureId = NULL;
- IMMDevice *pDevice = NULL;
-
- tsk_mutex_lock(m_hMutex);
-
- if(m_bPrepared)
- {
- TSK_DEBUG_INFO("#WASAPI: Audio producer already prepared");
- goto bail;
- }
-
- if(!wasapi || !codec)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(m_pDevice || m_pClient)
- {
- TSK_DEBUG_ERROR("Producer already prepared");
- CHECK_HR(hr = E_FAIL);
- }
+ IMMDeviceEnumerator *pEnumerator = NULL;
+ LPCWSTR pwstrCaptureId = NULL;
+ IMMDevice *pDevice = NULL;
+
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bPrepared) {
+ TSK_DEBUG_INFO("#WASAPI: Audio producer already prepared");
+ goto bail;
+ }
+
+ if(!wasapi || !codec) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(m_pDevice || m_pClient) {
+ TSK_DEBUG_ERROR("Producer already prepared");
+ CHECK_HR(hr = E_FAIL);
+ }
#if PLUGIN_WASAPI_UNDER_WINDOWS_PHONE
- pwstrCaptureId = GetDefaultAudioCaptureId(AudioDeviceRole::Communications);
- if (NULL == pwstrCaptureId)
- {
- PLUGIN_WASAPI_ERROR("GetDefaultAudioCaptureId", HRESULT_FROM_WIN32(GetLastError()));
- CHECK_HR(hr = E_FAIL);
- }
- CHECK_HR(hr = ActivateAudioInterface(pwstrCaptureId, __uuidof(IAudioClient2), (void**)&m_pDevice));
-
- // Win8 or WP8 only
- properties.cbSize = sizeof AudioClientProperties;
- properties.eCategory = AudioCategory_Communications;
- CHECK_HR(hr = m_pDevice->SetClientProperties(&properties));
+ pwstrCaptureId = GetDefaultAudioCaptureId(AudioDeviceRole::Communications);
+ if (NULL == pwstrCaptureId) {
+ PLUGIN_WASAPI_ERROR("GetDefaultAudioCaptureId", HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+ CHECK_HR(hr = ActivateAudioInterface(pwstrCaptureId, __uuidof(IAudioClient2), (void**)&m_pDevice));
+
+ // Win8 or WP8 only
+ properties.cbSize = sizeof AudioClientProperties;
+ properties.eCategory = AudioCategory_Communications;
+ CHECK_HR(hr = m_pDevice->SetClientProperties(&properties));
#else
- CHECK_HR(hr = CoCreateInstance(
- CLSID_MMDeviceEnumerator, NULL,
- CLSCTX_ALL, IID_IMMDeviceEnumerator,
- (void**)&pEnumerator));
+ CHECK_HR(hr = CoCreateInstance(
+ CLSID_MMDeviceEnumerator, NULL,
+ CLSCTX_ALL, IID_IMMDeviceEnumerator,
+ (void**)&pEnumerator));
CHECK_HR(hr = pEnumerator->GetDefaultAudioEndpoint(
- eCapture, eCommunications, &pDevice));
+ eCapture, eCommunications, &pDevice));
CHECK_HR(hr = pDevice->Activate(
- IID_IAudioClient, CLSCTX_ALL,
- NULL, (void**)&m_pDevice));
+ IID_IAudioClient, CLSCTX_ALL,
+ NULL, (void**)&m_pDevice));
#endif
-
-
- /* Set best format */
- {
- wfx.wFormatTag = WAVE_FORMAT_PCM;
- wfx.nChannels = TMEDIA_PRODUCER(wasapi)->audio.channels;
- wfx.nSamplesPerSec = TMEDIA_PRODUCER(wasapi)->audio.rate;
- wfx.wBitsPerSample = TMEDIA_PRODUCER(wasapi)->audio.bits_per_sample;
- wfx.nBlockAlign = (wfx.nChannels * wfx.wBitsPerSample/8);
- wfx.nAvgBytesPerSec = (wfx.nSamplesPerSec * wfx.nBlockAlign);
-
- PWAVEFORMATEX pwfxClosestMatch = NULL;
- hr = m_pDevice->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &wfx, &pwfxClosestMatch);
- if(hr != S_OK && hr != S_FALSE)
- {
- CHECK_HR(hr);
- }
-
- if(hr == S_FALSE)
- {
- if(!pwfxClosestMatch)
- {
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- wfx.nChannels = pwfxClosestMatch->nChannels;
- wfx.nSamplesPerSec = pwfxClosestMatch->nSamplesPerSec;
+
+
+ /* Set best format */
+ {
+ wfx.wFormatTag = WAVE_FORMAT_PCM;
+ wfx.nChannels = TMEDIA_PRODUCER(wasapi)->audio.channels;
+ wfx.nSamplesPerSec = TMEDIA_PRODUCER(wasapi)->audio.rate;
+ wfx.wBitsPerSample = TMEDIA_PRODUCER(wasapi)->audio.bits_per_sample;
+ wfx.nBlockAlign = (wfx.nChannels * wfx.wBitsPerSample/8);
+ wfx.nAvgBytesPerSec = (wfx.nSamplesPerSec * wfx.nBlockAlign);
+
+ PWAVEFORMATEX pwfxClosestMatch = NULL;
+ hr = m_pDevice->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &wfx, &pwfxClosestMatch);
+ if(hr != S_OK && hr != S_FALSE) {
+ CHECK_HR(hr);
+ }
+
+ if(hr == S_FALSE) {
+ if(!pwfxClosestMatch) {
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ wfx.nChannels = pwfxClosestMatch->nChannels;
+ wfx.nSamplesPerSec = pwfxClosestMatch->nSamplesPerSec;
#if 0
- wfx.wBitsPerSample = pwfxClosestMatch->wBitsPerSample;
+ wfx.wBitsPerSample = pwfxClosestMatch->wBitsPerSample;
#endif
- wfx.nBlockAlign = wfx.nChannels * (wfx.wBitsPerSample / 8);
- wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
- // Request resampler
- TMEDIA_PRODUCER(wasapi)->audio.rate = (uint32_t)wfx.nSamplesPerSec;
- TMEDIA_PRODUCER(wasapi)->audio.bits_per_sample = (uint8_t)wfx.wBitsPerSample;
- TMEDIA_PRODUCER(wasapi)->audio.channels = (uint8_t)wfx.nChannels;
-
- TSK_DEBUG_INFO("Audio device format fallback: rate=%d, bps=%d, channels=%d", wfx.nSamplesPerSec, wfx.wBitsPerSample, wfx.nChannels);
- }
- if(pwfxClosestMatch)
- {
- CoTaskMemFree(pwfxClosestMatch);
- }
- }
-
- m_nSourceFrameSizeInBytes = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
- m_nBytesPerNotif = ((wfx.nAvgBytesPerSec * TMEDIA_PRODUCER(wasapi)->audio.ptime)/1000) * wfx.nChannels;
-
- // Initialize
- CHECK_HR(hr = m_pDevice->Initialize(
- AUDCLNT_SHAREMODE_SHARED,
- AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
- (PLUGIN_WASAPI_PRODUCER_NOTIF_POS_COUNT * WASAPI_MILLIS_TO_100NS(TMEDIA_PRODUCER(wasapi)->audio.ptime)),
- 0,
- &wfx,
- NULL));
-
- REFERENCE_TIME DefaultDevicePeriod, MinimumDevicePeriod;
- CHECK_HR(hr = m_pDevice->GetDevicePeriod(&DefaultDevicePeriod, &MinimumDevicePeriod));
- TSK_DEBUG_INFO("#WASAPI(Capture): DefaultDevicePeriod=%lld ms, MinimumDevicePeriod=%lldms", WASAPI_100NS_TO_MILLIS(DefaultDevicePeriod), WASAPI_100NS_TO_MILLIS(MinimumDevicePeriod));
-
- if(!m_hCaptureEvent)
- {
- if(!(m_hCaptureEvent = CreateEventEx(NULL, NULL, 0, EVENT_ALL_ACCESS)))
- {
- PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
- CHECK_HR(hr = E_FAIL);
- }
- }
- if(!m_hShutdownEvent)
- {
- if(!(m_hShutdownEvent = CreateEventEx(NULL, NULL, CREATE_EVENT_MANUAL_RESET, EVENT_ALL_ACCESS)))
- {
- PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
- CHECK_HR(hr = E_FAIL);
- }
- }
-
- CHECK_HR(hr = m_pDevice->SetEventHandle(m_hCaptureEvent));
-
- CHECK_HR(hr = m_pDevice->GetService(__uuidof(IAudioCaptureClient), (void**)&m_pClient));
-
- int packetperbuffer = (1000 / TMEDIA_PRODUCER(wasapi)->audio.ptime);
- m_ring.chunck.size = (wfx.nSamplesPerSec * (wfx.wBitsPerSample >> 3) / packetperbuffer) * wfx.nChannels;
- TSK_DEBUG_INFO("#WASAPI: Audio producer ring chunk size = %u", m_ring.chunck.size);
- // allocate our chunck buffer
- if(!(m_ring.chunck.buffer = tsk_realloc(m_ring.chunck.buffer, m_ring.chunck.size)))
- {
- TSK_DEBUG_ERROR("Failed to allocate new buffer");
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- // create ringbuffer
- m_ring.size = PLUGIN_WASAPI_PRODUCER_NOTIF_POS_COUNT * m_ring.chunck.size;
- TSK_DEBUG_INFO("#WASAPI: Audio producer ring size = %u", m_ring.size);
- if(!m_ring.buffer)
- {
- m_ring.buffer = speex_buffer_init(m_ring.size);
- }
- else
- {
- int sret;
- if((sret = speex_buffer_resize(m_ring.buffer, m_ring.size)) < 0)
- {
- TSK_DEBUG_ERROR("speex_buffer_resize(%d) failed with error code=%d", m_ring.size, sret);
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- }
- if(!m_ring.buffer)
- {
- TSK_DEBUG_ERROR("Failed to create a new ring buffer with size = %d", m_ring.size);
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
-
- m_callback.fn = TMEDIA_PRODUCER(wasapi)->enc_cb.callback;
- m_callback.pcData = TMEDIA_PRODUCER(wasapi)->enc_cb.callback_data;
+ wfx.nBlockAlign = wfx.nChannels * (wfx.wBitsPerSample / 8);
+ wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
+ // Request resampler
+ TMEDIA_PRODUCER(wasapi)->audio.rate = (uint32_t)wfx.nSamplesPerSec;
+ TMEDIA_PRODUCER(wasapi)->audio.bits_per_sample = (uint8_t)wfx.wBitsPerSample;
+ TMEDIA_PRODUCER(wasapi)->audio.channels = (uint8_t)wfx.nChannels;
+
+ TSK_DEBUG_INFO("Audio device format fallback: rate=%d, bps=%d, channels=%d", wfx.nSamplesPerSec, wfx.wBitsPerSample, wfx.nChannels);
+ }
+ if(pwfxClosestMatch) {
+ CoTaskMemFree(pwfxClosestMatch);
+ }
+ }
+
+ m_nSourceFrameSizeInBytes = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
+ m_nBytesPerNotif = ((wfx.nAvgBytesPerSec * TMEDIA_PRODUCER(wasapi)->audio.ptime)/1000) * wfx.nChannels;
+
+ // Initialize
+ CHECK_HR(hr = m_pDevice->Initialize(
+ AUDCLNT_SHAREMODE_SHARED,
+ AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
+ (PLUGIN_WASAPI_PRODUCER_NOTIF_POS_COUNT * WASAPI_MILLIS_TO_100NS(TMEDIA_PRODUCER(wasapi)->audio.ptime)),
+ 0,
+ &wfx,
+ NULL));
+
+ REFERENCE_TIME DefaultDevicePeriod, MinimumDevicePeriod;
+ CHECK_HR(hr = m_pDevice->GetDevicePeriod(&DefaultDevicePeriod, &MinimumDevicePeriod));
+ TSK_DEBUG_INFO("#WASAPI(Capture): DefaultDevicePeriod=%lld ms, MinimumDevicePeriod=%lldms", WASAPI_100NS_TO_MILLIS(DefaultDevicePeriod), WASAPI_100NS_TO_MILLIS(MinimumDevicePeriod));
+
+ if(!m_hCaptureEvent) {
+ if(!(m_hCaptureEvent = CreateEventEx(NULL, NULL, 0, EVENT_ALL_ACCESS))) {
+ PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+ }
+ if(!m_hShutdownEvent) {
+ if(!(m_hShutdownEvent = CreateEventEx(NULL, NULL, CREATE_EVENT_MANUAL_RESET, EVENT_ALL_ACCESS))) {
+ PLUGIN_WASAPI_ERROR(HRESULT_FROM_WIN32(GetLastError()));
+ CHECK_HR(hr = E_FAIL);
+ }
+ }
+
+ CHECK_HR(hr = m_pDevice->SetEventHandle(m_hCaptureEvent));
+
+ CHECK_HR(hr = m_pDevice->GetService(__uuidof(IAudioCaptureClient), (void**)&m_pClient));
+
+ int packetperbuffer = (1000 / TMEDIA_PRODUCER(wasapi)->audio.ptime);
+ m_ring.chunck.size = (wfx.nSamplesPerSec * (wfx.wBitsPerSample >> 3) / packetperbuffer) * wfx.nChannels;
+ TSK_DEBUG_INFO("#WASAPI: Audio producer ring chunk size = %u", m_ring.chunck.size);
+ // allocate our chunck buffer
+ if(!(m_ring.chunck.buffer = tsk_realloc(m_ring.chunck.buffer, m_ring.chunck.size))) {
+ TSK_DEBUG_ERROR("Failed to allocate new buffer");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ // create ringbuffer
+ m_ring.size = PLUGIN_WASAPI_PRODUCER_NOTIF_POS_COUNT * m_ring.chunck.size;
+ TSK_DEBUG_INFO("#WASAPI: Audio producer ring size = %u", m_ring.size);
+ if(!m_ring.buffer) {
+ m_ring.buffer = speex_buffer_init(m_ring.size);
+ }
+ else {
+ int sret;
+ if((sret = speex_buffer_resize(m_ring.buffer, m_ring.size)) < 0) {
+ TSK_DEBUG_ERROR("speex_buffer_resize(%d) failed with error code=%d", m_ring.size, sret);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ }
+ if(!m_ring.buffer) {
+ TSK_DEBUG_ERROR("Failed to create a new ring buffer with size = %d", m_ring.size);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ m_callback.fn = TMEDIA_PRODUCER(wasapi)->enc_cb.callback;
+ m_callback.pcData = TMEDIA_PRODUCER(wasapi)->enc_cb.callback_data;
bail:
- ret = SUCCEEDED(hr) ? 0 : -1;
- if (pwstrCaptureId)
- {
- CoTaskMemFree((LPVOID)pwstrCaptureId);
- }
- if(ret != 0)
- {
- UnPrepare();
- }
- m_bPrepared = (ret == 0);
-
- tsk_mutex_unlock(m_hMutex);
-
- SafeRelease(&pEnumerator);
- SafeRelease(&pDevice);
-
- return ret;
+ ret = SUCCEEDED(hr) ? 0 : -1;
+ if (pwstrCaptureId) {
+ CoTaskMemFree((LPVOID)pwstrCaptureId);
+ }
+ if(ret != 0) {
+ UnPrepare();
+ }
+ m_bPrepared = (ret == 0);
+
+ tsk_mutex_unlock(m_hMutex);
+
+ SafeRelease(&pEnumerator);
+ SafeRelease(&pDevice);
+
+ return ret;
}
int AudioCapture::UnPrepare()
{
- tsk_mutex_lock(m_hMutex);
-
- if(m_hCaptureEvent)
- {
- CloseHandle(m_hCaptureEvent), m_hCaptureEvent = NULL;
- }
- if(m_hShutdownEvent)
- {
- CloseHandle(m_hShutdownEvent), m_hShutdownEvent = NULL;
- }
- if(m_pDevice)
- {
- m_pDevice->Release(), m_pDevice = NULL;
- }
- if(m_pClient)
- {
- m_pClient->Release(), m_pClient = NULL;
- }
-
- TSK_FREE(m_ring.chunck.buffer);
- if(m_ring.buffer)
- {
- speex_buffer_destroy(m_ring.buffer);
- m_ring.buffer = NULL;
- }
-
- m_callback.fn = NULL;
- m_callback.pcData = NULL;
-
- m_bPrepared = false;
-
- tsk_mutex_unlock(m_hMutex);
-
- return 0;
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_hCaptureEvent) {
+ CloseHandle(m_hCaptureEvent), m_hCaptureEvent = NULL;
+ }
+ if(m_hShutdownEvent) {
+ CloseHandle(m_hShutdownEvent), m_hShutdownEvent = NULL;
+ }
+ if(m_pDevice) {
+ m_pDevice->Release(), m_pDevice = NULL;
+ }
+ if(m_pClient) {
+ m_pClient->Release(), m_pClient = NULL;
+ }
+
+ TSK_FREE(m_ring.chunck.buffer);
+ if(m_ring.buffer) {
+ speex_buffer_destroy(m_ring.buffer);
+ m_ring.buffer = NULL;
+ }
+
+ m_callback.fn = NULL;
+ m_callback.pcData = NULL;
+
+ m_bPrepared = false;
+
+ tsk_mutex_unlock(m_hMutex);
+
+ return 0;
}
int AudioCapture::Start()
{
- tsk_mutex_lock(m_hMutex);
-
- if(m_bStarted)
- {
- TSK_DEBUG_INFO("#WASAPI: Audio producer already started");
- goto bail;
- }
- if(!m_bPrepared)
- {
- TSK_DEBUG_ERROR("Audio producer not prepared");
- goto bail;
- }
-
- m_bStarted = true;
- if(!m_ppTread[0] && tsk_thread_create(m_ppTread, AudioCapture::AsyncThread, this) != 0)
- {
- m_bStarted = false;
- goto bail;
- }
-
- HRESULT hr = m_pDevice->Start();
- if(!SUCCEEDED(hr))
- {
- PLUGIN_WASAPI_ERROR(hr);
- Stop();
- }
- m_bPaused = false;
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bStarted) {
+ TSK_DEBUG_INFO("#WASAPI: Audio producer already started");
+ goto bail;
+ }
+ if(!m_bPrepared) {
+ TSK_DEBUG_ERROR("Audio producer not prepared");
+ goto bail;
+ }
+
+ m_bStarted = true;
+ if(!m_ppTread[0] && tsk_thread_create(m_ppTread, AudioCapture::AsyncThread, this) != 0) {
+ m_bStarted = false;
+ goto bail;
+ }
+
+ HRESULT hr = m_pDevice->Start();
+ if(!SUCCEEDED(hr)) {
+ PLUGIN_WASAPI_ERROR(hr);
+ Stop();
+ }
+ m_bPaused = false;
bail:
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return (m_bStarted ? 0 : -2);
+ return (m_bStarted ? 0 : -2);
}
int AudioCapture::Stop()
{
- m_bStarted = false;
+ m_bStarted = false;
- tsk_mutex_lock(m_hMutex);
+ tsk_mutex_lock(m_hMutex);
- if (m_hShutdownEvent)
- {
- SetEvent(m_hShutdownEvent);
- }
+ if (m_hShutdownEvent) {
+ SetEvent(m_hShutdownEvent);
+ }
- if (m_ppTread[0])
- {
- tsk_thread_join(m_ppTread);
- }
+ if (m_ppTread[0]) {
+ tsk_thread_join(m_ppTread);
+ }
- if(m_pDevice)
- {
- m_pDevice->Stop();
- }
+ if(m_pDevice) {
+ m_pDevice->Stop();
+ }
- // will be prepared again before next start()
- UnPrepare();
+ // will be prepared again before next start()
+ UnPrepare();
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return 0;
+ return 0;
}
int AudioCapture::Pause()
{
- tsk_mutex_lock(m_hMutex);
+ tsk_mutex_lock(m_hMutex);
- m_bPaused = true;
+ m_bPaused = true;
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return 0;
+ return 0;
}
void* TSK_STDCALL AudioCapture::AsyncThread(void *pArg)
{
- HRESULT hr = S_OK;
- BYTE* pbData = NULL;
- UINT32 nFrames = 0;
- DWORD dwFlags = 0;
- UINT32 incomingBufferSize;
- INT32 avail;
- UINT32 nNextPacketSize;
- AudioCapture* This = (AudioCapture*)pArg;
-
- HANDLE eventHandles[] = {
- This->m_hCaptureEvent, // WAIT_OBJECT0
- This->m_hShutdownEvent // WAIT_OBJECT1
- };
-
- TSK_DEBUG_INFO("#WASAPI: __record_thread -- START");
+ HRESULT hr = S_OK;
+ BYTE* pbData = NULL;
+ UINT32 nFrames = 0;
+ DWORD dwFlags = 0;
+ UINT32 incomingBufferSize;
+ INT32 avail;
+ UINT32 nNextPacketSize;
+ AudioCapture* This = (AudioCapture*)pArg;
+
+ HANDLE eventHandles[] = {
+ This->m_hCaptureEvent, // WAIT_OBJECT0
+ This->m_hShutdownEvent // WAIT_OBJECT1
+ };
+
+ TSK_DEBUG_INFO("#WASAPI: __record_thread -- START");
#define BREAK_WHILE tsk_mutex_unlock(This->m_hMutex); break;
- while(This->m_bStarted && SUCCEEDED(hr))
- {
- DWORD waitResult = WaitForMultipleObjectsEx(SIZEOF_ARRAY(eventHandles), eventHandles, FALSE, INFINITE, FALSE);
-
- tsk_mutex_lock(This->m_hMutex);
-
- if(!This->m_bStarted)
- {
- BREAK_WHILE;
- }
-
- if(waitResult == WAIT_OBJECT_0 && This->m_callback.fn)
- {
- hr = This->m_pClient->GetNextPacketSize(&nNextPacketSize);
- while(SUCCEEDED(hr) && nNextPacketSize >0)
- {
- hr = This->m_pClient->GetBuffer(&pbData, &nFrames, &dwFlags, NULL, NULL);
- if(SUCCEEDED(hr) && pbData && nFrames)
- {
- if((dwFlags & AUDCLNT_BUFFERFLAGS_SILENT) != AUDCLNT_BUFFERFLAGS_SILENT)
- {
- incomingBufferSize = nFrames * This->m_nSourceFrameSizeInBytes;
- speex_buffer_write(This->m_ring.buffer, pbData, incomingBufferSize);
- avail = speex_buffer_get_available(This->m_ring.buffer);
- while (This->m_bStarted && avail >= (INT32)This->m_ring.chunck.size)
- {
- avail -= speex_buffer_read(This->m_ring.buffer, This->m_ring.chunck.buffer, This->m_ring.chunck.size);
+ while(This->m_bStarted && SUCCEEDED(hr)) {
+ DWORD waitResult = WaitForMultipleObjectsEx(SIZEOF_ARRAY(eventHandles), eventHandles, FALSE, INFINITE, FALSE);
+
+ tsk_mutex_lock(This->m_hMutex);
+
+ if(!This->m_bStarted) {
+ BREAK_WHILE;
+ }
+
+ if(waitResult == WAIT_OBJECT_0 && This->m_callback.fn) {
+ hr = This->m_pClient->GetNextPacketSize(&nNextPacketSize);
+ while(SUCCEEDED(hr) && nNextPacketSize >0) {
+ hr = This->m_pClient->GetBuffer(&pbData, &nFrames, &dwFlags, NULL, NULL);
+ if(SUCCEEDED(hr) && pbData && nFrames) {
+ if((dwFlags & AUDCLNT_BUFFERFLAGS_SILENT) != AUDCLNT_BUFFERFLAGS_SILENT) {
+ incomingBufferSize = nFrames * This->m_nSourceFrameSizeInBytes;
+ speex_buffer_write(This->m_ring.buffer, pbData, incomingBufferSize);
+ avail = speex_buffer_get_available(This->m_ring.buffer);
+ while (This->m_bStarted && avail >= (INT32)This->m_ring.chunck.size) {
+ avail -= speex_buffer_read(This->m_ring.buffer, This->m_ring.chunck.buffer, This->m_ring.chunck.size);
#if 0
- {
- static FILE* f = fopen("./wasapi_producer.raw", "w+");
- fwrite(This->m_ring.chunck.buffer, 1, This->m_ring.chunck.size, f);
- }
+ {
+ static FILE* f = fopen("./wasapi_producer.raw", "w+");
+ fwrite(This->m_ring.chunck.buffer, 1, This->m_ring.chunck.size, f);
+ }
#endif
- This->m_callback.fn(This->m_callback.pcData, This->m_ring.chunck.buffer, This->m_ring.chunck.size);
- }
- }
-
- if (SUCCEEDED(hr))
- {
- hr = This->m_pClient->ReleaseBuffer(nFrames);
- }
- if (SUCCEEDED(hr))
- {
- hr = This->m_pClient->GetNextPacketSize(&nNextPacketSize);
- }
- }
- }
- }
- else if(waitResult != WAIT_OBJECT_0)
- {
- BREAK_WHILE;
- }
-
- tsk_mutex_unlock(This->m_hMutex);
- }// end-of-while
-
- if (!SUCCEEDED(hr))
- {
- PLUGIN_WASAPI_ERROR(hr);
- }
-
- TSK_DEBUG_INFO("WASAPI: __record_thread(%s) -- STOP", SUCCEEDED(hr) ? "OK": "NOK");
-
- return NULL;
+ This->m_callback.fn(This->m_callback.pcData, This->m_ring.chunck.buffer, This->m_ring.chunck.size);
+ }
+ }
+
+ if (SUCCEEDED(hr)) {
+ hr = This->m_pClient->ReleaseBuffer(nFrames);
+ }
+ if (SUCCEEDED(hr)) {
+ hr = This->m_pClient->GetNextPacketSize(&nNextPacketSize);
+ }
+ }
+ }
+ }
+ else if(waitResult != WAIT_OBJECT_0) {
+ BREAK_WHILE;
+ }
+
+ tsk_mutex_unlock(This->m_hMutex);
+ }// end-of-while
+
+ if (!SUCCEEDED(hr)) {
+ PLUGIN_WASAPI_ERROR(hr);
+ }
+
+ TSK_DEBUG_INFO("WASAPI: __record_thread(%s) -- STOP", SUCCEEDED(hr) ? "OK": "NOK");
+
+ return NULL;
}
@@ -649,64 +597,58 @@ void* TSK_STDCALL AudioCapture::AsyncThread(void *pArg)
/* constructor */
static tsk_object_t* plugin_wasapi_producer_audio_ctor(tsk_object_t * self, va_list * app)
{
- plugin_wasapi_producer_audio_t *wasapi = (plugin_wasapi_producer_audio_t*)self;
- if(wasapi)
- {
- WASAPIUtils::Startup();
-
- /* init base */
- tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(wasapi));
- /* init self */
-
- wasapi->pAudioCapture = new AudioCapture();
- if(!wasapi->pAudioCapture)
- {
- TSK_DEBUG_ERROR("Failed to create Audio capture device");
- return tsk_null;
- }
- }
- return self;
+ plugin_wasapi_producer_audio_t *wasapi = (plugin_wasapi_producer_audio_t*)self;
+ if(wasapi) {
+ WASAPIUtils::Startup();
+
+ /* init base */
+ tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(wasapi));
+ /* init self */
+
+ wasapi->pAudioCapture = new AudioCapture();
+ if(!wasapi->pAudioCapture) {
+ TSK_DEBUG_ERROR("Failed to create Audio capture device");
+ return tsk_null;
+ }
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_wasapi_producer_audio_dtor(tsk_object_t * self)
-{
- plugin_wasapi_producer_audio_t *wasapi = (plugin_wasapi_producer_audio_t*)self;
- if(wasapi)
- {
- /* stop */
- plugin_wasapi_producer_audio_stop((tmedia_producer_t*)self);
- /* deinit base */
- tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(wasapi));
- /* deinit self */
- if(wasapi->pAudioCapture)
- {
- delete wasapi->pAudioCapture;
- wasapi->pAudioCapture = NULL;
- }
- }
-
- return self;
+{
+ plugin_wasapi_producer_audio_t *wasapi = (plugin_wasapi_producer_audio_t*)self;
+ if(wasapi) {
+ /* stop */
+ plugin_wasapi_producer_audio_stop((tmedia_producer_t*)self);
+ /* deinit base */
+ tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(wasapi));
+ /* deinit self */
+ if(wasapi->pAudioCapture) {
+ delete wasapi->pAudioCapture;
+ wasapi->pAudioCapture = NULL;
+ }
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_wasapi_producer_audio_def_s =
-{
- sizeof(plugin_wasapi_producer_audio_t),
- plugin_wasapi_producer_audio_ctor,
- plugin_wasapi_producer_audio_dtor,
- tdav_producer_audio_cmp,
+static const tsk_object_def_t plugin_wasapi_producer_audio_def_s = {
+ sizeof(plugin_wasapi_producer_audio_t),
+ plugin_wasapi_producer_audio_ctor,
+ plugin_wasapi_producer_audio_dtor,
+ tdav_producer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t plugin_wasapi_producer_audio_plugin_def_s =
-{
- &plugin_wasapi_producer_audio_def_s,
+static const tmedia_producer_plugin_def_t plugin_wasapi_producer_audio_plugin_def_s = {
+ &plugin_wasapi_producer_audio_def_s,
- tmedia_audio,
- "Microsoft Windows Audio Session API (WASAPI) producer",
+ tmedia_audio,
+ "Microsoft Windows Audio Session API (WASAPI) producer",
- plugin_wasapi_producer_audio_set,
- plugin_wasapi_producer_audio_prepare,
- plugin_wasapi_producer_audio_start,
- plugin_wasapi_producer_audio_pause,
- plugin_wasapi_producer_audio_stop
+ plugin_wasapi_producer_audio_set,
+ plugin_wasapi_producer_audio_prepare,
+ plugin_wasapi_producer_audio_start,
+ plugin_wasapi_producer_audio_pause,
+ plugin_wasapi_producer_audio_stop
};
const tmedia_producer_plugin_def_t *plugin_wasapi_producer_audio_plugin_def_t = &plugin_wasapi_producer_audio_plugin_def_s;
diff --git a/plugins/pluginWASAPI/plugin_wasapi_tdav.cxx b/plugins/pluginWASAPI/plugin_wasapi_tdav.cxx
index e7a15e9..ebfc85f 100755
--- a/plugins/pluginWASAPI/plugin_wasapi_tdav.cxx
+++ b/plugins/pluginWASAPI/plugin_wasapi_tdav.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
diff --git a/plugins/pluginWASAPI/plugin_wasapi_utils.cxx b/plugins/pluginWASAPI/plugin_wasapi_utils.cxx
index bc2d45e..e967cff 100755
--- a/plugins/pluginWASAPI/plugin_wasapi_utils.cxx
+++ b/plugins/pluginWASAPI/plugin_wasapi_utils.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -24,58 +24,56 @@ bool WASAPIUtils::g_bStarted = false;
HRESULT WASAPIUtils::Startup()
{
- if(!g_bStarted)
- {
- HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
- if(SUCCEEDED(hr) || hr == 0x80010106) // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
- {
- hr = S_OK;
- }
- g_bStarted = SUCCEEDED(hr);
- return hr;
- }
- return S_OK;
+ if(!g_bStarted) {
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+ if(SUCCEEDED(hr) || hr == 0x80010106) { // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
+ hr = S_OK;
+ }
+ g_bStarted = SUCCEEDED(hr);
+ return hr;
+ }
+ return S_OK;
}
HRESULT WASAPIUtils::Shutdown()
{
- return S_OK;
+ return S_OK;
}
void WASAPIUtils::PrintError(const char* pcFileName, const char* pcFuncName, unsigned nLineNumber, HRESULT hr)
{
- CHAR message[1024] = {0};
+ CHAR message[1024] = {0};
#if PLUGIN_WASAPI_UNDER_WINDOWS_RT
- // FormatMessageA not allowed on the Store
- static WCHAR wBuff[1024] = {0};
- FormatMessageW(
- FORMAT_MESSAGE_FROM_SYSTEM,
- tsk_null,
- hr,
- 0,
- wBuff,
- sizeof(wBuff)-1,
- tsk_null);
- WideCharToMultiByte(CP_UTF8, WC_ERR_INVALID_CHARS, wBuff, wcslen(wBuff), message, sizeof(message) - 1, NULL, NULL);
+ // FormatMessageA not allowed on the Store
+ static WCHAR wBuff[1024] = {0};
+ FormatMessageW(
+ FORMAT_MESSAGE_FROM_SYSTEM,
+ tsk_null,
+ hr,
+ 0,
+ wBuff,
+ sizeof(wBuff)-1,
+ tsk_null);
+ WideCharToMultiByte(CP_UTF8, WC_ERR_INVALID_CHARS, wBuff, wcslen(wBuff), message, sizeof(message) - 1, NULL, NULL);
#else
#ifdef _WIN32_WCE
- FormatMessage
+ FormatMessage
#else
- FormatMessageA
+ FormatMessageA
#endif
- (
+ (
#if !PLUGIN_WASAPI_UNDER_WINDOWS_RT
- FORMAT_MESSAGE_ALLOCATE_BUFFER |
+ FORMAT_MESSAGE_ALLOCATE_BUFFER |
#endif
- FORMAT_MESSAGE_FROM_SYSTEM,
- tsk_null,
- hr,
- 0,
- message,
- sizeof(message) - 1,
- tsk_null);
+ FORMAT_MESSAGE_FROM_SYSTEM,
+ tsk_null,
+ hr,
+ 0,
+ message,
+ sizeof(message) - 1,
+ tsk_null);
#endif
- TSK_DEBUG_ERROR("[WASAPI] File:%s\n Function=%s\n Line:%u\n Message:%s", pcFileName, pcFuncName, nLineNumber, message);
+ TSK_DEBUG_ERROR("[WASAPI] File:%s\n Function=%s\n Line:%u\n Message:%s", pcFileName, pcFuncName, nLineNumber, message);
}
\ No newline at end of file
diff --git a/plugins/pluginWASAPI/plugin_wasapi_utils.h b/plugins/pluginWASAPI/plugin_wasapi_utils.h
index 218a7f8..7237619 100755
--- a/plugins/pluginWASAPI/plugin_wasapi_utils.h
+++ b/plugins/pluginWASAPI/plugin_wasapi_utils.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,7 +31,7 @@
(*ppT)->Release(); \
*ppT = NULL; \
} \
-}
+}
#undef CHECK_HR
// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
@@ -42,12 +42,12 @@
class WASAPIUtils
{
public:
- static HRESULT Startup();
- static HRESULT Shutdown();
- static void PrintError(const char* pcFileName, const char* pcFuncName, unsigned nLineNumber, HRESULT hr);
+ static HRESULT Startup();
+ static HRESULT Shutdown();
+ static void PrintError(const char* pcFileName, const char* pcFuncName, unsigned nLineNumber, HRESULT hr);
private:
- static bool g_bStarted;
+ static bool g_bStarted;
};
#endif /* PLUGIN_WASAPI_UTILS_H */
diff --git a/plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx b/plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx
index dcb9005..a252d04 100755
--- a/plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx
+++ b/plugins/pluginWinAudioDSP/dllmain_audio_dsp.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -55,103 +55,92 @@ PLUGIN_AUDIO_DSP_END_DECLS /* END */
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
- )
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- break;
- case DLL_THREAD_ATTACH:
- break;
- case DLL_THREAD_DETACH:
- break;
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
-typedef enum PLUGIN_INDEX_E
-{
+typedef enum PLUGIN_INDEX_E {
#if PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE
- PLUGIN_INDEX_RESAMPLER,
+ PLUGIN_INDEX_RESAMPLER,
#endif
#if PLUGIN_AUDIO_DSP_DENOISER_ENABLE
- PLUGIN_INDEX_DENOISER,
+ PLUGIN_INDEX_DENOISER,
#endif
-
- PLUGIN_INDEX_COUNT
+
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- return PLUGIN_INDEX_COUNT;
+ return PLUGIN_INDEX_COUNT;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
- switch(index)
- {
+ switch(index) {
#if PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE
- case PLUGIN_INDEX_RESAMPLER:
- {
- return tsk_plugin_def_type_resampler;
- }
+ case PLUGIN_INDEX_RESAMPLER: {
+ return tsk_plugin_def_type_resampler;
+ }
#endif
#if PLUGIN_AUDIO_DSP_DENOISER_ENABLE
- case PLUGIN_INDEX_DENOISER:
- {
- return tsk_plugin_def_type_denoiser;
- }
+ case PLUGIN_INDEX_DENOISER: {
+ return tsk_plugin_def_type_denoiser;
+ }
#endif
- }
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
+ }
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
- switch(index)
- {
+ switch(index) {
#if PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE
- case PLUGIN_INDEX_RESAMPLER:
- {
- return tsk_plugin_def_media_type_audio;
- }
+ case PLUGIN_INDEX_RESAMPLER: {
+ return tsk_plugin_def_media_type_audio;
+ }
#endif
#if PLUGIN_AUDIO_DSP_DENOISER_ENABLE
- case PLUGIN_INDEX_DENOISER:
- {
- return tsk_plugin_def_media_type_audio;
- }
+ case PLUGIN_INDEX_DENOISER: {
+ return tsk_plugin_def_media_type_audio;
+ }
#endif
- }
+ }
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
- switch(index)
- {
+ switch(index) {
#if PLUGIN_AUDIO_DSP_RESAMPLER_ENABLE
- case PLUGIN_INDEX_RESAMPLER:
- {
- return plugin_audio_dsp_resampler_plugin_def_t;
- }
+ case PLUGIN_INDEX_RESAMPLER: {
+ return plugin_audio_dsp_resampler_plugin_def_t;
+ }
#endif
#if PLUGIN_AUDIO_DSP_DENOISER_ENABLE
- case PLUGIN_INDEX_DENOISER:
- {
- return plugin_audio_dsp_denoise_plugin_def_t;
- }
+ case PLUGIN_INDEX_DENOISER: {
+ return plugin_audio_dsp_denoise_plugin_def_t;
+ }
#endif
- }
+ }
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
}
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h b/plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h
index 7730bc8..100f0f8 100755
--- a/plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_config.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -48,12 +48,12 @@
# define PLUGIN_AUDIO_DSP_UNDER_X86 1
#endif
-// Guards against C++ name mangling
+// Guards against C++ name mangling
#ifdef __cplusplus
# define PLUGIN_AUDIO_DSP_BEGIN_DECLS extern "C" {
# define PLUGIN_AUDIO_DSP_END_DECLS }
#else
-# define PLUGIN_AUDIO_DSP_BEGIN_DECLS
+# define PLUGIN_AUDIO_DSP_BEGIN_DECLS
# define PLUGIN_AUDIO_DSP_END_DECLS
#endif
@@ -69,7 +69,7 @@
#endif
#if HAVE_CONFIG_H
- #include <config.h>
+#include <config.h>
#endif
#endif // PLUGIN_AUDIO_DSP_CONFIG_H
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx b/plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx
index 574c5ac..74d713f 100755
--- a/plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_denoiser.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -44,201 +44,189 @@ static const UINT32 g_nChannles = 1; // FIXME
static const UINT32 g_nFrameDuration = 20; // FIXME
/** Speex denoiser*/
-typedef struct plugin_audio_dsp_denoise_s
-{
- TMEDIA_DECLARE_DENOISE;
-
- bool bOpened;
+typedef struct plugin_audio_dsp_denoise_s {
+ TMEDIA_DECLARE_DENOISE;
+
+ bool bOpened;
- LONGLONG rtStart;
+ LONGLONG rtStart;
UINT64 rtDuration;
- uint32_t echo_tail;
- tsk_size_t playback_size_samples;
- tsk_size_t playback_size_bytes;
- tsk_size_t playback_channels;
- tsk_size_t record_size_samples;
- tsk_size_t record_size_bytes;
- tsk_size_t record_channels;
-
- IMediaObject* pInst;
- IMediaBuffer *pBufferIn;
- IMediaBuffer *pBufferOut;
+ uint32_t echo_tail;
+ tsk_size_t playback_size_samples;
+ tsk_size_t playback_size_bytes;
+ tsk_size_t playback_channels;
+ tsk_size_t record_size_samples;
+ tsk_size_t record_size_bytes;
+ tsk_size_t record_channels;
+
+ IMediaObject* pInst;
+ IMediaBuffer *pBufferIn;
+ IMediaBuffer *pBufferOut;
}
plugin_audio_dsp_denoise_t;
static int plugin_audio_dsp_denoise_set(tmedia_denoise_t* _self, const tmedia_param_t* param)
{
- plugin_audio_dsp_denoise_t *self = (plugin_audio_dsp_denoise_t *)_self;
- if(!self || !param)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(param->value_type == tmedia_pvt_int32)
- {
- if(tsk_striequals(param->key, "echo-tail"))
- {
- _self->echo_tail = *((int32_t*)param->value);
- TSK_DEBUG_INFO("ms_voice_dsp_set_echo_tail(%d)", _self->echo_tail);
- if(self->pInst)
- {
- IPropertyStore* pPropStore = NULL;
- HRESULT hr = self->pInst->QueryInterface(IID_PPV_ARGS(&pPropStore));
- if(SUCCEEDED(hr))
- {
- DMO_MEDIA_TYPE mt = {0};
- PROPVARIANT var = {0};
- var.vt = VT_UI4;
- var.ulVal = _self->echo_tail;
- hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH , var);
- }
- SafeRelease(&pPropStore);
- }
- return 0;
- }
- }
- return -1;
+ plugin_audio_dsp_denoise_t *self = (plugin_audio_dsp_denoise_t *)_self;
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "echo-tail")) {
+ _self->echo_tail = *((int32_t*)param->value);
+ TSK_DEBUG_INFO("ms_voice_dsp_set_echo_tail(%d)", _self->echo_tail);
+ if(self->pInst) {
+ IPropertyStore* pPropStore = NULL;
+ HRESULT hr = self->pInst->QueryInterface(IID_PPV_ARGS(&pPropStore));
+ if(SUCCEEDED(hr)) {
+ DMO_MEDIA_TYPE mt = {0};
+ PROPVARIANT var = {0};
+ var.vt = VT_UI4;
+ var.ulVal = _self->echo_tail;
+ hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH , var);
+ }
+ SafeRelease(&pPropStore);
+ }
+ return 0;
+ }
+ }
+ return -1;
}
static int plugin_audio_dsp_denoise_open(tmedia_denoise_t* self, uint32_t record_frame_size_samples, uint32_t record_sampling_rate, uint32_t playback_frame_size_samples, uint32_t playback_sampling_rate)
{
- plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
-
- HRESULT hr = S_OK;
- DMO_MEDIA_TYPE mt = {0};
- PROPVARIANT var = {0};
- IPropertyStore* pPropStore = NULL;
-
- TSK_DEBUG_INFO("[MS Voice Capture DSP] AEC_ENABLED=%d ECHO_TAIL=%d,\nAGC_ENABLED=%d,\nNOISE_SUPP_ENABLED=%d,\nVAD_ENABLED=%d",
- self->echo_supp_enabled, self->echo_tail,
- self->agc_enabled,
- self->noise_supp_enabled,
- self->vad_enabled
- );
-
- if(denoiser->bOpened)
- {
- TSK_DEBUG_ERROR("Denoiser already opened");
- CHECK_HR(hr = E_FAIL);
- }
-
- CHECK_HR(hr = AudioDSPUtils::MoInitMediaType(
- record_sampling_rate,
- g_nBitsPerSample,
- g_nChannles,
- &mt));
-
- CHECK_HR(hr = CoCreateInstance(CLSID_CWMAudioAEC, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&denoiser->pInst)));
- CHECK_HR(hr = denoiser->pInst->QueryInterface(IID_PPV_ARGS(&pPropStore)));
-
- // If the input format does not match the output format, the DMO automatically performs sample-rate conversion.
- CHECK_HR(hr = denoiser->pInst->SetInputType(0, &mt, 0));
- CHECK_HR(hr = denoiser->pInst->SetOutputType(0, &mt, 0));
-
- // Enables the application to override the default settings on various properties of the Voice Capture DSP
- // http://msdn.microsoft.com/en-us/library/windows/desktop/ff819422(v=vs.85).aspx
- var.vt = VT_BOOL;
- var.boolVal = VARIANT_TRUE;
- CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATURE_MODE, var));
-
- // Switch to filter mode: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819410(v=vs.85).aspx
- var.vt = VT_BOOL;
- var.boolVal = VARIANT_FALSE; /* VARIANT_FALSE: Filter, VARIANT_TRUE: Source */
- CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_DMO_SOURCE_MODE, var));
-
- // Enable AEC
- if(self->echo_supp_enabled)
- {
- // Enable AEC: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819427(v=vs.85).aspx
- var.vt = VT_I4;
- var.lVal = SINGLE_CHANNEL_AEC;
- CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_SYSTEM_MODE, var));
-
- // Echo Tail (milliseconds): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819414(v=vs.85).aspx
- if(!self->echo_tail)
- {
- self->echo_tail = tmedia_defaults_get_echo_tail();
- }
- var.vt = VT_I4;
- var.lVal = self->echo_tail ? self->echo_tail : 256;
- CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH, var));
- }
-
- // Automatic Gain Control (AGC): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819412(v=vs.85).aspx
- var.vt = VT_BOOL;
- var.boolVal = self->agc_enabled ? VARIANT_TRUE : VARIANT_FALSE;
- CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_AGC, var));
-
- // Noise suppression (NS): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819420(v=vs.85).aspx
- var.vt = VT_I4;
- var.lVal = self->noise_supp_enabled ? 1 : 0;
- CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_NS, var));
-
- // Automatic Gain Control (AGC): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819412(v=vs.85).aspx
- var.vt = VT_BOOL;
- var.boolVal = self->agc_enabled ? VARIANT_TRUE : VARIANT_FALSE;
- CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_AGC, var));
-
- // Voice Activity Detection (VAD): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819421(v=vs.85).aspx
- var.vt = VT_I4;
- var.lVal = self->vad_enabled ? AEC_VAD_FOR_SILENCE_SUPPRESSION : AEC_VAD_DISABLED;
- CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_VAD, var));
-
- // Recommended to allocate resources
- CHECK_HR(hr = denoiser->pInst->AllocateStreamingResources()); // FIXME: returns E_FAIL
-
- denoiser->record_channels = g_nChannles;
- denoiser->record_size_samples = ((record_sampling_rate * g_nFrameDuration) / 1000) << (denoiser->record_channels == 2 ? 1 : 0);
- denoiser->record_size_bytes = (denoiser->record_size_samples * (g_nBitsPerSample >> 3));
-
- denoiser->playback_channels = g_nChannles;
- denoiser->playback_size_samples = ((playback_sampling_rate * g_nFrameDuration) / 1000) << (denoiser->playback_channels == 2 ? 1 : 0);
- denoiser->playback_size_bytes = (denoiser->playback_size_samples * (g_nBitsPerSample >> 3));
-
- denoiser->rtStart = 0;
- denoiser->rtDuration = PLUGIN_AUDIO_DSP_MILLIS_TO_100NS(g_nFrameDuration); // milliseconds -> 100ns
+ plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+
+ HRESULT hr = S_OK;
+ DMO_MEDIA_TYPE mt = {0};
+ PROPVARIANT var = {0};
+ IPropertyStore* pPropStore = NULL;
+
+ TSK_DEBUG_INFO("[MS Voice Capture DSP] AEC_ENABLED=%d ECHO_TAIL=%d,\nAGC_ENABLED=%d,\nNOISE_SUPP_ENABLED=%d,\nVAD_ENABLED=%d",
+ self->echo_supp_enabled, self->echo_tail,
+ self->agc_enabled,
+ self->noise_supp_enabled,
+ self->vad_enabled
+ );
+
+ if(denoiser->bOpened) {
+ TSK_DEBUG_ERROR("Denoiser already opened");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ CHECK_HR(hr = AudioDSPUtils::MoInitMediaType(
+ record_sampling_rate,
+ g_nBitsPerSample,
+ g_nChannles,
+ &mt));
+
+ CHECK_HR(hr = CoCreateInstance(CLSID_CWMAudioAEC, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&denoiser->pInst)));
+ CHECK_HR(hr = denoiser->pInst->QueryInterface(IID_PPV_ARGS(&pPropStore)));
+
+ // If the input format does not match the output format, the DMO automatically performs sample-rate conversion.
+ CHECK_HR(hr = denoiser->pInst->SetInputType(0, &mt, 0));
+ CHECK_HR(hr = denoiser->pInst->SetOutputType(0, &mt, 0));
+
+ // Enables the application to override the default settings on various properties of the Voice Capture DSP
+ // http://msdn.microsoft.com/en-us/library/windows/desktop/ff819422(v=vs.85).aspx
+ var.vt = VT_BOOL;
+ var.boolVal = VARIANT_TRUE;
+ CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATURE_MODE, var));
+
+ // Switch to filter mode: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819410(v=vs.85).aspx
+ var.vt = VT_BOOL;
+ var.boolVal = VARIANT_FALSE; /* VARIANT_FALSE: Filter, VARIANT_TRUE: Source */
+ CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_DMO_SOURCE_MODE, var));
+
+ // Enable AEC
+ if(self->echo_supp_enabled) {
+ // Enable AEC: http://msdn.microsoft.com/en-us/library/windows/desktop/ff819427(v=vs.85).aspx
+ var.vt = VT_I4;
+ var.lVal = SINGLE_CHANNEL_AEC;
+ CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_SYSTEM_MODE, var));
+
+ // Echo Tail (milliseconds): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819414(v=vs.85).aspx
+ if(!self->echo_tail) {
+ self->echo_tail = tmedia_defaults_get_echo_tail();
+ }
+ var.vt = VT_I4;
+ var.lVal = self->echo_tail ? self->echo_tail : 256;
+ CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH, var));
+ }
+
+ // Automatic Gain Control (AGC): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819412(v=vs.85).aspx
+ var.vt = VT_BOOL;
+ var.boolVal = self->agc_enabled ? VARIANT_TRUE : VARIANT_FALSE;
+ CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_AGC, var));
+
+ // Noise suppression (NS): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819420(v=vs.85).aspx
+ var.vt = VT_I4;
+ var.lVal = self->noise_supp_enabled ? 1 : 0;
+ CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_NS, var));
+
+ // Automatic Gain Control (AGC): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819412(v=vs.85).aspx
+ var.vt = VT_BOOL;
+ var.boolVal = self->agc_enabled ? VARIANT_TRUE : VARIANT_FALSE;
+ CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_AGC, var));
+
+ // Voice Activity Detection (VAD): http://msdn.microsoft.com/en-us/library/windows/desktop/ff819421(v=vs.85).aspx
+ var.vt = VT_I4;
+ var.lVal = self->vad_enabled ? AEC_VAD_FOR_SILENCE_SUPPRESSION : AEC_VAD_DISABLED;
+ CHECK_HR(hr = pPropStore->SetValue(MFPKEY_WMAAECMA_FEATR_VAD, var));
+
+ // Recommended to allocate resources
+ CHECK_HR(hr = denoiser->pInst->AllocateStreamingResources()); // FIXME: returns E_FAIL
+
+ denoiser->record_channels = g_nChannles;
+ denoiser->record_size_samples = ((record_sampling_rate * g_nFrameDuration) / 1000) << (denoiser->record_channels == 2 ? 1 : 0);
+ denoiser->record_size_bytes = (denoiser->record_size_samples * (g_nBitsPerSample >> 3));
+
+ denoiser->playback_channels = g_nChannles;
+ denoiser->playback_size_samples = ((playback_sampling_rate * g_nFrameDuration) / 1000) << (denoiser->playback_channels == 2 ? 1 : 0);
+ denoiser->playback_size_bytes = (denoiser->playback_size_samples * (g_nBitsPerSample >> 3));
+
+ denoiser->rtStart = 0;
+ denoiser->rtDuration = PLUGIN_AUDIO_DSP_MILLIS_TO_100NS(g_nFrameDuration); // milliseconds -> 100ns
bail:
- denoiser->bOpened = SUCCEEDED(hr);
+ denoiser->bOpened = SUCCEEDED(hr);
- MoFreeMediaType(&mt);
- SafeRelease(&pPropStore);
+ MoFreeMediaType(&mt);
+ SafeRelease(&pPropStore);
- return denoiser->bOpened ? 0 : -1;
+ return denoiser->bOpened ? 0 : -1;
}
// playback = "stream 1"
// /!\Thread safety: could be called at the same time as plugin_audio_dsp_denoise_process_record()
static int plugin_audio_dsp_denoise_echo_playback(tmedia_denoise_t* self, const void* echo_frame, uint32_t echo_frame_size_bytes)
{
- plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+ plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+
+ HRESULT hr = S_OK;
- HRESULT hr = S_OK;
+ if(!self || !echo_frame || !echo_frame_size_bytes) {
+ CHECK_HR(hr = E_POINTER);
+ }
- if(!self || !echo_frame || !echo_frame_size_bytes)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!denoiser->bOpened) {
+ TSK_DEBUG_ERROR("Denoiser not opened");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(denoiser->record_size_bytes != echo_frame_size_bytes) {
+ TSK_DEBUG_ERROR("Size mismatch: %u<>%u", denoiser->record_size_bytes, echo_frame_size_bytes);
+ CHECK_HR(hr = E_INVALIDARG);
+ }
- if(!denoiser->bOpened)
- {
- TSK_DEBUG_ERROR("Denoiser not opened");
- CHECK_HR(hr = E_FAIL);
- }
- if(denoiser->record_size_bytes != echo_frame_size_bytes)
- {
- TSK_DEBUG_ERROR("Size mismatch: %u<>%u", denoiser->record_size_bytes, echo_frame_size_bytes);
- CHECK_HR(hr = E_INVALIDARG);
- }
-
-
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
@@ -246,97 +234,89 @@ bail:
// /!\Thread safety: could be called at the same time as plugin_audio_dsp_denoise_echo_playback()
static int plugin_audio_dsp_denoise_process_record(tmedia_denoise_t* self, void* audio_frame, uint32_t audio_frame_size_bytes, tsk_bool_t* silence_or_noise)
{
- plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
-
- HRESULT hr = S_OK;
- BYTE* pBufferInPtr = NULL;
- DWORD dwBufferInSize = 0;
-
- if(!self || !audio_frame || !audio_frame_size_bytes || !silence_or_noise)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!denoiser->bOpened)
- {
- TSK_DEBUG_ERROR("Denoiser not opened");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(denoiser->record_size_bytes != audio_frame_size_bytes)
- {
- TSK_DEBUG_ERROR("Size mismatch: %u<>%u", denoiser->record_size_bytes, audio_frame_size_bytes);
- CHECK_HR(hr = E_INVALIDARG);
- }
-
- if(!denoiser->pBufferIn)
- {
- CHECK_HR(hr = AudioDSPMediaBuffer::Create(denoiser->record_size_bytes, &denoiser->pBufferIn));
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = denoiser->pBufferIn->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < denoiser->record_size_bytes)
- {
- SafeRelease(&denoiser->pBufferIn);
- CHECK_HR(hr = AudioDSPMediaBuffer::Create(denoiser->record_size_bytes, &denoiser->pBufferIn));
- }
- }
-
- // Get memory pointer to the input buffer
- CHECK_HR(hr = denoiser->pBufferIn->GetBufferAndLength(&pBufferInPtr, NULL));
- // Copy data
- dwBufferInSize = TSK_MIN(audio_frame_size_bytes, denoiser->record_size_bytes);
- memcpy(pBufferInPtr, audio_frame, dwBufferInSize);
- CHECK_HR(hr = denoiser->pBufferIn->SetLength(dwBufferInSize));
-
- // Process input
- hr = denoiser->pInst->ProcessInput(
- g_nMicrophoneStreamIndex,
- denoiser->pBufferIn,
- (/*DMO_INPUT_DATA_BUFFERF_TIME | DMO_INPUT_DATA_BUFFERF_TIMELENGTH*/0),
- denoiser->rtStart,
- denoiser->rtDuration);
- if(hr == DMO_E_NOTACCEPTING)
- {
- hr = S_OK;
- }
- CHECK_HR(hr);
-
- denoiser->rtStart += denoiser->rtDuration;
+ plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+
+ HRESULT hr = S_OK;
+ BYTE* pBufferInPtr = NULL;
+ DWORD dwBufferInSize = 0;
+
+ if(!self || !audio_frame || !audio_frame_size_bytes || !silence_or_noise) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!denoiser->bOpened) {
+ TSK_DEBUG_ERROR("Denoiser not opened");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(denoiser->record_size_bytes != audio_frame_size_bytes) {
+ TSK_DEBUG_ERROR("Size mismatch: %u<>%u", denoiser->record_size_bytes, audio_frame_size_bytes);
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+
+ if(!denoiser->pBufferIn) {
+ CHECK_HR(hr = AudioDSPMediaBuffer::Create(denoiser->record_size_bytes, &denoiser->pBufferIn));
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = denoiser->pBufferIn->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < denoiser->record_size_bytes) {
+ SafeRelease(&denoiser->pBufferIn);
+ CHECK_HR(hr = AudioDSPMediaBuffer::Create(denoiser->record_size_bytes, &denoiser->pBufferIn));
+ }
+ }
+
+ // Get memory pointer to the input buffer
+ CHECK_HR(hr = denoiser->pBufferIn->GetBufferAndLength(&pBufferInPtr, NULL));
+ // Copy data
+ dwBufferInSize = TSK_MIN(audio_frame_size_bytes, denoiser->record_size_bytes);
+ memcpy(pBufferInPtr, audio_frame, dwBufferInSize);
+ CHECK_HR(hr = denoiser->pBufferIn->SetLength(dwBufferInSize));
+
+ // Process input
+ hr = denoiser->pInst->ProcessInput(
+ g_nMicrophoneStreamIndex,
+ denoiser->pBufferIn,
+ (/*DMO_INPUT_DATA_BUFFERF_TIME | DMO_INPUT_DATA_BUFFERF_TIMELENGTH*/0),
+ denoiser->rtStart,
+ denoiser->rtDuration);
+ if(hr == DMO_E_NOTACCEPTING) {
+ hr = S_OK;
+ }
+ CHECK_HR(hr);
+
+ denoiser->rtStart += denoiser->rtDuration;
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_audio_dsp_denoise_process_playback(tmedia_denoise_t* self, void* audio_frame, uint32_t audio_frame_size_bytes)
{
- plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
-
- (void)(denoiser);
+ plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+
+ (void)(denoiser);
- // Not mandatory to denoise audio before playback.
- // All Doubango clients support noise suppression.
- return 0;
+ // Not mandatory to denoise audio before playback.
+ // All Doubango clients support noise suppression.
+ return 0;
}
static int plugin_audio_dsp_denoise_close(tmedia_denoise_t* self)
{
- plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
-
- if(!self)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- denoiser->bOpened = false;
- SafeRelease(&denoiser->pBufferIn);
- SafeRelease(&denoiser->pBufferOut);
- SafeRelease(&denoiser->pInst);
- return 0;
+ plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t *)self;
+
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ denoiser->bOpened = false;
+ SafeRelease(&denoiser->pBufferIn);
+ SafeRelease(&denoiser->pBufferOut);
+ SafeRelease(&denoiser->pInst);
+ return 0;
}
@@ -348,55 +328,51 @@ static int plugin_audio_dsp_denoise_close(tmedia_denoise_t* self)
/* constructor */
static tsk_object_t* plugin_audio_dsp_denoise_ctor(tsk_object_t * self, va_list * app)
{
- plugin_audio_dsp_denoise_t *denoise = (plugin_audio_dsp_denoise_t*)self;
- if(denoise)
- {
- AudioDSPUtils::Startup();
-
- /* init base */
- tmedia_denoise_init(TMEDIA_DENOISE(denoise));
- /* init self */
- }
- return self;
+ plugin_audio_dsp_denoise_t *denoise = (plugin_audio_dsp_denoise_t*)self;
+ if(denoise) {
+ AudioDSPUtils::Startup();
+
+ /* init base */
+ tmedia_denoise_init(TMEDIA_DENOISE(denoise));
+ /* init self */
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_audio_dsp_denoise_dtor(tsk_object_t * self)
-{
- plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t*)self;
- if(denoiser)
- {
- /* deinit base */
- tmedia_denoise_deinit(TMEDIA_DENOISE(denoiser));
- /* deinit self */
- SafeRelease(&denoiser->pBufferIn);
- SafeRelease(&denoiser->pBufferOut);
- SafeRelease(&denoiser->pInst);
-
- TSK_DEBUG_INFO("*** MS Voice Capture DSP destroyed ***");
- }
-
- return self;
+{
+ plugin_audio_dsp_denoise_t *denoiser = (plugin_audio_dsp_denoise_t*)self;
+ if(denoiser) {
+ /* deinit base */
+ tmedia_denoise_deinit(TMEDIA_DENOISE(denoiser));
+ /* deinit self */
+ SafeRelease(&denoiser->pBufferIn);
+ SafeRelease(&denoiser->pBufferOut);
+ SafeRelease(&denoiser->pInst);
+
+ TSK_DEBUG_INFO("*** MS Voice Capture DSP destroyed ***");
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_audio_dsp_denoise_def_s =
-{
- sizeof(plugin_audio_dsp_denoise_t),
- plugin_audio_dsp_denoise_ctor,
- plugin_audio_dsp_denoise_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_audio_dsp_denoise_def_s = {
+ sizeof(plugin_audio_dsp_denoise_t),
+ plugin_audio_dsp_denoise_ctor,
+ plugin_audio_dsp_denoise_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_denoise_plugin_def_t plugin_audio_dsp_denoise_plugin_def_s =
-{
- &plugin_audio_dsp_denoise_def_s,
+static const tmedia_denoise_plugin_def_t plugin_audio_dsp_denoise_plugin_def_s = {
+ &plugin_audio_dsp_denoise_def_s,
- "MS Voice Capture DSP", /* http://msdn.microsoft.com/en-us/library/windows/desktop/ff819492(v=vs.85).aspx */
+ "MS Voice Capture DSP", /* http://msdn.microsoft.com/en-us/library/windows/desktop/ff819492(v=vs.85).aspx */
- plugin_audio_dsp_denoise_set,
- plugin_audio_dsp_denoise_open,
- plugin_audio_dsp_denoise_echo_playback,
- plugin_audio_dsp_denoise_process_record,
- plugin_audio_dsp_denoise_process_playback,
- plugin_audio_dsp_denoise_close,
+ plugin_audio_dsp_denoise_set,
+ plugin_audio_dsp_denoise_open,
+ plugin_audio_dsp_denoise_echo_playback,
+ plugin_audio_dsp_denoise_process_record,
+ plugin_audio_dsp_denoise_process_playback,
+ plugin_audio_dsp_denoise_close,
};
const tmedia_denoise_plugin_def_t *plugin_audio_dsp_denoise_plugin_def_t = &plugin_audio_dsp_denoise_plugin_def_s;
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx b/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx
index 34e5b4d..b4a1726 100755
--- a/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -25,48 +25,42 @@ AudioDSPMediaBuffer::AudioDSPMediaBuffer(DWORD cbMaxLength, HRESULT& hr) :
m_pbData(NULL)
{
m_pbData = new BYTE[cbMaxLength];
- if (!m_pbData)
- {
+ if (!m_pbData) {
hr = E_OUTOFMEMORY;
}
}
AudioDSPMediaBuffer::~AudioDSPMediaBuffer()
{
- if (m_pbData)
- {
+ if (m_pbData) {
delete [] m_pbData;
}
}
-// Function to create a new IMediaBuffer object and return
+// Function to create a new IMediaBuffer object and return
// an AddRef'd interface pointer.
HRESULT AudioDSPMediaBuffer::Create(long cbMaxLen, IMediaBuffer **ppBuffer)
{
HRESULT hr = S_OK;
AudioDSPMediaBuffer *pBuffer = NULL;
- if (ppBuffer == NULL)
- {
+ if (ppBuffer == NULL) {
return E_POINTER;
}
pBuffer = new AudioDSPMediaBuffer(cbMaxLen, hr);
- if (pBuffer == NULL)
- {
+ if (pBuffer == NULL) {
hr = E_OUTOFMEMORY;
}
- if (SUCCEEDED(hr))
- {
- *ppBuffer = pBuffer;
- (*ppBuffer)->AddRef();
+ if (SUCCEEDED(hr)) {
+ *ppBuffer = pBuffer;
+ (*ppBuffer)->AddRef();
}
- if (pBuffer)
- {
+ if (pBuffer) {
pBuffer->Release();
}
return hr;
@@ -75,18 +69,15 @@ HRESULT AudioDSPMediaBuffer::Create(long cbMaxLen, IMediaBuffer **ppBuffer)
// IUnknown methods.
STDMETHODIMP AudioDSPMediaBuffer::QueryInterface(REFIID riid, void **ppv)
{
- if (ppv == NULL)
- {
+ if (ppv == NULL) {
return E_POINTER;
}
- else if (riid == IID_IMediaBuffer || riid == IID_IUnknown)
- {
+ else if (riid == IID_IMediaBuffer || riid == IID_IUnknown) {
*ppv = static_cast<IMediaBuffer *>(this);
AddRef();
return S_OK;
}
- else
- {
+ else {
*ppv = NULL;
return E_NOINTERFACE;
}
@@ -100,19 +91,17 @@ STDMETHODIMP_(ULONG) AudioDSPMediaBuffer::AddRef()
STDMETHODIMP_(ULONG) AudioDSPMediaBuffer::Release()
{
LONG lRef = InterlockedDecrement(&m_nRefCount);
- if (lRef == 0)
- {
+ if (lRef == 0) {
delete this;
// m_cRef is no longer valid! Return lRef.
}
- return lRef;
+ return lRef;
}
// IMediaBuffer methods.
STDMETHODIMP AudioDSPMediaBuffer::SetLength(DWORD cbLength)
{
- if (cbLength > m_cbMaxLength)
- {
+ if (cbLength > m_cbMaxLength) {
return E_INVALIDARG;
}
m_cbLength = cbLength;
@@ -121,8 +110,7 @@ STDMETHODIMP AudioDSPMediaBuffer::SetLength(DWORD cbLength)
STDMETHODIMP AudioDSPMediaBuffer::GetMaxLength(DWORD *pcbMaxLength)
{
- if (pcbMaxLength == NULL)
- {
+ if (pcbMaxLength == NULL) {
return E_POINTER;
}
*pcbMaxLength = m_cbMaxLength;
@@ -132,16 +120,13 @@ STDMETHODIMP AudioDSPMediaBuffer::GetMaxLength(DWORD *pcbMaxLength)
STDMETHODIMP AudioDSPMediaBuffer::GetBufferAndLength(BYTE **ppbBuffer, DWORD *pcbLength)
{
// Either parameter can be NULL, but not both.
- if (ppbBuffer == NULL && pcbLength == NULL)
- {
+ if (ppbBuffer == NULL && pcbLength == NULL) {
return E_POINTER;
}
- if (ppbBuffer)
- {
+ if (ppbBuffer) {
*ppbBuffer = m_pbData;
}
- if (pcbLength)
- {
+ if (pcbLength) {
*pcbLength = m_cbLength;
}
return S_OK;
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h b/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h
index a8d603b..486c273 100755
--- a/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_mediabuffer.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -34,7 +34,7 @@ private:
AudioDSPMediaBuffer(DWORD cbMaxLength, HRESULT& hr);
~AudioDSPMediaBuffer();
-public:
+public:
static HRESULT Create(long cbMaxLen, IMediaBuffer **ppBuffer);
// IUnknown methods.
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx b/plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx
index 3e5a291..4fe636f 100755
--- a/plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_resampler.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -37,268 +37,246 @@
static const UINT32 g_nBitsPerSample = 16;
static HRESULT ProcessOutput(struct plugin_audio_dsp_resampler_s *resampler, IMFSample **ppSample);
-typedef struct plugin_audio_dsp_resampler_s
-{
- TMEDIA_DECLARE_RESAMPLER;
-
- bool bOpened;
-
- tsk_size_t in_size_samples;
- tsk_size_t in_size_bytes;
- tsk_size_t out_size_samples;
- tsk_size_t out_size_bytes;
- uint32_t in_channels;
- uint32_t out_channels;
-
- LONGLONG rtStart;
+typedef struct plugin_audio_dsp_resampler_s {
+ TMEDIA_DECLARE_RESAMPLER;
+
+ bool bOpened;
+
+ tsk_size_t in_size_samples;
+ tsk_size_t in_size_bytes;
+ tsk_size_t out_size_samples;
+ tsk_size_t out_size_bytes;
+ uint32_t in_channels;
+ uint32_t out_channels;
+
+ LONGLONG rtStart;
UINT64 rtDuration;
-
- IMFTransform* pMFT;
- IMFSample *pSampleIn;
- IMFSample *pSampleOut;
+
+ IMFTransform* pMFT;
+ IMFSample *pSampleIn;
+ IMFSample *pSampleOut;
}
plugin_audio_dsp_resampler_t;
// Doubango engine uses quality from [1 - 10].
static int plugin_audio_dsp_resampler_open(tmedia_resampler_t* self, uint32_t in_freq, uint32_t out_freq, uint32_t frame_duration, uint32_t in_channels, uint32_t out_channels, uint32_t quality)
{
- plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
-
- IMFMediaType* pTypeIn = NULL;
- IMFMediaType* pTypeOut = NULL;
- IWMResamplerProps* pProps = NULL;
- HRESULT hr = S_OK;
-
- if(in_channels != 1 && in_channels != 2)
- {
- TSK_DEBUG_ERROR("%d not valid as input channel", in_channels);
- CHECK_HR(hr = E_INVALIDARG);
- }
- if(out_channels != 1 && out_channels != 2)
- {
- TSK_DEBUG_ERROR("%d not valid as output channel", out_channels);
- CHECK_HR(hr = E_INVALIDARG);
- }
-
- if(resampler->bOpened)
- {
- TSK_DEBUG_ERROR("Resampler already opened");
- CHECK_HR(hr = E_FAIL);
- }
-
- resampler->in_size_samples = ((in_freq * frame_duration) / 1000) << (in_channels == 2 ? 1 : 0);
- resampler->out_size_samples = ((out_freq * frame_duration) / 1000) << (out_channels == 2 ? 1 : 0);
- resampler->in_channels = in_channels;
- resampler->out_channels = out_channels;
-
- resampler->in_size_bytes = (resampler->in_size_samples * (g_nBitsPerSample >> 3));
- resampler->out_size_bytes = (resampler->out_size_samples * (g_nBitsPerSample >> 3));
-
- resampler->rtStart = 0;
- resampler->rtDuration = PLUGIN_AUDIO_DSP_MILLIS_TO_100NS(frame_duration); // milliseconds -> 100ns
-
- CHECK_HR(hr = CoCreateInstance(CLSID_CResamplerMediaObject, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&resampler->pMFT)));
-
- CHECK_HR(hr = AudioDSPUtils::CreatePCMAudioType(in_freq, g_nBitsPerSample, in_channels, &pTypeIn));
- CHECK_HR(hr = AudioDSPUtils::CreatePCMAudioType(out_freq, g_nBitsPerSample, out_channels, &pTypeOut));
-
- CHECK_HR(hr = resampler->pMFT->SetInputType(0, pTypeIn, 0));
- CHECK_HR(hr = resampler->pMFT->SetOutputType(0, pTypeOut, 0));
-
- CHECK_HR(hr = resampler->pMFT->QueryInterface(IID_PPV_ARGS(&pProps)));
+ plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+
+ IMFMediaType* pTypeIn = NULL;
+ IMFMediaType* pTypeOut = NULL;
+ IWMResamplerProps* pProps = NULL;
+ HRESULT hr = S_OK;
+
+ if(in_channels != 1 && in_channels != 2) {
+ TSK_DEBUG_ERROR("%d not valid as input channel", in_channels);
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+ if(out_channels != 1 && out_channels != 2) {
+ TSK_DEBUG_ERROR("%d not valid as output channel", out_channels);
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+
+ if(resampler->bOpened) {
+ TSK_DEBUG_ERROR("Resampler already opened");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ resampler->in_size_samples = ((in_freq * frame_duration) / 1000) << (in_channels == 2 ? 1 : 0);
+ resampler->out_size_samples = ((out_freq * frame_duration) / 1000) << (out_channels == 2 ? 1 : 0);
+ resampler->in_channels = in_channels;
+ resampler->out_channels = out_channels;
+
+ resampler->in_size_bytes = (resampler->in_size_samples * (g_nBitsPerSample >> 3));
+ resampler->out_size_bytes = (resampler->out_size_samples * (g_nBitsPerSample >> 3));
+
+ resampler->rtStart = 0;
+ resampler->rtDuration = PLUGIN_AUDIO_DSP_MILLIS_TO_100NS(frame_duration); // milliseconds -> 100ns
+
+ CHECK_HR(hr = CoCreateInstance(CLSID_CResamplerMediaObject, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&resampler->pMFT)));
+
+ CHECK_HR(hr = AudioDSPUtils::CreatePCMAudioType(in_freq, g_nBitsPerSample, in_channels, &pTypeIn));
+ CHECK_HR(hr = AudioDSPUtils::CreatePCMAudioType(out_freq, g_nBitsPerSample, out_channels, &pTypeOut));
+
+ CHECK_HR(hr = resampler->pMFT->SetInputType(0, pTypeIn, 0));
+ CHECK_HR(hr = resampler->pMFT->SetOutputType(0, pTypeOut, 0));
+
+ CHECK_HR(hr = resampler->pMFT->QueryInterface(IID_PPV_ARGS(&pProps)));
CHECK_HR(hr = pProps->SetHalfFilterLength((quality * PLUGIN_AUDIO_DSP_RESAMPLER_MAX_QUALITY) / 10)); // [1 - 10] -> [1 - 60]
- CHECK_HR(hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL));
+ CHECK_HR(hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL));
CHECK_HR(hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL));
CHECK_HR(hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL));
bail:
- resampler->bOpened = SUCCEEDED(hr);
- if(!resampler->bOpened)
- {
- SafeRelease(&resampler->pMFT);
- }
- SafeRelease(&pTypeIn);
- SafeRelease(&pTypeOut);
- SafeRelease(&pProps);
- return resampler->bOpened ? 0 : -1;
+ resampler->bOpened = SUCCEEDED(hr);
+ if(!resampler->bOpened) {
+ SafeRelease(&resampler->pMFT);
+ }
+ SafeRelease(&pTypeIn);
+ SafeRelease(&pTypeOut);
+ SafeRelease(&pProps);
+ return resampler->bOpened ? 0 : -1;
}
static tsk_size_t plugin_audio_dsp_resampler_process(tmedia_resampler_t* self, const uint16_t* in_data, tsk_size_t in_size, uint16_t* out_data, tsk_size_t out_size)
{
- plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
-
- HRESULT hr = S_OK;
- tsk_size_t retSize = 0;
-
- if(!resampler || !out_data)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!resampler->bOpened)
- {
- TSK_DEBUG_ERROR("Resampler not opened");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(in_size != resampler->in_size_samples)
- {
- TSK_DEBUG_ERROR("Input data has wrong size");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(out_size < resampler->out_size_samples)
- {
- TSK_DEBUG_ERROR("Output data is too short");
- CHECK_HR(hr = E_FAIL);
- }
-
- IMFMediaBuffer* pBufferIn = NULL;
- IMFMediaBuffer* pBufferOut = NULL;
- IMFSample *pSampleOut = NULL;
- BYTE* pBufferPtr = NULL;
-
- if(!resampler->pSampleIn)
- {
- CHECK_HR(hr = AudioDSPUtils::CreateMediaSample(resampler->in_size_bytes, &resampler->pSampleIn));
- hr = resampler->pSampleIn->GetBufferByIndex(0, &pBufferIn);
- if(FAILED(hr))
- {
- SafeRelease(&resampler->pSampleIn);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = resampler->pSampleIn->GetBufferByIndex(0, &pBufferIn));
- CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < resampler->in_size_bytes)
- {
- CHECK_HR(hr = resampler->pSampleIn->RemoveAllBuffers());
- SafeRelease(&pBufferIn);
- CHECK_HR(hr = MFCreateMemoryBuffer(resampler->in_size_bytes, &pBufferIn));
- CHECK_HR(hr = resampler->pSampleIn->AddBuffer(pBufferIn));
- }
- }
-
- CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
- memcpy(pBufferPtr, in_data, resampler->in_size_bytes);
- CHECK_HR(hr = pBufferIn->Unlock());
- CHECK_HR(hr = pBufferIn->SetCurrentLength(resampler->in_size_bytes));
-
- CHECK_HR(hr = resampler->pSampleIn->SetSampleDuration(resampler->rtDuration));
- CHECK_HR(hr = resampler->pSampleIn->SetSampleTime(resampler->rtStart));
-
- // Process input
- hr = resampler->pMFT->ProcessInput(0, resampler->pSampleIn, 0);
- if(hr == MF_E_NOTACCEPTING)
- {
- hr = S_OK;
- }
- CHECK_HR(hr);
-
- resampler->rtStart += resampler->rtDuration;
-
- // Process output
- CHECK_HR(hr = ProcessOutput(resampler, &pSampleOut));
- if(pSampleOut)
- {
- CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
-
- BYTE* pBufferOutPtr = NULL;
- DWORD dwDataLength = 0;
- CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
- //if(dwDataLength == resampler->out_size_bytes)
- {
- CHECK_HR(hr = pBufferOut->Lock(&pBufferOutPtr, NULL, NULL));
- {
- memcpy(out_data, pBufferOutPtr, TSK_MIN(dwDataLength, resampler->out_size_bytes));
- if(dwDataLength < resampler->out_size_bytes)
- {
- TSK_DEBUG_INFO("[MS Resampler DSP] Output too short filling with silence");
- memset(&((uint8_t*)out_data)[dwDataLength], 0, (resampler->out_size_bytes - dwDataLength));
- }
- retSize = (tsk_size_t)resampler->out_size_bytes;
- }
- CHECK_HR(hr = pBufferOut->Unlock());
- }
- }
-
-
+ plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+
+ HRESULT hr = S_OK;
+ tsk_size_t retSize = 0;
+
+ if(!resampler || !out_data) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!resampler->bOpened) {
+ TSK_DEBUG_ERROR("Resampler not opened");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(in_size != resampler->in_size_samples) {
+ TSK_DEBUG_ERROR("Input data has wrong size");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(out_size < resampler->out_size_samples) {
+ TSK_DEBUG_ERROR("Output data is too short");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ IMFMediaBuffer* pBufferIn = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+ IMFSample *pSampleOut = NULL;
+ BYTE* pBufferPtr = NULL;
+
+ if(!resampler->pSampleIn) {
+ CHECK_HR(hr = AudioDSPUtils::CreateMediaSample(resampler->in_size_bytes, &resampler->pSampleIn));
+ hr = resampler->pSampleIn->GetBufferByIndex(0, &pBufferIn);
+ if(FAILED(hr)) {
+ SafeRelease(&resampler->pSampleIn);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = resampler->pSampleIn->GetBufferByIndex(0, &pBufferIn));
+ CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < resampler->in_size_bytes) {
+ CHECK_HR(hr = resampler->pSampleIn->RemoveAllBuffers());
+ SafeRelease(&pBufferIn);
+ CHECK_HR(hr = MFCreateMemoryBuffer(resampler->in_size_bytes, &pBufferIn));
+ CHECK_HR(hr = resampler->pSampleIn->AddBuffer(pBufferIn));
+ }
+ }
+
+ CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
+ memcpy(pBufferPtr, in_data, resampler->in_size_bytes);
+ CHECK_HR(hr = pBufferIn->Unlock());
+ CHECK_HR(hr = pBufferIn->SetCurrentLength(resampler->in_size_bytes));
+
+ CHECK_HR(hr = resampler->pSampleIn->SetSampleDuration(resampler->rtDuration));
+ CHECK_HR(hr = resampler->pSampleIn->SetSampleTime(resampler->rtStart));
+
+ // Process input
+ hr = resampler->pMFT->ProcessInput(0, resampler->pSampleIn, 0);
+ if(hr == MF_E_NOTACCEPTING) {
+ hr = S_OK;
+ }
+ CHECK_HR(hr);
+
+ resampler->rtStart += resampler->rtDuration;
+
+ // Process output
+ CHECK_HR(hr = ProcessOutput(resampler, &pSampleOut));
+ if(pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferOutPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ //if(dwDataLength == resampler->out_size_bytes)
+ {
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferOutPtr, NULL, NULL));
+ {
+ memcpy(out_data, pBufferOutPtr, TSK_MIN(dwDataLength, resampler->out_size_bytes));
+ if(dwDataLength < resampler->out_size_bytes) {
+ TSK_DEBUG_INFO("[MS Resampler DSP] Output too short filling with silence");
+ memset(&((uint8_t*)out_data)[dwDataLength], 0, (resampler->out_size_bytes - dwDataLength));
+ }
+ retSize = (tsk_size_t)resampler->out_size_bytes;
+ }
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+
+
bail:
- SafeRelease(&pBufferIn);
- SafeRelease(&pBufferOut);
- SafeRelease(&pSampleOut);
-
- return retSize;
+ SafeRelease(&pBufferIn);
+ SafeRelease(&pBufferOut);
+ SafeRelease(&pSampleOut);
+
+ return retSize;
}
static int plugin_audio_dsp_resampler_close(tmedia_resampler_t* self)
{
- plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+ plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
+
+ if(resampler->pMFT) {
+ hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
+ }
- if(resampler->pMFT)
- {
- hr = resampler->pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
- }
+ SafeRelease(&resampler->pMFT);
+ SafeRelease(&resampler->pSampleIn);
+ SafeRelease(&resampler->pSampleOut);
- SafeRelease(&resampler->pMFT);
- SafeRelease(&resampler->pSampleIn);
- SafeRelease(&resampler->pSampleOut);
+ resampler->bOpened = false;
- resampler->bOpened = false;
-
- return 0;
+ return 0;
}
static HRESULT ProcessOutput(plugin_audio_dsp_resampler_t *resampler, IMFSample **ppSample)
{
- *ppSample = NULL;
+ *ppSample = NULL;
IMFMediaBuffer* pBufferOut = NULL;
DWORD dwStatus;
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
- if(!resampler || !ppSample)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!resampler->pSampleOut)
- {
- CHECK_HR(hr = AudioDSPUtils::CreateMediaSample(resampler->out_size_bytes, &resampler->pSampleOut));
- hr = resampler->pSampleOut->GetBufferByIndex(0, &pBufferOut);
- if(FAILED(hr))
- {
- SafeRelease(&resampler->pSampleOut);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = resampler->pSampleOut->GetBufferByIndex(0, &pBufferOut));
- CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < resampler->out_size_bytes)
- {
- CHECK_HR(hr = resampler->pSampleOut->RemoveAllBuffers());
- SafeRelease(&pBufferOut);
- CHECK_HR(hr = MFCreateMemoryBuffer(resampler->out_size_bytes, &pBufferOut));
- CHECK_HR(hr = resampler->pSampleOut->AddBuffer(pBufferOut));
- }
- }
-
- CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
-
+ if(!resampler || !ppSample) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!resampler->pSampleOut) {
+ CHECK_HR(hr = AudioDSPUtils::CreateMediaSample(resampler->out_size_bytes, &resampler->pSampleOut));
+ hr = resampler->pSampleOut->GetBufferByIndex(0, &pBufferOut);
+ if(FAILED(hr)) {
+ SafeRelease(&resampler->pSampleOut);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = resampler->pSampleOut->GetBufferByIndex(0, &pBufferOut));
+ CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < resampler->out_size_bytes) {
+ CHECK_HR(hr = resampler->pSampleOut->RemoveAllBuffers());
+ SafeRelease(&pBufferOut);
+ CHECK_HR(hr = MFCreateMemoryBuffer(resampler->out_size_bytes, &pBufferOut));
+ CHECK_HR(hr = resampler->pSampleOut->AddBuffer(pBufferOut));
+ }
+ }
+
+ CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
+
//Set the output sample
mftOutputData.pSample = resampler->pSampleOut;
//Set the output id
@@ -306,16 +284,14 @@ static HRESULT ProcessOutput(plugin_audio_dsp_resampler_t *resampler, IMFSample
//Generate the output sample
hr = resampler->pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus);
- if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
- {
+ if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
hr = S_OK;
goto bail;
}
// TODO: Handle MF_E_TRANSFORM_STREAM_CHANGE
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
goto bail;
}
@@ -336,53 +312,49 @@ bail:
/* constructor */
static tsk_object_t* plugin_audio_dsp_resampler_ctor(tsk_object_t * self, va_list * app)
{
- plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
- if(resampler)
- {
- AudioDSPUtils::Startup();
-
- /* init base */
- tmedia_resampler_init(TMEDIA_RESAMPLER(resampler));
- /* init self */
- }
- return self;
+ plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+ if(resampler) {
+ AudioDSPUtils::Startup();
+
+ /* init base */
+ tmedia_resampler_init(TMEDIA_RESAMPLER(resampler));
+ /* init self */
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_audio_dsp_resampler_dtor(tsk_object_t * self)
-{
- plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
- if(resampler)
- {
- /* deinit base */
- tmedia_resampler_deinit(TMEDIA_RESAMPLER(resampler));
- /* deinit self */
- // tmedia_resampler_deinit() already closed the resampler and freed the resources...but do it again
- SafeRelease(&resampler->pMFT);
- SafeRelease(&resampler->pSampleIn);
- SafeRelease(&resampler->pSampleOut);
-
- TSK_DEBUG_INFO("*** MS Audio Resampler DSP (plugin) destroyed ***");
- }
-
- return self;
+{
+ plugin_audio_dsp_resampler_t *resampler = (plugin_audio_dsp_resampler_t *)self;
+ if(resampler) {
+ /* deinit base */
+ tmedia_resampler_deinit(TMEDIA_RESAMPLER(resampler));
+ /* deinit self */
+ // tmedia_resampler_deinit() already closed the resampler and freed the resources...but do it again
+ SafeRelease(&resampler->pMFT);
+ SafeRelease(&resampler->pSampleIn);
+ SafeRelease(&resampler->pSampleOut);
+
+ TSK_DEBUG_INFO("*** MS Audio Resampler DSP (plugin) destroyed ***");
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_audio_dsp_resampler_def_s =
-{
- sizeof(plugin_audio_dsp_resampler_t),
- plugin_audio_dsp_resampler_ctor,
- plugin_audio_dsp_resampler_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_audio_dsp_resampler_def_s = {
+ sizeof(plugin_audio_dsp_resampler_t),
+ plugin_audio_dsp_resampler_ctor,
+ plugin_audio_dsp_resampler_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_resampler_plugin_def_t plugin_audio_dsp_resampler_plugin_def_s =
-{
- &plugin_audio_dsp_resampler_def_s,
-
- "MS Audio Resampler DSP", /* http://msdn.microsoft.com/en-us/library/windows/desktop/ff819070(v=vs.85).aspx */
-
- plugin_audio_dsp_resampler_open,
- plugin_audio_dsp_resampler_process,
- plugin_audio_dsp_resampler_close,
+static const tmedia_resampler_plugin_def_t plugin_audio_dsp_resampler_plugin_def_s = {
+ &plugin_audio_dsp_resampler_def_s,
+
+ "MS Audio Resampler DSP", /* http://msdn.microsoft.com/en-us/library/windows/desktop/ff819070(v=vs.85).aspx */
+
+ plugin_audio_dsp_resampler_open,
+ plugin_audio_dsp_resampler_process,
+ plugin_audio_dsp_resampler_close,
};
const tmedia_resampler_plugin_def_t *plugin_audio_dsp_resampler_plugin_def_t = &plugin_audio_dsp_resampler_plugin_def_s;
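Note on the pattern above: the plugin wraps the stock Media Foundation resampler DSP (CLSID_CResamplerMediaObject) and drives it with the usual MFT push/pull sequence — push one PCM frame with ProcessInput(), then drain with ProcessOutput() until the transform reports MF_E_TRANSFORM_NEED_MORE_INPUT. The minimal, self-contained sketch below illustrates that sequence only; it is not part of the plugin source, and error handling plus COM/MF startup are omitted. The quality mapping in plugin_audio_dsp_resampler_open() follows its inline comment: Doubango quality [1 - 10] is scaled onto the DSP half filter length [1 - 60], so e.g. quality 5 becomes SetHalfFilterLength(30).
// Minimal illustrative sketch of the MFT push/pull pattern used by the resampler above.
// Assumes pMFT was created and configured as in plugin_audio_dsp_resampler_open();
// pIn/pOut are caller-allocated IMFSample objects (this DSP does not allocate output samples).
#include <mftransform.h>
#include <mferror.h>
static HRESULT ResampleOneFrame(IMFTransform* pMFT, IMFSample* pIn, IMFSample* pOut)
{
    HRESULT hr = pMFT->ProcessInput(0, pIn, 0);
    if (hr == MF_E_NOTACCEPTING) {
        hr = S_OK; // transform already holds data; just drain below
    }
    if (FAILED(hr)) {
        return hr;
    }
    MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
    DWORD dwStatus = 0;
    mftOutputData.pSample = pOut; // caller-provided output sample
    hr = pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus);
    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
        hr = S_OK; // nothing produced yet; feed more input on the next call
    }
    return hr;
}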
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx b/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx
index 67cf3cf..d549336 100755
--- a/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -28,22 +28,20 @@ bool AudioDSPUtils::g_bStarted = false;
HRESULT AudioDSPUtils::Startup()
{
- if(!g_bStarted)
- {
- HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
- if(SUCCEEDED(hr) || hr == 0x80010106) // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
- {
- hr = MFStartup(MF_VERSION);
- }
- g_bStarted = SUCCEEDED(hr);
- return hr;
- }
- return S_OK;
+ if(!g_bStarted) {
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+ if(SUCCEEDED(hr) || hr == 0x80010106) { // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
+ hr = MFStartup(MF_VERSION);
+ }
+ g_bStarted = SUCCEEDED(hr);
+ return hr;
+ }
+ return S_OK;
}
HRESULT AudioDSPUtils::Shutdown()
{
- return S_OK;
+ return S_OK;
}
HRESULT AudioDSPUtils::CreatePCMAudioType(
@@ -51,7 +49,7 @@ HRESULT AudioDSPUtils::CreatePCMAudioType(
UINT32 bitsPerSample, // Bits per sample
UINT32 cChannels, // Number of channels
IMFMediaType **ppType // Receives a pointer to the media type.
- )
+)
{
HRESULT hr = S_OK;
@@ -62,27 +60,27 @@ HRESULT AudioDSPUtils::CreatePCMAudioType(
UINT32 bytesPerSecond = blockAlign * sampleRate;
// Create the empty media type.
- CHECK_HR(hr = MFCreateMediaType(&pType));
+ CHECK_HR(hr = MFCreateMediaType(&pType));
// Set attributes on the type.
CHECK_HR(hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
-
- CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
-
- CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, cChannels));
-
- CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sampleRate));
-
- CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, blockAlign));
-
- CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerSecond));
-
- CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample));
-
- CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
-
- CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
-
+
+ CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, cChannels));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sampleRate));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, blockAlign));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerSecond));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
+
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
+
*ppType = pType;
(*ppType)->AddRef();
@@ -92,24 +90,23 @@ bail:
}
HRESULT AudioDSPUtils::CreateMediaSample(
- DWORD cbData, // Maximum buffer size
- IMFSample **ppSample // Receives the sample
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(!ppSample)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!ppSample) {
+ CHECK_HR(hr = E_POINTER);
+ }
IMFSample *pSample = NULL;
IMFMediaBuffer *pBuffer = NULL;
- CHECK_HR(hr = MFCreateSample(&pSample));
- CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
+ CHECK_HR(hr = MFCreateSample(&pSample));
+ CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
CHECK_HR(hr = pSample->AddBuffer(pBuffer));
-
+
*ppSample = pSample;
(*ppSample)->AddRef();
@@ -120,38 +117,37 @@ bail:
}
HRESULT AudioDSPUtils::MoInitMediaType(
- UINT32 sampleRate, // Samples per second
- UINT32 bitsPerSample, // Bits per sample
- UINT32 cChannels, // Number of channels
- DMO_MEDIA_TYPE *pType // The media type to initialize. Must be freed using MoFreeMediaType.
- )
+ UINT32 sampleRate, // Samples per second
+ UINT32 bitsPerSample, // Bits per sample
+ UINT32 cChannels, // Number of channels
+ DMO_MEDIA_TYPE *pType // The media type to initialize. Must be freed using MoFreeMediaType.
+)
{
- HRESULT hr = S_OK;
- WAVEFORMATEX *pWAV = NULL;
+ HRESULT hr = S_OK;
+ WAVEFORMATEX *pWAV = NULL;
- if(!pType)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!pType) {
+ CHECK_HR(hr = E_POINTER);
+ }
- pType->majortype = MEDIATYPE_Audio;
- pType->subtype = MEDIASUBTYPE_PCM;
- pType->lSampleSize = 0;
- pType->bFixedSizeSamples = TRUE;
- pType->bTemporalCompression = FALSE;
- pType->formattype = FORMAT_WaveFormatEx;
+ pType->majortype = MEDIATYPE_Audio;
+ pType->subtype = MEDIASUBTYPE_PCM;
+ pType->lSampleSize = 0;
+ pType->bFixedSizeSamples = TRUE;
+ pType->bTemporalCompression = FALSE;
+ pType->formattype = FORMAT_WaveFormatEx;
- CHECK_HR(hr = ::MoInitMediaType(pType, sizeof(WAVEFORMATEX)));
+ CHECK_HR(hr = ::MoInitMediaType(pType, sizeof(WAVEFORMATEX)));
- pWAV = (WAVEFORMATEX*)pType->pbFormat;
+ pWAV = (WAVEFORMATEX*)pType->pbFormat;
pWAV->wFormatTag = WAVE_FORMAT_PCM;
pWAV->nChannels = 1;
pWAV->nSamplesPerSec = sampleRate;
- pWAV->nBlockAlign = cChannels * (bitsPerSample >> 3);
+ pWAV->nBlockAlign = cChannels * (bitsPerSample >> 3);
pWAV->nAvgBytesPerSec = pWAV->nBlockAlign * pWAV->nSamplesPerSec;
pWAV->wBitsPerSample = bitsPerSample;
pWAV->cbSize = 0;
bail:
- return hr;
+ return hr;
}
diff --git a/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h b/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h
index 7daff31..b2236f2 100755
--- a/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h
+++ b/plugins/pluginWinAudioDSP/plugin_audio_dsp_utils.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -40,32 +40,32 @@
(*ppT)->Release(); \
*ppT = NULL; \
} \
-}
+}
class AudioDSPUtils
{
public:
- static HRESULT Startup();
- static HRESULT Shutdown();
- static HRESULT CreatePCMAudioType(
- UINT32 sampleRate, // Samples per second
- UINT32 bitsPerSample, // Bits per sample
- UINT32 cChannels, // Number of channels
- IMFMediaType **ppType // Receives a pointer to the media type.
+ static HRESULT Startup();
+ static HRESULT Shutdown();
+ static HRESULT CreatePCMAudioType(
+ UINT32 sampleRate, // Samples per second
+ UINT32 bitsPerSample, // Bits per sample
+ UINT32 cChannels, // Number of channels
+ IMFMediaType **ppType // Receives a pointer to the media type.
+ );
+ static HRESULT CreateMediaSample(
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
+ );
+ static HRESULT MoInitMediaType(
+ UINT32 sampleRate, // Samples per second
+ UINT32 bitsPerSample, // Bits per sample
+ UINT32 cChannels, // Number of channels
+ DMO_MEDIA_TYPE *pType // The media type to initialize. Must be freed using MoFreeMediaType.
);
- static HRESULT CreateMediaSample(
- DWORD cbData, // Maximum buffer size
- IMFSample **ppSample // Receives the sample
- );
- static HRESULT MoInitMediaType(
- UINT32 sampleRate, // Samples per second
- UINT32 bitsPerSample, // Bits per sample
- UINT32 cChannels, // Number of channels
- DMO_MEDIA_TYPE *pType // The media type to initialize. Must be freed using MoFreeMediaType.
- );
private:
- static bool g_bStarted;
+ static bool g_bStarted;
};
#endif /* PLUGIN_AUDIO_DSP_UTILS_H */
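Taken together, the helpers above cover both Media Foundation (IMFMediaType / IMFSample) and DMO (DMO_MEDIA_TYPE) type setup. The following usage sketch is illustrative only — the sample rate, channel count and buffer size are assumed values, not taken from the plugin — and shows the expected pairing with MoFreeMediaType() and SafeRelease():
// Illustrative only: drive the AudioDSPUtils helpers declared above.
// Real code should check every HRESULT (e.g. with CHECK_HR).
IMFMediaType* pType = NULL;
IMFSample* pSample = NULL;
DMO_MEDIA_TYPE mt = { 0 };
AudioDSPUtils::Startup();                                 // CoInitializeEx + MFStartup
AudioDSPUtils::CreatePCMAudioType(16000, 16, 1, &pType);  // 16 kHz, 16-bit, mono PCM
AudioDSPUtils::CreateMediaSample(320 * 2, &pSample);      // 20 ms @ 16 kHz mono, 16-bit
AudioDSPUtils::MoInitMediaType(16000, 16, 1, &mt);        // DMO flavour of the same format
// ... hand pType/pSample/mt to the MFT or DMO ...
MoFreeMediaType(&mt);   // required, per the declaration comment above
SafeRelease(&pSample);
SafeRelease(&pType);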
diff --git a/plugins/pluginWinDD/dllmain_dd.cxx b/plugins/pluginWinDD/dllmain_dd.cxx
index b390c90..e8df126 100755
--- a/plugins/pluginWinDD/dllmain_dd.cxx
+++ b/plugins/pluginWinDD/dllmain_dd.cxx
@@ -35,80 +35,72 @@ PLUGIN_WIN_DDP_API tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index);
PLUGIN_WIN_DD_END_DECLS /* END */
BOOL APIENTRY DllMain(HMODULE hModule,
-DWORD ul_reason_for_call,
-LPVOID lpReserved
-)
+ DWORD ul_reason_for_call,
+ LPVOID lpReserved
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- break;
- case DLL_THREAD_ATTACH:
- break;
- case DLL_THREAD_DETACH:
- break;
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
-typedef enum PLUGIN_INDEX_E
-{
- PLUGIN_INDEX_PRODUCER,
+typedef enum PLUGIN_INDEX_E {
+ PLUGIN_INDEX_PRODUCER,
- PLUGIN_INDEX_COUNT
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- return PLUGIN_INDEX_COUNT;
+ return PLUGIN_INDEX_COUNT;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
- switch (index){
- case PLUGIN_INDEX_PRODUCER:
- {
- return tsk_plugin_def_type_producer;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
- }
- }
+ switch (index) {
+ case PLUGIN_INDEX_PRODUCER: {
+ return tsk_plugin_def_type_producer;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+ }
+ }
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
- switch (index){
- case PLUGIN_INDEX_PRODUCER:
- {
- return tsk_plugin_def_media_type_screencast;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
- }
- }
+ switch (index) {
+ case PLUGIN_INDEX_PRODUCER: {
+ return tsk_plugin_def_media_type_screencast;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+ }
+ }
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
- switch (index){
- case PLUGIN_INDEX_PRODUCER:
- {
- return plugin_win_dd_producer_plugin_def_t;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
- }
- }
+ switch (index) {
+ case PLUGIN_INDEX_PRODUCER: {
+ return plugin_win_dd_producer_plugin_def_t;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+ }
+ }
}
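The exports above follow the generic Doubango plugin contract (__plugin_get_def_count / __plugin_get_def_type_at / __plugin_get_def_media_type_at / __plugin_get_def_at). A hypothetical loader-side sketch — only the exported symbol names come from this file; the DLL path and enumeration loop are assumptions for illustration — looks like:
// Hypothetical host-side enumeration of the exports declared above (illustrative only).
typedef int (*plugin_get_def_count_f)();
typedef tsk_plugin_def_ptr_const_t (*plugin_get_def_at_f)(int);
HMODULE hDLL = LoadLibraryA("pluginWinDD.dll"); // DLL name is an assumption
if (hDLL) {
    plugin_get_def_count_f pfnCount = (plugin_get_def_count_f)GetProcAddress(hDLL, "__plugin_get_def_count");
    plugin_get_def_at_f pfnAt = (plugin_get_def_at_f)GetProcAddress(hDLL, "__plugin_get_def_at");
    if (pfnCount && pfnAt) {
        for (int i = 0; i < pfnCount(); ++i) {
            tsk_plugin_def_ptr_const_t def = pfnAt(i); // hand each definition to the media framework
        }
    }
    // FreeLibrary(hDLL) once the plugin definitions are no longer in use.
}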
diff --git a/plugins/pluginWinDD/internals/CommonTypes.h b/plugins/pluginWinDD/internals/CommonTypes.h
index 7b9c2d4..3870e96 100755
--- a/plugins/pluginWinDD/internals/CommonTypes.h
+++ b/plugins/pluginWinDD/internals/CommonTypes.h
@@ -32,12 +32,11 @@ extern HRESULT AcquireFrameExpectedError[];
extern HRESULT EnumOutputsExpectedErrors[];
-typedef _Return_type_success_(return == DUPL_RETURN_SUCCESS) enum
-{
+typedef _Return_type_success_(return == DUPL_RETURN_SUCCESS) enum {
DUPL_RETURN_SUCCESS = 0,
DUPL_RETURN_ERROR_EXPECTED = 1,
DUPL_RETURN_ERROR_UNEXPECTED = 2
-}DUPL_RETURN;
+} DUPL_RETURN;
_Post_satisfies_(return != DUPL_RETURN_SUCCESS)
DUPL_RETURN ProcessFailure(_In_opt_ ID3D11Device* Device, _In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr, _In_opt_z_ HRESULT* ExpectedErrors = nullptr);
@@ -47,8 +46,7 @@ void DisplayMsg(_In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr);
//
// Holds info about the pointer/cursor
//
-typedef struct _PTR_INFO
-{
+typedef struct _PTR_INFO {
_Field_size_bytes_(BufferSize) BYTE* PtrShapeBuffer;
DXGI_OUTDUPL_POINTER_SHAPE_INFO ShapeInfo;
POINT Position;
@@ -61,8 +59,7 @@ typedef struct _PTR_INFO
//
// Structure that holds D3D resources not directly tied to any one thread
//
-typedef struct _DX_RESOURCES
-{
+typedef struct _DX_RESOURCES {
ID3D11Device* Device;
ID3D11DeviceContext* Context;
ID3D11VertexShader* VertexShader;
@@ -74,8 +71,7 @@ typedef struct _DX_RESOURCES
//
// Structure to pass to a new thread
//
-typedef struct _THREAD_DATA
-{
+typedef struct _THREAD_DATA {
// Used to indicate abnormal error condition
HANDLE UnexpectedErrorEvent;
@@ -92,14 +88,13 @@ typedef struct _THREAD_DATA
PTR_INFO* PtrInfo;
DX_RESOURCES DxRes;
- const struct tmedia_producer_s* Producer;
+ const struct tmedia_producer_s* Producer;
} THREAD_DATA;
//
// FRAME_DATA holds information about an acquired frame
//
-typedef struct _FRAME_DATA
-{
+typedef struct _FRAME_DATA {
ID3D11Texture2D* Frame;
DXGI_OUTDUPL_FRAME_INFO FrameInfo;
_Field_size_bytes_((MoveCount * sizeof(DXGI_OUTDUPL_MOVE_RECT)) + (DirtyCount * sizeof(RECT))) BYTE* MetaData;
@@ -110,8 +105,7 @@ typedef struct _FRAME_DATA
//
// A vertex with a position and texture coordinate
//
-typedef struct _VERTEX
-{
+typedef struct _VERTEX {
DirectX::XMFLOAT3 Pos;
DirectX::XMFLOAT2 TexCoord;
} VERTEX;
diff --git a/plugins/pluginWinDD/internals/DisplayManager.cxx b/plugins/pluginWinDD/internals/DisplayManager.cxx
index 98209a0..e60ab64 100755
--- a/plugins/pluginWinDD/internals/DisplayManager.cxx
+++ b/plugins/pluginWinDD/internals/DisplayManager.cxx
@@ -12,15 +12,15 @@ using namespace DirectX;
// Constructor NULLs out vars
//
DISPLAYMANAGER::DISPLAYMANAGER() : m_Device(nullptr),
- m_DeviceContext(nullptr),
- m_MoveSurf(nullptr),
- m_VertexShader(nullptr),
- m_PixelShader(nullptr),
- m_InputLayout(nullptr),
- m_RTV(nullptr),
- m_SamplerLinear(nullptr),
- m_DirtyVertexBufferAlloc(nullptr),
- m_DirtyVertexBufferAllocSize(0)
+ m_DeviceContext(nullptr),
+ m_MoveSurf(nullptr),
+ m_VertexShader(nullptr),
+ m_PixelShader(nullptr),
+ m_InputLayout(nullptr),
+ m_RTV(nullptr),
+ m_SamplerLinear(nullptr),
+ m_DirtyVertexBufferAlloc(nullptr),
+ m_DirtyVertexBufferAllocSize(0)
{
}
@@ -31,8 +31,7 @@ DISPLAYMANAGER::~DISPLAYMANAGER()
{
CleanRefs();
- if (m_DirtyVertexBufferAlloc)
- {
+ if (m_DirtyVertexBufferAlloc) {
delete [] m_DirtyVertexBufferAlloc;
m_DirtyVertexBufferAlloc = nullptr;
}
@@ -66,22 +65,18 @@ DUPL_RETURN DISPLAYMANAGER::ProcessFrame(_In_ FRAME_DATA* Data, _Inout_ ID3D11Te
DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
// Process dirties and moves
- if (Data->FrameInfo.TotalMetadataBufferSize)
- {
+ if (Data->FrameInfo.TotalMetadataBufferSize) {
D3D11_TEXTURE2D_DESC Desc;
Data->Frame->GetDesc(&Desc);
- if (Data->MoveCount)
- {
+ if (Data->MoveCount) {
Ret = CopyMove(SharedSurf, reinterpret_cast<DXGI_OUTDUPL_MOVE_RECT*>(Data->MetaData), Data->MoveCount, OffsetX, OffsetY, DeskDesc, Desc.Width, Desc.Height);
- if (Ret != DUPL_RETURN_SUCCESS)
- {
+ if (Ret != DUPL_RETURN_SUCCESS) {
return Ret;
}
}
- if (Data->DirtyCount)
- {
+ if (Data->DirtyCount) {
Ret = CopyDirty(Data->Frame, SharedSurf, reinterpret_cast<RECT*>(Data->MetaData + (Data->MoveCount * sizeof(DXGI_OUTDUPL_MOVE_RECT))), Data->DirtyCount, OffsetX, OffsetY, DeskDesc);
}
}
@@ -102,64 +97,58 @@ ID3D11Device* DISPLAYMANAGER::GetDevice()
//
void DISPLAYMANAGER::SetMoveRect(_Out_ RECT* SrcRect, _Out_ RECT* DestRect, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ DXGI_OUTDUPL_MOVE_RECT* MoveRect, INT TexWidth, INT TexHeight)
{
- switch (DeskDesc->Rotation)
- {
- case DXGI_MODE_ROTATION_UNSPECIFIED:
- case DXGI_MODE_ROTATION_IDENTITY:
- {
- SrcRect->left = MoveRect->SourcePoint.x;
- SrcRect->top = MoveRect->SourcePoint.y;
- SrcRect->right = MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left;
- SrcRect->bottom = MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top;
-
- *DestRect = MoveRect->DestinationRect;
- break;
- }
- case DXGI_MODE_ROTATION_ROTATE90:
- {
- SrcRect->left = TexHeight - (MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top);
- SrcRect->top = MoveRect->SourcePoint.x;
- SrcRect->right = TexHeight - MoveRect->SourcePoint.y;
- SrcRect->bottom = MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left;
-
- DestRect->left = TexHeight - MoveRect->DestinationRect.bottom;
- DestRect->top = MoveRect->DestinationRect.left;
- DestRect->right = TexHeight - MoveRect->DestinationRect.top;
- DestRect->bottom = MoveRect->DestinationRect.right;
- break;
- }
- case DXGI_MODE_ROTATION_ROTATE180:
- {
- SrcRect->left = TexWidth - (MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left);
- SrcRect->top = TexHeight - (MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top);
- SrcRect->right = TexWidth - MoveRect->SourcePoint.x;
- SrcRect->bottom = TexHeight - MoveRect->SourcePoint.y;
-
- DestRect->left = TexWidth - MoveRect->DestinationRect.right;
- DestRect->top = TexHeight - MoveRect->DestinationRect.bottom;
- DestRect->right = TexWidth - MoveRect->DestinationRect.left;
- DestRect->bottom = TexHeight - MoveRect->DestinationRect.top;
- break;
- }
- case DXGI_MODE_ROTATION_ROTATE270:
- {
- SrcRect->left = MoveRect->SourcePoint.x;
- SrcRect->top = TexWidth - (MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left);
- SrcRect->right = MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top;
- SrcRect->bottom = TexWidth - MoveRect->SourcePoint.x;
-
- DestRect->left = MoveRect->DestinationRect.top;
- DestRect->top = TexWidth - MoveRect->DestinationRect.right;
- DestRect->right = MoveRect->DestinationRect.bottom;
- DestRect->bottom = TexWidth - MoveRect->DestinationRect.left;
- break;
- }
- default:
- {
- RtlZeroMemory(DestRect, sizeof(RECT));
- RtlZeroMemory(SrcRect, sizeof(RECT));
- break;
- }
+ switch (DeskDesc->Rotation) {
+ case DXGI_MODE_ROTATION_UNSPECIFIED:
+ case DXGI_MODE_ROTATION_IDENTITY: {
+ SrcRect->left = MoveRect->SourcePoint.x;
+ SrcRect->top = MoveRect->SourcePoint.y;
+ SrcRect->right = MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left;
+ SrcRect->bottom = MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top;
+
+ *DestRect = MoveRect->DestinationRect;
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE90: {
+ SrcRect->left = TexHeight - (MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top);
+ SrcRect->top = MoveRect->SourcePoint.x;
+ SrcRect->right = TexHeight - MoveRect->SourcePoint.y;
+ SrcRect->bottom = MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left;
+
+ DestRect->left = TexHeight - MoveRect->DestinationRect.bottom;
+ DestRect->top = MoveRect->DestinationRect.left;
+ DestRect->right = TexHeight - MoveRect->DestinationRect.top;
+ DestRect->bottom = MoveRect->DestinationRect.right;
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE180: {
+ SrcRect->left = TexWidth - (MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left);
+ SrcRect->top = TexHeight - (MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top);
+ SrcRect->right = TexWidth - MoveRect->SourcePoint.x;
+ SrcRect->bottom = TexHeight - MoveRect->SourcePoint.y;
+
+ DestRect->left = TexWidth - MoveRect->DestinationRect.right;
+ DestRect->top = TexHeight - MoveRect->DestinationRect.bottom;
+ DestRect->right = TexWidth - MoveRect->DestinationRect.left;
+ DestRect->bottom = TexHeight - MoveRect->DestinationRect.top;
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE270: {
+ SrcRect->left = MoveRect->SourcePoint.x;
+ SrcRect->top = TexWidth - (MoveRect->SourcePoint.x + MoveRect->DestinationRect.right - MoveRect->DestinationRect.left);
+ SrcRect->right = MoveRect->SourcePoint.y + MoveRect->DestinationRect.bottom - MoveRect->DestinationRect.top;
+ SrcRect->bottom = TexWidth - MoveRect->SourcePoint.x;
+
+ DestRect->left = MoveRect->DestinationRect.top;
+ DestRect->top = TexWidth - MoveRect->DestinationRect.right;
+ DestRect->right = MoveRect->DestinationRect.bottom;
+ DestRect->bottom = TexWidth - MoveRect->DestinationRect.left;
+ break;
+ }
+ default: {
+ RtlZeroMemory(DestRect, sizeof(RECT));
+ RtlZeroMemory(SrcRect, sizeof(RECT));
+ break;
+ }
}
}
@@ -172,8 +161,7 @@ DUPL_RETURN DISPLAYMANAGER::CopyMove(_Inout_ ID3D11Texture2D* SharedSurf, _In_re
SharedSurf->GetDesc(&FullDesc);
// Make new intermediate surface to copy into for moving
- if (!m_MoveSurf)
- {
+ if (!m_MoveSurf) {
D3D11_TEXTURE2D_DESC MoveDesc;
MoveDesc = FullDesc;
MoveDesc.Width = DeskDesc->DesktopCoordinates.right - DeskDesc->DesktopCoordinates.left;
@@ -181,14 +169,12 @@ DUPL_RETURN DISPLAYMANAGER::CopyMove(_Inout_ ID3D11Texture2D* SharedSurf, _In_re
MoveDesc.BindFlags = D3D11_BIND_RENDER_TARGET;
MoveDesc.MiscFlags = 0;
HRESULT hr = m_Device->CreateTexture2D(&MoveDesc, nullptr, &m_MoveSurf);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create staging texture for move rects", L"Error", hr, SystemTransitionsExpectedErrors);
}
}
- for (UINT i = 0; i < MoveCount; ++i)
- {
+ for (UINT i = 0; i < MoveCount; ++i) {
RECT SrcRect;
RECT DestRect;
@@ -235,75 +221,70 @@ void DISPLAYMANAGER::SetDirtyVert(_Out_writes_(NUMVERTICES) VERTEX* Vertices, _I
RECT DestDirty = *Dirty;
// Set appropriate coordinates compensated for rotation
- switch (DeskDesc->Rotation)
- {
- case DXGI_MODE_ROTATION_ROTATE90:
- {
- DestDirty.left = Width - Dirty->bottom;
- DestDirty.top = Dirty->left;
- DestDirty.right = Width - Dirty->top;
- DestDirty.bottom = Dirty->right;
-
- Vertices[0].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[1].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[2].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[5].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
- break;
- }
- case DXGI_MODE_ROTATION_ROTATE180:
- {
- DestDirty.left = Width - Dirty->right;
- DestDirty.top = Height - Dirty->bottom;
- DestDirty.right = Width - Dirty->left;
- DestDirty.bottom = Height - Dirty->top;
-
- Vertices[0].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[1].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[2].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[5].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
- break;
- }
- case DXGI_MODE_ROTATION_ROTATE270:
- {
- DestDirty.left = Dirty->top;
- DestDirty.top = Height - Dirty->right;
- DestDirty.right = Dirty->bottom;
- DestDirty.bottom = Height - Dirty->left;
-
- Vertices[0].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[1].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[2].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[5].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
- break;
- }
- default:
- assert(false); // drop through
- case DXGI_MODE_ROTATION_UNSPECIFIED:
- case DXGI_MODE_ROTATION_IDENTITY:
- {
- Vertices[0].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[1].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[2].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
- Vertices[5].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
- break;
- }
+ switch (DeskDesc->Rotation) {
+ case DXGI_MODE_ROTATION_ROTATE90: {
+ DestDirty.left = Width - Dirty->bottom;
+ DestDirty.top = Dirty->left;
+ DestDirty.right = Width - Dirty->top;
+ DestDirty.bottom = Dirty->right;
+
+ Vertices[0].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[1].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[2].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[5].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE180: {
+ DestDirty.left = Width - Dirty->right;
+ DestDirty.top = Height - Dirty->bottom;
+ DestDirty.right = Width - Dirty->left;
+ DestDirty.bottom = Height - Dirty->top;
+
+ Vertices[0].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[1].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[2].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[5].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ break;
+ }
+ case DXGI_MODE_ROTATION_ROTATE270: {
+ DestDirty.left = Dirty->top;
+ DestDirty.top = Height - Dirty->right;
+ DestDirty.right = Dirty->bottom;
+ DestDirty.bottom = Height - Dirty->left;
+
+ Vertices[0].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[1].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[2].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[5].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ break;
+ }
+ default:
+ assert(false); // drop through
+ case DXGI_MODE_ROTATION_UNSPECIFIED:
+ case DXGI_MODE_ROTATION_IDENTITY: {
+ Vertices[0].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[1].TexCoord = XMFLOAT2(Dirty->left / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[2].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->bottom / static_cast<FLOAT>(ThisDesc->Height));
+ Vertices[5].TexCoord = XMFLOAT2(Dirty->right / static_cast<FLOAT>(ThisDesc->Width), Dirty->top / static_cast<FLOAT>(ThisDesc->Height));
+ break;
+ }
}
// Set positions
Vertices[0].Pos = XMFLOAT3((DestDirty.left + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
- -1 * (DestDirty.bottom + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
- 0.0f);
+ -1 * (DestDirty.bottom + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
+ 0.0f);
Vertices[1].Pos = XMFLOAT3((DestDirty.left + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
- -1 * (DestDirty.top + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
- 0.0f);
+ -1 * (DestDirty.top + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
+ 0.0f);
Vertices[2].Pos = XMFLOAT3((DestDirty.right + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
- -1 * (DestDirty.bottom + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
- 0.0f);
+ -1 * (DestDirty.bottom + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
+ 0.0f);
Vertices[3].Pos = Vertices[2].Pos;
Vertices[4].Pos = Vertices[1].Pos;
Vertices[5].Pos = XMFLOAT3((DestDirty.right + DeskDesc->DesktopCoordinates.left - OffsetX - CenterX) / static_cast<FLOAT>(CenterX),
- -1 * (DestDirty.top + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
- 0.0f);
+ -1 * (DestDirty.top + DeskDesc->DesktopCoordinates.top - OffsetY - CenterY) / static_cast<FLOAT>(CenterY),
+ 0.0f);
Vertices[3].TexCoord = Vertices[2].TexCoord;
Vertices[4].TexCoord = Vertices[1].TexCoord;
@@ -324,11 +305,9 @@ DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_
D3D11_TEXTURE2D_DESC ThisDesc;
SrcSurface->GetDesc(&ThisDesc);
- if (!m_RTV)
- {
+ if (!m_RTV) {
hr = m_Device->CreateRenderTargetView(SharedSurf, nullptr, &m_RTV);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create render target view for dirty rects", L"Error", hr, SystemTransitionsExpectedErrors);
}
}
@@ -342,8 +321,7 @@ DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_
// Create new shader resource view
ID3D11ShaderResourceView* ShaderResource = nullptr;
hr = m_Device->CreateShaderResourceView(SrcSurface, &ShaderDesc, &ShaderResource);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create shader resource view for dirty rects", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -358,16 +336,13 @@ DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_
// Create space for vertices for the dirty rects if the current space isn't large enough
UINT BytesNeeded = sizeof(VERTEX) * NUMVERTICES * DirtyCount;
- if (BytesNeeded > m_DirtyVertexBufferAllocSize)
- {
- if (m_DirtyVertexBufferAlloc)
- {
+ if (BytesNeeded > m_DirtyVertexBufferAllocSize) {
+ if (m_DirtyVertexBufferAlloc) {
delete [] m_DirtyVertexBufferAlloc;
}
m_DirtyVertexBufferAlloc = new (std::nothrow) BYTE[BytesNeeded];
- if (!m_DirtyVertexBufferAlloc)
- {
+ if (!m_DirtyVertexBufferAlloc) {
m_DirtyVertexBufferAllocSize = 0;
return ProcessFailure(nullptr, L"Failed to allocate memory for dirty vertex buffer.", L"Error", E_OUTOFMEMORY);
}
@@ -377,8 +352,7 @@ DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_
// Fill them in
VERTEX* DirtyVertex = reinterpret_cast<VERTEX*>(m_DirtyVertexBufferAlloc);
- for (UINT i = 0; i < DirtyCount; ++i, DirtyVertex += NUMVERTICES)
- {
+ for (UINT i = 0; i < DirtyCount; ++i, DirtyVertex += NUMVERTICES) {
SetDirtyVert(DirtyVertex, &(DirtyBuffer[i]), OffsetX, OffsetY, DeskDesc, &FullDesc, &ThisDesc);
}
@@ -395,8 +369,7 @@ DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_
ID3D11Buffer* VertBuf = nullptr;
hr = m_Device->CreateBuffer(&BufferDesc, &InitData, &VertBuf);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create vertex buffer in dirty rect processing", L"Error", hr, SystemTransitionsExpectedErrors);
}
UINT Stride = sizeof(VERTEX);
@@ -428,50 +401,42 @@ DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_
//
void DISPLAYMANAGER::CleanRefs()
{
- if (m_DeviceContext)
- {
+ if (m_DeviceContext) {
m_DeviceContext->Release();
m_DeviceContext = nullptr;
}
- if (m_Device)
- {
+ if (m_Device) {
m_Device->Release();
m_Device = nullptr;
}
- if (m_MoveSurf)
- {
+ if (m_MoveSurf) {
m_MoveSurf->Release();
m_MoveSurf = nullptr;
}
- if (m_VertexShader)
- {
+ if (m_VertexShader) {
m_VertexShader->Release();
m_VertexShader = nullptr;
}
- if (m_PixelShader)
- {
+ if (m_PixelShader) {
m_PixelShader->Release();
m_PixelShader = nullptr;
}
- if (m_InputLayout)
- {
+ if (m_InputLayout) {
m_InputLayout->Release();
m_InputLayout = nullptr;
}
- if (m_SamplerLinear)
- {
+ if (m_SamplerLinear) {
m_SamplerLinear->Release();
m_SamplerLinear = nullptr;
}
- if (m_RTV)
- {
+ if (m_RTV) {
m_RTV->Release();
m_RTV = nullptr;
}
diff --git a/plugins/pluginWinDD/internals/DisplayManager.h b/plugins/pluginWinDD/internals/DisplayManager.h
index f9bf69c..3308ecb 100755
--- a/plugins/pluginWinDD/internals/DisplayManager.h
+++ b/plugins/pluginWinDD/internals/DisplayManager.h
@@ -15,32 +15,32 @@
//
class DISPLAYMANAGER
{
- public:
- DISPLAYMANAGER();
- ~DISPLAYMANAGER();
- void InitD3D(DX_RESOURCES* Data);
- ID3D11Device* GetDevice();
- DUPL_RETURN ProcessFrame(_In_ FRAME_DATA* Data, _Inout_ ID3D11Texture2D* SharedSurf, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc);
- void CleanRefs();
+public:
+ DISPLAYMANAGER();
+ ~DISPLAYMANAGER();
+ void InitD3D(DX_RESOURCES* Data);
+ ID3D11Device* GetDevice();
+ DUPL_RETURN ProcessFrame(_In_ FRAME_DATA* Data, _Inout_ ID3D11Texture2D* SharedSurf, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc);
+ void CleanRefs();
- private:
+private:
// methods
- DUPL_RETURN CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(DirtyCount) RECT* DirtyBuffer, UINT DirtyCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc);
- DUPL_RETURN CopyMove(_Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(MoveCount) DXGI_OUTDUPL_MOVE_RECT* MoveBuffer, UINT MoveCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, INT TexWidth, INT TexHeight);
- void SetDirtyVert(_Out_writes_(NUMVERTICES) VERTEX* Vertices, _In_ RECT* Dirty, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ D3D11_TEXTURE2D_DESC* FullDesc, _In_ D3D11_TEXTURE2D_DESC* ThisDesc);
- void SetMoveRect(_Out_ RECT* SrcRect, _Out_ RECT* DestRect, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ DXGI_OUTDUPL_MOVE_RECT* MoveRect, INT TexWidth, INT TexHeight);
+ DUPL_RETURN CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(DirtyCount) RECT* DirtyBuffer, UINT DirtyCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc);
+ DUPL_RETURN CopyMove(_Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(MoveCount) DXGI_OUTDUPL_MOVE_RECT* MoveBuffer, UINT MoveCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, INT TexWidth, INT TexHeight);
+ void SetDirtyVert(_Out_writes_(NUMVERTICES) VERTEX* Vertices, _In_ RECT* Dirty, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ D3D11_TEXTURE2D_DESC* FullDesc, _In_ D3D11_TEXTURE2D_DESC* ThisDesc);
+ void SetMoveRect(_Out_ RECT* SrcRect, _Out_ RECT* DestRect, _In_ DXGI_OUTPUT_DESC* DeskDesc, _In_ DXGI_OUTDUPL_MOVE_RECT* MoveRect, INT TexWidth, INT TexHeight);
// variables
- ID3D11Device* m_Device;
- ID3D11DeviceContext* m_DeviceContext;
- ID3D11Texture2D* m_MoveSurf;
- ID3D11VertexShader* m_VertexShader;
- ID3D11PixelShader* m_PixelShader;
- ID3D11InputLayout* m_InputLayout;
- ID3D11RenderTargetView* m_RTV;
- ID3D11SamplerState* m_SamplerLinear;
- BYTE* m_DirtyVertexBufferAlloc;
- UINT m_DirtyVertexBufferAllocSize;
+ ID3D11Device* m_Device;
+ ID3D11DeviceContext* m_DeviceContext;
+ ID3D11Texture2D* m_MoveSurf;
+ ID3D11VertexShader* m_VertexShader;
+ ID3D11PixelShader* m_PixelShader;
+ ID3D11InputLayout* m_InputLayout;
+ ID3D11RenderTargetView* m_RTV;
+ ID3D11SamplerState* m_SamplerLinear;
+ BYTE* m_DirtyVertexBufferAlloc;
+ UINT m_DirtyVertexBufferAllocSize;
};
#endif
diff --git a/plugins/pluginWinDD/internals/DuplicationManager.cxx b/plugins/pluginWinDD/internals/DuplicationManager.cxx
index 995c8ec..34ec896 100755
--- a/plugins/pluginWinDD/internals/DuplicationManager.cxx
+++ b/plugins/pluginWinDD/internals/DuplicationManager.cxx
@@ -12,24 +12,24 @@
#include <Mfapi.h>
static inline HRESULT CopyRGBb32DownTop(
- BYTE* pDst,
- const BYTE* pSrc,
- INT dwWidthDstPixels,
- INT dwWidthSrcPixels,
- INT dwHeightPixels
- );
+ BYTE* pDst,
+ const BYTE* pSrc,
+ INT dwWidthDstPixels,
+ INT dwWidthSrcPixels,
+ INT dwHeightPixels
+);
//
// Constructor sets up references / variables
//
DUPLICATIONMANAGER::DUPLICATIONMANAGER() : m_DeskDupl(nullptr),
- m_AcquiredDesktopImage(nullptr),
- m_MetaDataBuffer(nullptr),
- m_MetaDataSize(0),
- m_OutputNumber(0),
- m_Device(nullptr),
- m_DeviceContext(nullptr),
- m_BufferPtr(nullptr),
- m_BufferSize(0)
+ m_AcquiredDesktopImage(nullptr),
+ m_MetaDataBuffer(nullptr),
+ m_MetaDataSize(0),
+ m_OutputNumber(0),
+ m_Device(nullptr),
+ m_DeviceContext(nullptr),
+ m_BufferPtr(nullptr),
+ m_BufferSize(0)
{
RtlZeroMemory(&m_OutputDesc, sizeof(m_OutputDesc));
}
@@ -39,41 +39,35 @@ DUPLICATIONMANAGER::DUPLICATIONMANAGER() : m_DeskDupl(nullptr),
//
DUPLICATIONMANAGER::~DUPLICATIONMANAGER()
{
- if (m_DeskDupl)
- {
+ if (m_DeskDupl) {
m_DeskDupl->Release();
m_DeskDupl = nullptr;
}
- if (m_AcquiredDesktopImage)
- {
+ if (m_AcquiredDesktopImage) {
m_AcquiredDesktopImage->Release();
m_AcquiredDesktopImage = nullptr;
}
- if (m_MetaDataBuffer)
- {
+ if (m_MetaDataBuffer) {
delete [] m_MetaDataBuffer;
m_MetaDataBuffer = nullptr;
}
- if (m_DeviceContext)
- {
- m_DeviceContext->Release();
- m_DeviceContext = nullptr;
- }
+ if (m_DeviceContext) {
+ m_DeviceContext->Release();
+ m_DeviceContext = nullptr;
+ }
- if (m_Device)
- {
+ if (m_Device) {
m_Device->Release();
m_Device = nullptr;
}
- if (m_BufferPtr)
- {
- VirtualFree(m_BufferPtr, 0, MEM_RELEASE);
- m_BufferPtr = nullptr;
- }
+ if (m_BufferPtr) {
+ VirtualFree(m_BufferPtr, 0, MEM_RELEASE);
+ m_BufferPtr = nullptr;
+ }
}
//
@@ -87,14 +81,13 @@ DUPL_RETURN DUPLICATIONMANAGER::InitDupl(_In_ ID3D11Device* Device, ID3D11Device
m_Device = Device;
m_Device->AddRef();
- m_DeviceContext = DeviceContext;
- m_DeviceContext->AddRef();
+ m_DeviceContext = DeviceContext;
+ m_DeviceContext->AddRef();
// Get DXGI device
IDXGIDevice* DxgiDevice = nullptr;
HRESULT hr = m_Device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&DxgiDevice));
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(nullptr, L"Failed to QI for DXGI Device", L"Error", hr);
}
@@ -103,8 +96,7 @@ DUPL_RETURN DUPLICATIONMANAGER::InitDupl(_In_ ID3D11Device* Device, ID3D11Device
hr = DxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&DxgiAdapter));
DxgiDevice->Release();
DxgiDevice = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to get parent DXGI Adapter", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -113,8 +105,7 @@ DUPL_RETURN DUPLICATIONMANAGER::InitDupl(_In_ ID3D11Device* Device, ID3D11Device
hr = DxgiAdapter->EnumOutputs(Output, &DxgiOutput);
DxgiAdapter->Release();
DxgiAdapter = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to get specified output in DUPLICATIONMANAGER", L"Error", hr, EnumOutputsExpectedErrors);
}
@@ -125,8 +116,7 @@ DUPL_RETURN DUPLICATIONMANAGER::InitDupl(_In_ ID3D11Device* Device, ID3D11Device
hr = DxgiOutput->QueryInterface(__uuidof(DxgiOutput1), reinterpret_cast<void**>(&DxgiOutput1));
DxgiOutput->Release();
DxgiOutput = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(nullptr, L"Failed to QI for DxgiOutput1 in DUPLICATIONMANAGER", L"Error", hr);
}
@@ -134,10 +124,8 @@ DUPL_RETURN DUPLICATIONMANAGER::InitDupl(_In_ ID3D11Device* Device, ID3D11Device
hr = DxgiOutput1->DuplicateOutput(m_Device, &m_DeskDupl);
DxgiOutput1->Release();
DxgiOutput1 = nullptr;
- if (FAILED(hr))
- {
- if (hr == DXGI_ERROR_NOT_CURRENTLY_AVAILABLE)
- {
+ if (FAILED(hr)) {
+ if (hr == DXGI_ERROR_NOT_CURRENTLY_AVAILABLE) {
MessageBoxW(nullptr, L"There is already the maximum number of applications using the Desktop Duplication API running, please close one of those applications and then try again.", L"Error", MB_OK);
return DUPL_RETURN_ERROR_UNEXPECTED;
}
@@ -153,8 +141,7 @@ DUPL_RETURN DUPLICATIONMANAGER::InitDupl(_In_ ID3D11Device* Device, ID3D11Device
DUPL_RETURN DUPLICATIONMANAGER::GetMouse(_Inout_ PTR_INFO* PtrInfo, _In_ DXGI_OUTDUPL_FRAME_INFO* FrameInfo, INT OffsetX, INT OffsetY)
{
// A non-zero mouse update timestamp indicates that there is a mouse position update and optionally a shape change
- if (FrameInfo->LastMouseUpdateTime.QuadPart == 0)
- {
+ if (FrameInfo->LastMouseUpdateTime.QuadPart == 0) {
return DUPL_RETURN_SUCCESS;
}
@@ -163,20 +150,17 @@ DUPL_RETURN DUPLICATIONMANAGER::GetMouse(_Inout_ PTR_INFO* PtrInfo, _In_ DXGI_OU
// Make sure we don't update pointer position wrongly
// If pointer is invisible, make sure we did not get an update from another output that the last time that said pointer
// was visible, if so, don't set it to invisible or update.
- if (!FrameInfo->PointerPosition.Visible && (PtrInfo->WhoUpdatedPositionLast != m_OutputNumber))
- {
+ if (!FrameInfo->PointerPosition.Visible && (PtrInfo->WhoUpdatedPositionLast != m_OutputNumber)) {
UpdatePosition = false;
}
// If two outputs both say they have a visible, only update if new update has newer timestamp
- if (FrameInfo->PointerPosition.Visible && PtrInfo->Visible && (PtrInfo->WhoUpdatedPositionLast != m_OutputNumber) && (PtrInfo->LastTimeStamp.QuadPart > FrameInfo->LastMouseUpdateTime.QuadPart))
- {
+ if (FrameInfo->PointerPosition.Visible && PtrInfo->Visible && (PtrInfo->WhoUpdatedPositionLast != m_OutputNumber) && (PtrInfo->LastTimeStamp.QuadPart > FrameInfo->LastMouseUpdateTime.QuadPart)) {
UpdatePosition = false;
}
// Update position
- if (UpdatePosition)
- {
+ if (UpdatePosition) {
PtrInfo->Position.x = FrameInfo->PointerPosition.Position.x + m_OutputDesc.DesktopCoordinates.left - OffsetX;
PtrInfo->Position.y = FrameInfo->PointerPosition.Position.y + m_OutputDesc.DesktopCoordinates.top - OffsetY;
PtrInfo->WhoUpdatedPositionLast = m_OutputNumber;
@@ -185,22 +169,18 @@ DUPL_RETURN DUPLICATIONMANAGER::GetMouse(_Inout_ PTR_INFO* PtrInfo, _In_ DXGI_OU
}
// No new shape
- if (FrameInfo->PointerShapeBufferSize == 0)
- {
+ if (FrameInfo->PointerShapeBufferSize == 0) {
return DUPL_RETURN_SUCCESS;
}
// Old buffer too small
- if (FrameInfo->PointerShapeBufferSize > PtrInfo->BufferSize)
- {
- if (PtrInfo->PtrShapeBuffer)
- {
+ if (FrameInfo->PointerShapeBufferSize > PtrInfo->BufferSize) {
+ if (PtrInfo->PtrShapeBuffer) {
delete [] PtrInfo->PtrShapeBuffer;
PtrInfo->PtrShapeBuffer = nullptr;
}
PtrInfo->PtrShapeBuffer = new (std::nothrow) BYTE[FrameInfo->PointerShapeBufferSize];
- if (!PtrInfo->PtrShapeBuffer)
- {
+ if (!PtrInfo->PtrShapeBuffer) {
PtrInfo->BufferSize = 0;
return ProcessFailure(nullptr, L"Failed to allocate memory for pointer shape in DUPLICATIONMANAGER", L"Error", E_OUTOFMEMORY);
}
@@ -212,8 +192,7 @@ DUPL_RETURN DUPLICATIONMANAGER::GetMouse(_Inout_ PTR_INFO* PtrInfo, _In_ DXGI_OU
// Get shape
UINT BufferSizeRequired;
HRESULT hr = m_DeskDupl->GetFramePointerShape(FrameInfo->PointerShapeBufferSize, reinterpret_cast<VOID*>(PtrInfo->PtrShapeBuffer), &BufferSizeRequired, &(PtrInfo->ShapeInfo));
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
delete [] PtrInfo->PtrShapeBuffer;
PtrInfo->PtrShapeBuffer = nullptr;
PtrInfo->BufferSize = 0;
@@ -235,21 +214,18 @@ DUPL_RETURN DUPLICATIONMANAGER::GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Tim
// Get new frame
HRESULT hr = m_DeskDupl->AcquireNextFrame(500, &FrameInfo, &DesktopResource);
- if (hr == DXGI_ERROR_WAIT_TIMEOUT)
- {
+ if (hr == DXGI_ERROR_WAIT_TIMEOUT) {
*Timeout = true;
return DUPL_RETURN_SUCCESS;
}
*Timeout = false;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to acquire next frame in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
}
// If still holding old frame, destroy it
- if (m_AcquiredDesktopImage)
- {
+ if (m_AcquiredDesktopImage) {
m_AcquiredDesktopImage->Release();
m_AcquiredDesktopImage = nullptr;
}
@@ -258,25 +234,20 @@ DUPL_RETURN DUPLICATIONMANAGER::GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Tim
hr = DesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void **>(&m_AcquiredDesktopImage));
DesktopResource->Release();
DesktopResource = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(nullptr, L"Failed to QI for ID3D11Texture2D from acquired IDXGIResource in DUPLICATIONMANAGER", L"Error", hr);
}
// Get metadata
- if (FrameInfo.TotalMetadataBufferSize)
- {
+ if (FrameInfo.TotalMetadataBufferSize) {
// Old buffer too small
- if (FrameInfo.TotalMetadataBufferSize > m_MetaDataSize)
- {
- if (m_MetaDataBuffer)
- {
+ if (FrameInfo.TotalMetadataBufferSize > m_MetaDataSize) {
+ if (m_MetaDataBuffer) {
delete [] m_MetaDataBuffer;
m_MetaDataBuffer = nullptr;
}
m_MetaDataBuffer = new (std::nothrow) BYTE[FrameInfo.TotalMetadataBufferSize];
- if (!m_MetaDataBuffer)
- {
+ if (!m_MetaDataBuffer) {
m_MetaDataSize = 0;
Data->MoveCount = 0;
Data->DirtyCount = 0;
@@ -289,8 +260,7 @@ DUPL_RETURN DUPLICATIONMANAGER::GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Tim
// Get move rectangles
hr = m_DeskDupl->GetFrameMoveRects(BufSize, reinterpret_cast<DXGI_OUTDUPL_MOVE_RECT*>(m_MetaDataBuffer), &BufSize);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
Data->MoveCount = 0;
Data->DirtyCount = 0;
return ProcessFailure(nullptr, L"Failed to get frame move rects in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
@@ -302,8 +272,7 @@ DUPL_RETURN DUPLICATIONMANAGER::GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Tim
// Get dirty rectangles
hr = m_DeskDupl->GetFrameDirtyRects(BufSize, reinterpret_cast<RECT*>(DirtyRects), &BufSize);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
Data->MoveCount = 0;
Data->DirtyCount = 0;
return ProcessFailure(nullptr, L"Failed to get frame dirty rects in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
@@ -325,13 +294,11 @@ DUPL_RETURN DUPLICATIONMANAGER::GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Tim
DUPL_RETURN DUPLICATIONMANAGER::DoneWithFrame()
{
HRESULT hr = m_DeskDupl->ReleaseFrame();
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to release frame in DUPLICATIONMANAGER", L"Error", hr, FrameInfoExpectedErrors);
}
- if (m_AcquiredDesktopImage)
- {
+ if (m_AcquiredDesktopImage) {
m_AcquiredDesktopImage->Release();
m_AcquiredDesktopImage = nullptr;
}
@@ -350,150 +317,138 @@ void DUPLICATIONMANAGER::GetOutputDesc(_Out_ DXGI_OUTPUT_DESC* DescPtr)
HRESULT DUPLICATIONMANAGER::SendData(struct tmedia_producer_s* pProducer, FRAME_DATA* FrameData)
{
- HRESULT hr = E_FAIL;
- D3D11_TEXTURE2D_DESC CopyBufferDesc = {0};
- D3D11_TEXTURE2D_DESC FullDesc;
- DXGI_MAPPED_RECT MappedSurface;
- D3D11_BOX Box;
- UINT BuffSize;
-
- ID3D11Texture2D* CopyBuffer = nullptr;
- IDXGISurface* CopySurface = nullptr;
- ID3D11Device* Device = nullptr;
-
- FrameData->Frame->GetDesc(&FullDesc);
-
- CopyBufferDesc.Width = FullDesc.Width;
- CopyBufferDesc.Height = FullDesc.Height;
- CopyBufferDesc.MipLevels = 1;
- CopyBufferDesc.ArraySize = 1;
- CopyBufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
- CopyBufferDesc.SampleDesc.Count = 1;
- CopyBufferDesc.SampleDesc.Quality = 0;
- CopyBufferDesc.Usage = D3D11_USAGE_STAGING;
- CopyBufferDesc.BindFlags = 0;
- CopyBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
- CopyBufferDesc.MiscFlags = 0;
-
- FrameData->Frame->GetDevice(&Device);
- if (!Device)
- {
- hr = E_POINTER;
- ProcessFailure(m_Device, L"Failed to get device", L"Error", hr, SystemTransitionsExpectedErrors);
- goto bail;
- }
-
- hr = Device->CreateTexture2D(&CopyBufferDesc, nullptr, &CopyBuffer);
- if (FAILED(hr))
- {
- ProcessFailure(m_Device, L"Failed creating staging texture for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
- goto bail;
- }
-
- Box.left = 0;
- Box.top = 0;
- Box.right = CopyBufferDesc.Width;
- Box.bottom = CopyBufferDesc.Height;
- Box.front = 0;
- Box.back = 1;
- m_DeviceContext->CopySubresourceRegion(CopyBuffer, 0, 0, 0, 0, FrameData->Frame, 0, &Box);
-
- hr = CopyBuffer->QueryInterface(__uuidof(IDXGISurface), (void **)&CopySurface);
- if (FAILED(hr))
- {
- ProcessFailure(nullptr, L"Failed to QI staging texture into IDXGISurface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
- goto bail;
- }
-
- BuffSize = CopyBufferDesc.Width * CopyBufferDesc.Height * 4;
- if (m_BufferSize < BuffSize)
- {
- if (m_BufferPtr)
- {
- VirtualFree(m_BufferPtr, 0, MEM_RELEASE);
- m_BufferSize = 0;
- }
- if (!(m_BufferPtr = (BYTE*)VirtualAlloc(NULL, BuffSize, MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE)))
- {
- ProcessFailure(Device, L"Failed to allocate memory", L"Error", hr, SystemTransitionsExpectedErrors);
- goto bail;
- }
- m_BufferSize = BuffSize;
- }
-
- hr = CopySurface->Map(&MappedSurface, DXGI_MAP_READ); // *** MAP *** //
- if (FAILED(hr))
- {
- ProcessFailure(Device, L"Failed to map surface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
- goto bail;
- }
-
- pProducer->video.width = CopyBufferDesc.Width;
- pProducer->video.height = CopyBufferDesc.Height;
+ HRESULT hr = E_FAIL;
+ D3D11_TEXTURE2D_DESC CopyBufferDesc = {0};
+ D3D11_TEXTURE2D_DESC FullDesc;
+ DXGI_MAPPED_RECT MappedSurface;
+ D3D11_BOX Box;
+ UINT BuffSize;
+
+ ID3D11Texture2D* CopyBuffer = nullptr;
+ IDXGISurface* CopySurface = nullptr;
+ ID3D11Device* Device = nullptr;
+
+ FrameData->Frame->GetDesc(&FullDesc);
+
+ CopyBufferDesc.Width = FullDesc.Width;
+ CopyBufferDesc.Height = FullDesc.Height;
+ CopyBufferDesc.MipLevels = 1;
+ CopyBufferDesc.ArraySize = 1;
+ CopyBufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+ CopyBufferDesc.SampleDesc.Count = 1;
+ CopyBufferDesc.SampleDesc.Quality = 0;
+ CopyBufferDesc.Usage = D3D11_USAGE_STAGING;
+ CopyBufferDesc.BindFlags = 0;
+ CopyBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+ CopyBufferDesc.MiscFlags = 0;
+
+ FrameData->Frame->GetDevice(&Device);
+ if (!Device) {
+ hr = E_POINTER;
+ ProcessFailure(m_Device, L"Failed to get device", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+
+ hr = Device->CreateTexture2D(&CopyBufferDesc, nullptr, &CopyBuffer);
+ if (FAILED(hr)) {
+ ProcessFailure(m_Device, L"Failed creating staging texture for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+
+ Box.left = 0;
+ Box.top = 0;
+ Box.right = CopyBufferDesc.Width;
+ Box.bottom = CopyBufferDesc.Height;
+ Box.front = 0;
+ Box.back = 1;
+ m_DeviceContext->CopySubresourceRegion(CopyBuffer, 0, 0, 0, 0, FrameData->Frame, 0, &Box);
+
+ hr = CopyBuffer->QueryInterface(__uuidof(IDXGISurface), (void **)&CopySurface);
+ if (FAILED(hr)) {
+ ProcessFailure(nullptr, L"Failed to QI staging texture into IDXGISurface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+
+ BuffSize = CopyBufferDesc.Width * CopyBufferDesc.Height * 4;
+ if (m_BufferSize < BuffSize) {
+ if (m_BufferPtr) {
+ VirtualFree(m_BufferPtr, 0, MEM_RELEASE);
+ m_BufferSize = 0;
+ }
+ if (!(m_BufferPtr = (BYTE*)VirtualAlloc(NULL, BuffSize, MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE))) {
+ ProcessFailure(Device, L"Failed to allocate memory", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+ m_BufferSize = BuffSize;
+ }
+
+ hr = CopySurface->Map(&MappedSurface, DXGI_MAP_READ); // *** MAP *** //
+ if (FAILED(hr)) {
+ ProcessFailure(Device, L"Failed to map surface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto bail;
+ }
+
+ pProducer->video.width = CopyBufferDesc.Width;
+ pProducer->video.height = CopyBufferDesc.Height;
#if 0
- hr = MFCopyImage(
- m_BufferPtr,
- (LONG)(CopyBufferDesc.Width << 2),
- (BYTE*)MappedSurface.pBits,
- (LONG)MappedSurface.Pitch,
- (DWORD)(CopyBufferDesc.Width << 2),
- (DWORD)CopyBufferDesc.Height
- );
+ hr = MFCopyImage(
+ m_BufferPtr,
+ (LONG)(CopyBufferDesc.Width << 2),
+ (BYTE*)MappedSurface.pBits,
+ (LONG)MappedSurface.Pitch,
+ (DWORD)(CopyBufferDesc.Width << 2),
+ (DWORD)CopyBufferDesc.Height
+ );
#else
- hr = CopyRGBb32DownTop(
- m_BufferPtr,
- MappedSurface.pBits,
- CopyBufferDesc.Width,
- (MappedSurface.Pitch >> 2), // Bytes -> Pixels
- CopyBufferDesc.Height);
+ hr = CopyRGBb32DownTop(
+ m_BufferPtr,
+ MappedSurface.pBits,
+ CopyBufferDesc.Width,
+ (MappedSurface.Pitch >> 2), // Bytes -> Pixels
+ CopyBufferDesc.Height);
#endif
- pProducer->enc_cb.callback(pProducer->enc_cb.callback_data, m_BufferPtr, BuffSize);
+ pProducer->enc_cb.callback(pProducer->enc_cb.callback_data, m_BufferPtr, BuffSize);
- CopySurface->Unmap(); // *** UNMAP *** //
+ CopySurface->Unmap(); // *** UNMAP *** //
bail:
- if (CopyBuffer)
- {
- CopyBuffer->Release();
- }
- if (CopySurface)
- {
- CopySurface->Release();
- }
- if (Device)
- {
- Device->Release();
- }
- return hr;
+ if (CopyBuffer) {
+ CopyBuffer->Release();
+ }
+ if (CopySurface) {
+ CopySurface->Release();
+ }
+ if (Device) {
+ Device->Release();
+ }
+ return hr;
}
// For RGB32:
// Direct3D -> Top-Down
// Video Processor -> Down-Top
static inline HRESULT CopyRGBb32DownTop(
- BYTE* pDst,
- const BYTE* pSrc,
- INT dwWidthDstPixels,
- INT dwWidthSrcPixels,
- INT dwHeightPixels
- )
+ BYTE* pDst,
+ const BYTE* pSrc,
+ INT dwWidthDstPixels,
+ INT dwWidthSrcPixels,
+ INT dwHeightPixels
+)
{
- RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthSrcPixels * dwHeightPixels) - dwWidthSrcPixels];
- RGBQUAD *pDestPixel = &((RGBQUAD*)pDst)[0];
-
- register INT x;
- register INT y;
-
- for (y = dwHeightPixels; y > 0; --y)
- {
- for (x = 0; x < dwWidthDstPixels; ++x)
- {
- pDestPixel[x] = pSrcPixel[x];
- }
- pDestPixel += dwWidthDstPixels;
- pSrcPixel -= dwWidthSrcPixels;
- }
- return S_OK;
+ RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthSrcPixels * dwHeightPixels) - dwWidthSrcPixels];
+ RGBQUAD *pDestPixel = &((RGBQUAD*)pDst)[0];
+
+ register INT x;
+ register INT y;
+
+ for (y = dwHeightPixels; y > 0; --y) {
+ for (x = 0; x < dwWidthDstPixels; ++x) {
+ pDestPixel[x] = pSrcPixel[x];
+ }
+ pDestPixel += dwWidthDstPixels;
+ pSrcPixel -= dwWidthSrcPixels;
+ }
+ return S_OK;
}
\ No newline at end of file
diff --git a/plugins/pluginWinDD/internals/DuplicationManager.h b/plugins/pluginWinDD/internals/DuplicationManager.h
index 2c44b57..8f98145 100755
--- a/plugins/pluginWinDD/internals/DuplicationManager.h
+++ b/plugins/pluginWinDD/internals/DuplicationManager.h
@@ -15,29 +15,29 @@
//
class DUPLICATIONMANAGER
{
- public:
- DUPLICATIONMANAGER();
- ~DUPLICATIONMANAGER();
- _Success_(*Timeout == false && return == DUPL_RETURN_SUCCESS) DUPL_RETURN GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Timeout);
- DUPL_RETURN DoneWithFrame();
- DUPL_RETURN InitDupl(_In_ ID3D11Device* Device, ID3D11DeviceContext* DeviceContext, UINT Output);
- DUPL_RETURN GetMouse(_Inout_ PTR_INFO* PtrInfo, _In_ DXGI_OUTDUPL_FRAME_INFO* FrameInfo, INT OffsetX, INT OffsetY);
- void GetOutputDesc(_Out_ DXGI_OUTPUT_DESC* DescPtr);
- HRESULT SendData(struct tmedia_producer_s* pProducer, FRAME_DATA* FrameData);
+public:
+ DUPLICATIONMANAGER();
+ ~DUPLICATIONMANAGER();
+ _Success_(*Timeout == false && return == DUPL_RETURN_SUCCESS) DUPL_RETURN GetFrame(_Out_ FRAME_DATA* Data, _Out_ bool* Timeout);
+ DUPL_RETURN DoneWithFrame();
+ DUPL_RETURN InitDupl(_In_ ID3D11Device* Device, ID3D11DeviceContext* DeviceContext, UINT Output);
+ DUPL_RETURN GetMouse(_Inout_ PTR_INFO* PtrInfo, _In_ DXGI_OUTDUPL_FRAME_INFO* FrameInfo, INT OffsetX, INT OffsetY);
+ void GetOutputDesc(_Out_ DXGI_OUTPUT_DESC* DescPtr);
+ HRESULT SendData(struct tmedia_producer_s* pProducer, FRAME_DATA* FrameData);
- private:
+private:
// vars
- IDXGIOutputDuplication* m_DeskDupl;
- ID3D11Texture2D* m_AcquiredDesktopImage;
- _Field_size_bytes_(m_MetaDataSize) BYTE* m_MetaDataBuffer;
- UINT m_MetaDataSize;
- UINT m_OutputNumber;
- DXGI_OUTPUT_DESC m_OutputDesc;
- ID3D11Device* m_Device;
- ID3D11DeviceContext* m_DeviceContext;
- BYTE* m_BufferPtr;
- UINT m_BufferSize;
+ IDXGIOutputDuplication* m_DeskDupl;
+ ID3D11Texture2D* m_AcquiredDesktopImage;
+ _Field_size_bytes_(m_MetaDataSize) BYTE* m_MetaDataBuffer;
+ UINT m_MetaDataSize;
+ UINT m_OutputNumber;
+ DXGI_OUTPUT_DESC m_OutputDesc;
+ ID3D11Device* m_Device;
+ ID3D11DeviceContext* m_DeviceContext;
+ BYTE* m_BufferPtr;
+ UINT m_BufferSize;
};
#endif
diff --git a/plugins/pluginWinDD/internals/OutputManager.cxx b/plugins/pluginWinDD/internals/OutputManager.cxx
index 7468cf2..a82f84c 100755
--- a/plugins/pluginWinDD/internals/OutputManager.cxx
+++ b/plugins/pluginWinDD/internals/OutputManager.cxx
@@ -12,20 +12,20 @@ using namespace DirectX;
// Constructor NULLs out all pointers & sets appropriate var vals
//
OUTPUTMANAGER::OUTPUTMANAGER() : m_SwapChain(nullptr),
- m_Device(nullptr),
- m_Factory(nullptr),
- m_DeviceContext(nullptr),
- m_RTV(nullptr),
- m_SamplerLinear(nullptr),
- m_BlendState(nullptr),
- m_VertexShader(nullptr),
- m_PixelShader(nullptr),
- m_InputLayout(nullptr),
- m_SharedSurf(nullptr),
- m_KeyMutex(nullptr),
- m_WindowHandle(nullptr),
- m_NeedsResize(false),
- m_OcclusionCookie(0)
+ m_Device(nullptr),
+ m_Factory(nullptr),
+ m_DeviceContext(nullptr),
+ m_RTV(nullptr),
+ m_SamplerLinear(nullptr),
+ m_BlendState(nullptr),
+ m_VertexShader(nullptr),
+ m_PixelShader(nullptr),
+ m_InputLayout(nullptr),
+ m_SharedSurf(nullptr),
+ m_KeyMutex(nullptr),
+ m_WindowHandle(nullptr),
+ m_NeedsResize(false),
+ m_OcclusionCookie(0)
{
}
@@ -56,8 +56,7 @@ DUPL_RETURN OUTPUTMANAGER::InitOutput(HWND Window, INT SingleOutput, _Out_ UINT*
m_WindowHandle = Window;
// Driver types supported
- D3D_DRIVER_TYPE DriverTypes[] =
- {
+ D3D_DRIVER_TYPE DriverTypes[] = {
D3D_DRIVER_TYPE_HARDWARE,
D3D_DRIVER_TYPE_WARP,
D3D_DRIVER_TYPE_REFERENCE,
@@ -65,8 +64,7 @@ DUPL_RETURN OUTPUTMANAGER::InitOutput(HWND Window, INT SingleOutput, _Out_ UINT*
UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
// Feature levels supported
- D3D_FEATURE_LEVEL FeatureLevels[] =
- {
+ D3D_FEATURE_LEVEL FeatureLevels[] = {
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
@@ -76,26 +74,22 @@ DUPL_RETURN OUTPUTMANAGER::InitOutput(HWND Window, INT SingleOutput, _Out_ UINT*
D3D_FEATURE_LEVEL FeatureLevel;
// Create device
- for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex)
- {
+ for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) {
hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr, 0, FeatureLevels, NumFeatureLevels,
- D3D11_SDK_VERSION, &m_Device, &FeatureLevel, &m_DeviceContext);
- if (SUCCEEDED(hr))
- {
+ D3D11_SDK_VERSION, &m_Device, &FeatureLevel, &m_DeviceContext);
+ if (SUCCEEDED(hr)) {
// Device creation succeeded, no need to loop anymore
break;
}
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Device creation in OUTPUTMANAGER failed", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Get DXGI factory
IDXGIDevice* DxgiDevice = nullptr;
hr = m_Device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&DxgiDevice));
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(nullptr, L"Failed to QI for DXGI Device", L"Error", hr, nullptr);
}
@@ -103,79 +97,69 @@ DUPL_RETURN OUTPUTMANAGER::InitOutput(HWND Window, INT SingleOutput, _Out_ UINT*
hr = DxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&DxgiAdapter));
DxgiDevice->Release();
DxgiDevice = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to get parent DXGI Adapter", L"Error", hr, SystemTransitionsExpectedErrors);
}
hr = DxgiAdapter->GetParent(__uuidof(IDXGIFactory2), reinterpret_cast<void**>(&m_Factory));
DxgiAdapter->Release();
DxgiAdapter = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to get parent DXGI Factory", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Register for occlusion status windows message
- if (m_WindowHandle)
- {
- hr = m_Factory->RegisterOcclusionStatusWindow(Window, OCCLUSION_STATUS_MSG, &m_OcclusionCookie);
- if (FAILED(hr))
- {
- return ProcessFailure(m_Device, L"Failed to register for occlusion message", L"Error", hr, SystemTransitionsExpectedErrors);
- }
- }
+ if (m_WindowHandle) {
+ hr = m_Factory->RegisterOcclusionStatusWindow(Window, OCCLUSION_STATUS_MSG, &m_OcclusionCookie);
+ if (FAILED(hr)) {
+ return ProcessFailure(m_Device, L"Failed to register for occlusion message", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ }
// Get window size
RECT WindowRect;
- GetClientRect(m_WindowHandle ? m_WindowHandle : GetDesktopWindow(), &WindowRect);
+ GetClientRect(m_WindowHandle ? m_WindowHandle : GetDesktopWindow(), &WindowRect);
UINT Width = WindowRect.right - WindowRect.left;
UINT Height = WindowRect.bottom - WindowRect.top;
- if (m_WindowHandle)
- {
- // Create swapchain for window
- DXGI_SWAP_CHAIN_DESC1 SwapChainDesc;
- RtlZeroMemory(&SwapChainDesc, sizeof(SwapChainDesc));
-
- SwapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
- SwapChainDesc.BufferCount = 2;
- SwapChainDesc.Width = Width;
- SwapChainDesc.Height = Height;
- SwapChainDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
- SwapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
- SwapChainDesc.SampleDesc.Count = 1;
- SwapChainDesc.SampleDesc.Quality = 0;
- hr = m_Factory->CreateSwapChainForHwnd(m_Device, Window, &SwapChainDesc, nullptr, nullptr, &m_SwapChain);
- if (FAILED(hr))
- {
- return ProcessFailure(m_Device, L"Failed to create window swapchain", L"Error", hr, SystemTransitionsExpectedErrors);
- }
-
- // Disable the ALT-ENTER shortcut for entering full-screen mode
- hr = m_Factory->MakeWindowAssociation(Window, DXGI_MWA_NO_ALT_ENTER);
- if (FAILED(hr))
- {
- return ProcessFailure(m_Device, L"Failed to make window association", L"Error", hr, SystemTransitionsExpectedErrors);
- }
- }
+ if (m_WindowHandle) {
+ // Create swapchain for window
+ DXGI_SWAP_CHAIN_DESC1 SwapChainDesc;
+ RtlZeroMemory(&SwapChainDesc, sizeof(SwapChainDesc));
+
+ SwapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
+ SwapChainDesc.BufferCount = 2;
+ SwapChainDesc.Width = Width;
+ SwapChainDesc.Height = Height;
+ SwapChainDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+ SwapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
+ SwapChainDesc.SampleDesc.Count = 1;
+ SwapChainDesc.SampleDesc.Quality = 0;
+ hr = m_Factory->CreateSwapChainForHwnd(m_Device, Window, &SwapChainDesc, nullptr, nullptr, &m_SwapChain);
+ if (FAILED(hr)) {
+ return ProcessFailure(m_Device, L"Failed to create window swapchain", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Disable the ALT-ENTER shortcut for entering full-screen mode
+ hr = m_Factory->MakeWindowAssociation(Window, DXGI_MWA_NO_ALT_ENTER);
+ if (FAILED(hr)) {
+ return ProcessFailure(m_Device, L"Failed to make window association", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ }
// Create shared texture
DUPL_RETURN Return = CreateSharedSurf(SingleOutput, OutCount, DeskBounds);
- if (Return != DUPL_RETURN_SUCCESS)
- {
+ if (Return != DUPL_RETURN_SUCCESS) {
return Return;
}
// Make new render target view
- if (m_WindowHandle)
- {
- Return = MakeRTV();
- if (Return != DUPL_RETURN_SUCCESS)
- {
- return Return;
- }
- }
+ if (m_WindowHandle) {
+ Return = MakeRTV();
+ if (Return != DUPL_RETURN_SUCCESS) {
+ return Return;
+ }
+ }
// Set view port
SetViewPort(Width, Height);
@@ -191,41 +175,37 @@ DUPL_RETURN OUTPUTMANAGER::InitOutput(HWND Window, INT SingleOutput, _Out_ UINT*
SampDesc.MinLOD = 0;
SampDesc.MaxLOD = D3D11_FLOAT32_MAX;
hr = m_Device->CreateSamplerState(&SampDesc, &m_SamplerLinear);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create sampler state in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
- if (m_WindowHandle)
- {
- // Create the blend state
- D3D11_BLEND_DESC BlendStateDesc;
- BlendStateDesc.AlphaToCoverageEnable = FALSE;
- BlendStateDesc.IndependentBlendEnable = FALSE;
- BlendStateDesc.RenderTarget[0].BlendEnable = TRUE;
- BlendStateDesc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
- BlendStateDesc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
- BlendStateDesc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
- BlendStateDesc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ONE;
- BlendStateDesc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
- BlendStateDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
- BlendStateDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
- hr = m_Device->CreateBlendState(&BlendStateDesc, &m_BlendState);
- if (FAILED(hr))
- {
- return ProcessFailure(m_Device, L"Failed to create blend state in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
- }
-
- // Initialize shaders
- Return = InitShaders();
- if (Return != DUPL_RETURN_SUCCESS)
- {
- return Return;
- }
-
- GetWindowRect(m_WindowHandle, &WindowRect);
- MoveWindow(m_WindowHandle, WindowRect.left, WindowRect.top, (DeskBounds->right - DeskBounds->left) / 2, (DeskBounds->bottom - DeskBounds->top) / 2, TRUE);
- }
+ if (m_WindowHandle) {
+ // Create the blend state
+ D3D11_BLEND_DESC BlendStateDesc;
+ BlendStateDesc.AlphaToCoverageEnable = FALSE;
+ BlendStateDesc.IndependentBlendEnable = FALSE;
+ BlendStateDesc.RenderTarget[0].BlendEnable = TRUE;
+ BlendStateDesc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
+ BlendStateDesc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
+ BlendStateDesc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
+ BlendStateDesc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ONE;
+ BlendStateDesc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
+ BlendStateDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
+ BlendStateDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
+ hr = m_Device->CreateBlendState(&BlendStateDesc, &m_BlendState);
+ if (FAILED(hr)) {
+ return ProcessFailure(m_Device, L"Failed to create blend state in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+
+ // Initialize shaders
+ Return = InitShaders();
+ if (Return != DUPL_RETURN_SUCCESS) {
+ return Return;
+ }
+
+ GetWindowRect(m_WindowHandle, &WindowRect);
+ MoveWindow(m_WindowHandle, WindowRect.left, WindowRect.top, (DeskBounds->right - DeskBounds->left) / 2, (DeskBounds->bottom - DeskBounds->top) / 2, TRUE);
+ }
return Return;
}
@@ -240,8 +220,7 @@ DUPL_RETURN OUTPUTMANAGER::CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCou
// Get DXGI resources
IDXGIDevice* DxgiDevice = nullptr;
hr = m_Device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&DxgiDevice));
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(nullptr, L"Failed to QI for DXGI Device", L"Error", hr);
}
@@ -249,8 +228,7 @@ DUPL_RETURN OUTPUTMANAGER::CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCou
hr = DxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&DxgiAdapter));
DxgiDevice->Release();
DxgiDevice = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to get parent DXGI Adapter", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -264,19 +242,15 @@ DUPL_RETURN OUTPUTMANAGER::CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCou
// Figure out right dimensions for full size desktop texture and # of outputs to duplicate
UINT OutputCount;
- if (SingleOutput < 0)
- {
+ if (SingleOutput < 0) {
hr = S_OK;
- for (OutputCount = 0; SUCCEEDED(hr); ++OutputCount)
- {
- if (DxgiOutput)
- {
+ for (OutputCount = 0; SUCCEEDED(hr); ++OutputCount) {
+ if (DxgiOutput) {
DxgiOutput->Release();
DxgiOutput = nullptr;
}
hr = DxgiAdapter->EnumOutputs(OutputCount, &DxgiOutput);
- if (DxgiOutput && (hr != DXGI_ERROR_NOT_FOUND))
- {
+ if (DxgiOutput && (hr != DXGI_ERROR_NOT_FOUND)) {
DXGI_OUTPUT_DESC DesktopDesc;
DxgiOutput->GetDesc(&DesktopDesc);
@@ -289,11 +263,9 @@ DUPL_RETURN OUTPUTMANAGER::CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCou
--OutputCount;
}
- else
- {
+ else {
hr = DxgiAdapter->EnumOutputs(SingleOutput, &DxgiOutput);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
DxgiAdapter->Release();
DxgiAdapter = nullptr;
return ProcessFailure(m_Device, L"Output specified to be duplicated does not exist", L"Error", hr);
@@ -314,8 +286,7 @@ DUPL_RETURN OUTPUTMANAGER::CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCou
// Set passed in output count variable
*OutCount = OutputCount;
- if (OutputCount == 0)
- {
+ if (OutputCount == 0) {
// We could not find any outputs, the system must be in a transition so return expected error
// so we will attempt to recreate
return DUPL_RETURN_ERROR_EXPECTED;
@@ -336,10 +307,8 @@ DUPL_RETURN OUTPUTMANAGER::CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCou
DeskTexD.MiscFlags = D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
hr = m_Device->CreateTexture2D(&DeskTexD, nullptr, &m_SharedSurf);
- if (FAILED(hr))
- {
- if (OutputCount != 1)
- {
+ if (FAILED(hr)) {
+ if (OutputCount != 1) {
// If we are duplicating the complete desktop we try to create a single texture to hold the
// complete desktop image and blit updates from the per output DDA interface. The GPU can
// always support a texture size of the maximum resolution of any single output but there is no
@@ -348,16 +317,14 @@ DUPL_RETURN OUTPUTMANAGER::CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCou
// we could revert back to using GDI to update the window in this failure case.
return ProcessFailure(m_Device, L"Failed to create DirectX shared texture - we are attempting to create a texture the size of the complete desktop and this may be larger than the maximum texture size of your GPU. Please try again using the -output command line parameter to duplicate only 1 monitor or configure your computer to a single monitor configuration", L"Error", hr, SystemTransitionsExpectedErrors);
}
- else
- {
+ else {
return ProcessFailure(m_Device, L"Failed to create shared texture", L"Error", hr, SystemTransitionsExpectedErrors);
}
}
// Get keyed mutex
hr = m_SharedSurf->QueryInterface(__uuidof(IDXGIKeyedMutex), reinterpret_cast<void**>(&m_KeyMutex));
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to query for keyed mutex in OUTPUTMANAGER", L"Error", hr);
}
@@ -376,23 +343,19 @@ DUPL_RETURN OUTPUTMANAGER::UpdateApplicationWindow(_In_ PTR_INFO* PointerInfo, _
// Try and acquire sync on common display buffer
HRESULT hr = m_KeyMutex->AcquireSync(1, 100);
- if (hr == static_cast<HRESULT>(WAIT_TIMEOUT))
- {
+ if (hr == static_cast<HRESULT>(WAIT_TIMEOUT)) {
// Another thread has the keyed mutex so try again later
return DUPL_RETURN_SUCCESS;
}
- else if (FAILED(hr))
- {
+ else if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to acquire Keyed mutex in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Got mutex, so draw
DUPL_RETURN Ret = DrawFrame();
- if (Ret == DUPL_RETURN_SUCCESS)
- {
+ if (Ret == DUPL_RETURN_SUCCESS) {
// We have keyed mutex so we can access the mouse info
- if (PointerInfo->Visible)
- {
+ if (PointerInfo->Visible) {
// Draw mouse into texture
Ret = DrawMouse(PointerInfo);
}
@@ -400,27 +363,22 @@ DUPL_RETURN OUTPUTMANAGER::UpdateApplicationWindow(_In_ PTR_INFO* PointerInfo, _
// Release keyed mutex
hr = m_KeyMutex->ReleaseSync(0);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to Release Keyed mutex in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Present to window if all worked
- if (Ret == DUPL_RETURN_SUCCESS)
- {
+ if (Ret == DUPL_RETURN_SUCCESS) {
// Present to window
- if (m_SwapChain)
- {
- hr = m_SwapChain->Present(1, 0);
- if (FAILED(hr))
- {
- return ProcessFailure(m_Device, L"Failed to present", L"Error", hr, SystemTransitionsExpectedErrors);
- }
- else if (hr == DXGI_STATUS_OCCLUDED)
- {
- *Occluded = true;
- }
- }
+ if (m_SwapChain) {
+ hr = m_SwapChain->Present(1, 0);
+ if (FAILED(hr)) {
+ return ProcessFailure(m_Device, L"Failed to present", L"Error", hr, SystemTransitionsExpectedErrors);
+ }
+ else if (hr == DXGI_STATUS_OCCLUDED) {
+ *Occluded = true;
+ }
+ }
}
return Ret;
@@ -436,8 +394,7 @@ HANDLE OUTPUTMANAGER::GetSharedHandle()
// QI IDXGIResource interface to synchronized shared surface.
IDXGIResource* DXGIResource = nullptr;
HRESULT hr = m_SharedSurf->QueryInterface(__uuidof(IDXGIResource), reinterpret_cast<void**>(&DXGIResource));
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Obtain handle to IDXGIResource object.
DXGIResource->GetSharedHandle(&Hnd);
DXGIResource->Release();
@@ -455,19 +412,16 @@ DUPL_RETURN OUTPUTMANAGER::DrawFrame()
HRESULT hr;
// If window was resized, resize swapchain
- if (m_NeedsResize)
- {
+ if (m_NeedsResize) {
DUPL_RETURN Ret = ResizeSwapChain();
- if (Ret != DUPL_RETURN_SUCCESS)
- {
+ if (Ret != DUPL_RETURN_SUCCESS) {
return Ret;
}
m_NeedsResize = false;
}
// Vertices for drawing whole texture
- VERTEX Vertices[NUMVERTICES] =
- {
+ VERTEX Vertices[NUMVERTICES] = {
{XMFLOAT3(-1.0f, -1.0f, 0), XMFLOAT2(0.0f, 1.0f)},
{XMFLOAT3(-1.0f, 1.0f, 0), XMFLOAT2(0.0f, 0.0f)},
{XMFLOAT3(1.0f, -1.0f, 0), XMFLOAT2(1.0f, 1.0f)},
@@ -488,8 +442,7 @@ DUPL_RETURN OUTPUTMANAGER::DrawFrame()
// Create new shader resource view
ID3D11ShaderResourceView* ShaderResource = nullptr;
hr = m_Device->CreateShaderResourceView(m_SharedSurf, &ShaderDesc, &ShaderResource);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create shader resource when drawing a frame", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -519,8 +472,7 @@ DUPL_RETURN OUTPUTMANAGER::DrawFrame()
// Create vertex buffer
hr = m_Device->CreateBuffer(&BufferDesc, &InitData, &VertexBuffer);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
ShaderResource->Release();
ShaderResource = nullptr;
return ProcessFailure(m_Device, L"Failed to create vertex buffer when drawing a frame", L"Error", hr, SystemTransitionsExpectedErrors);
@@ -543,7 +495,7 @@ DUPL_RETURN OUTPUTMANAGER::DrawFrame()
//
// Process both masked and monochrome pointers
//
-DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInfo, _Out_ INT* PtrWidth, _Out_ INT* PtrHeight, _Out_ INT* PtrLeft, _Out_ INT* PtrTop, _Outptr_result_bytebuffer_(*PtrHeight * *PtrWidth * BPP) BYTE** InitBuffer, _Out_ D3D11_BOX* Box)
+DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInfo, _Out_ INT* PtrWidth, _Out_ INT* PtrHeight, _Out_ INT* PtrLeft, _Out_ INT* PtrTop, _Outptr_result_bytebuffer_(*PtrHeight **PtrWidth * BPP) BYTE** InitBuffer, _Out_ D3D11_BOX* Box)
{
// Desktop dimensions
D3D11_TEXTURE2D_DESC FullDesc;
@@ -556,39 +508,31 @@ DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInf
INT GivenTop = PtrInfo->Position.y;
// Figure out if any adjustment is needed for out of bound positions
- if (GivenLeft < 0)
- {
+ if (GivenLeft < 0) {
*PtrWidth = GivenLeft + static_cast<INT>(PtrInfo->ShapeInfo.Width);
}
- else if ((GivenLeft + static_cast<INT>(PtrInfo->ShapeInfo.Width)) > DesktopWidth)
- {
+ else if ((GivenLeft + static_cast<INT>(PtrInfo->ShapeInfo.Width)) > DesktopWidth) {
*PtrWidth = DesktopWidth - GivenLeft;
}
- else
- {
+ else {
*PtrWidth = static_cast<INT>(PtrInfo->ShapeInfo.Width);
}
- if (IsMono)
- {
+ if (IsMono) {
PtrInfo->ShapeInfo.Height = PtrInfo->ShapeInfo.Height / 2;
}
- if (GivenTop < 0)
- {
+ if (GivenTop < 0) {
*PtrHeight = GivenTop + static_cast<INT>(PtrInfo->ShapeInfo.Height);
}
- else if ((GivenTop + static_cast<INT>(PtrInfo->ShapeInfo.Height)) > DesktopHeight)
- {
+ else if ((GivenTop + static_cast<INT>(PtrInfo->ShapeInfo.Height)) > DesktopHeight) {
*PtrHeight = DesktopHeight - GivenTop;
}
- else
- {
+ else {
*PtrHeight = static_cast<INT>(PtrInfo->ShapeInfo.Height);
}
- if (IsMono)
- {
+ if (IsMono) {
PtrInfo->ShapeInfo.Height = PtrInfo->ShapeInfo.Height * 2;
}
@@ -611,8 +555,7 @@ DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInf
ID3D11Texture2D* CopyBuffer = nullptr;
HRESULT hr = m_Device->CreateTexture2D(&CopyBufferDesc, nullptr, &CopyBuffer);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed creating staging texture for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -628,25 +571,22 @@ DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInf
hr = CopyBuffer->QueryInterface(__uuidof(IDXGISurface), (void **)&CopySurface);
CopyBuffer->Release();
CopyBuffer = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(nullptr, L"Failed to QI staging texture into IDXGISurface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Map pixels
DXGI_MAPPED_RECT MappedSurface;
hr = CopySurface->Map(&MappedSurface, DXGI_MAP_READ);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
CopySurface->Release();
CopySurface = nullptr;
return ProcessFailure(m_Device, L"Failed to map surface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
}
// New mouseshape buffer
- *InitBuffer = new (std::nothrow) BYTE[*PtrWidth * *PtrHeight * BPP];
- if (!(*InitBuffer))
- {
+ *InitBuffer = new (std::nothrow) BYTE[*PtrWidth **PtrHeight * BPP];
+ if (!(*InitBuffer)) {
return ProcessFailure(nullptr, L"Failed to allocate memory for new mouse shape buffer.", L"Error", E_OUTOFMEMORY);
}
@@ -658,15 +598,12 @@ DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInf
UINT SkipX = (GivenLeft < 0) ? (-1 * GivenLeft) : (0);
UINT SkipY = (GivenTop < 0) ? (-1 * GivenTop) : (0);
- if (IsMono)
- {
- for (INT Row = 0; Row < *PtrHeight; ++Row)
- {
+ if (IsMono) {
+ for (INT Row = 0; Row < *PtrHeight; ++Row) {
// Set mask
BYTE Mask = 0x80;
Mask = Mask >> (SkipX % 8);
- for (INT Col = 0; Col < *PtrWidth; ++Col)
- {
+ for (INT Col = 0; Col < *PtrWidth; ++Col) {
// Get masks using appropriate offsets
BYTE AndMask = PtrInfo->PtrShapeBuffer[((Col + SkipX) / 8) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch))] & Mask;
BYTE XorMask = PtrInfo->PtrShapeBuffer[((Col + SkipX) / 8) + ((Row + SkipY + (PtrInfo->ShapeInfo.Height / 2)) * (PtrInfo->ShapeInfo.Pitch))] & Mask;
@@ -674,40 +611,33 @@ DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInf
UINT XorMask32 = (XorMask) ? 0x00FFFFFF : 0x00000000;
// Set new pixel
- InitBuffer32[(Row * *PtrWidth) + Col] = (Desktop32[(Row * DesktopPitchInPixels) + Col] & AndMask32) ^ XorMask32;
+ InitBuffer32[(Row **PtrWidth) + Col] = (Desktop32[(Row * DesktopPitchInPixels) + Col] & AndMask32) ^ XorMask32;
// Adjust mask
- if (Mask == 0x01)
- {
+ if (Mask == 0x01) {
Mask = 0x80;
}
- else
- {
+ else {
Mask = Mask >> 1;
}
}
}
}
- else
- {
+ else {
UINT* Buffer32 = reinterpret_cast<UINT*>(PtrInfo->PtrShapeBuffer);
// Iterate through pixels
- for (INT Row = 0; Row < *PtrHeight; ++Row)
- {
- for (INT Col = 0; Col < *PtrWidth; ++Col)
- {
+ for (INT Row = 0; Row < *PtrHeight; ++Row) {
+ for (INT Col = 0; Col < *PtrWidth; ++Col) {
// Set up mask
UINT MaskVal = 0xFF000000 & Buffer32[(Col + SkipX) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))];
- if (MaskVal)
- {
+ if (MaskVal) {
// Mask was 0xFF
- InitBuffer32[(Row * *PtrWidth) + Col] = (Desktop32[(Row * DesktopPitchInPixels) + Col] ^ Buffer32[(Col + SkipX) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))]) | 0xFF000000;
+ InitBuffer32[(Row **PtrWidth) + Col] = (Desktop32[(Row * DesktopPitchInPixels) + Col] ^ Buffer32[(Col + SkipX) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))]) | 0xFF000000;
}
- else
- {
+ else {
// Mask was 0x00
- InitBuffer32[(Row * *PtrWidth) + Col] = Buffer32[(Col + SkipX) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))] | 0xFF000000;
+ InitBuffer32[(Row **PtrWidth) + Col] = Buffer32[(Col + SkipX) + ((Row + SkipY) * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))] | 0xFF000000;
}
}
}
@@ -717,8 +647,7 @@ DUPL_RETURN OUTPUTMANAGER::ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInf
hr = CopySurface->Unmap();
CopySurface->Release();
CopySurface = nullptr;
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to unmap surface for pointer", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -739,8 +668,7 @@ DUPL_RETURN OUTPUTMANAGER::DrawMouse(_In_ PTR_INFO* PtrInfo)
D3D11_SHADER_RESOURCE_VIEW_DESC SDesc;
// Position will be changed based on mouse position
- VERTEX Vertices[NUMVERTICES] =
- {
+ VERTEX Vertices[NUMVERTICES] = {
{XMFLOAT3(-1.0f, -1.0f, 0), XMFLOAT2(0.0f, 1.0f)},
{XMFLOAT3(-1.0f, 1.0f, 0), XMFLOAT2(0.0f, 0.0f)},
{XMFLOAT3(1.0f, -1.0f, 0), XMFLOAT2(1.0f, 1.0f)},
@@ -788,33 +716,29 @@ DUPL_RETURN OUTPUTMANAGER::DrawMouse(_In_ PTR_INFO* PtrInfo)
SDesc.Texture2D.MostDetailedMip = Desc.MipLevels - 1;
SDesc.Texture2D.MipLevels = Desc.MipLevels;
- switch (PtrInfo->ShapeInfo.Type)
- {
- case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR:
- {
- PtrLeft = PtrInfo->Position.x;
- PtrTop = PtrInfo->Position.y;
+ switch (PtrInfo->ShapeInfo.Type) {
+ case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR: {
+ PtrLeft = PtrInfo->Position.x;
+ PtrTop = PtrInfo->Position.y;
- PtrWidth = static_cast<INT>(PtrInfo->ShapeInfo.Width);
- PtrHeight = static_cast<INT>(PtrInfo->ShapeInfo.Height);
+ PtrWidth = static_cast<INT>(PtrInfo->ShapeInfo.Width);
+ PtrHeight = static_cast<INT>(PtrInfo->ShapeInfo.Height);
- break;
- }
+ break;
+ }
- case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME:
- {
- ProcessMonoMask(true, PtrInfo, &PtrWidth, &PtrHeight, &PtrLeft, &PtrTop, &InitBuffer, &Box);
- break;
- }
+ case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME: {
+ ProcessMonoMask(true, PtrInfo, &PtrWidth, &PtrHeight, &PtrLeft, &PtrTop, &InitBuffer, &Box);
+ break;
+ }
- case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR:
- {
- ProcessMonoMask(false, PtrInfo, &PtrWidth, &PtrHeight, &PtrLeft, &PtrTop, &InitBuffer, &Box);
- break;
- }
+ case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR: {
+ ProcessMonoMask(false, PtrInfo, &PtrWidth, &PtrHeight, &PtrLeft, &PtrTop, &InitBuffer, &Box);
+ break;
+ }
- default:
- break;
+ default:
+ break;
}
// VERTEX creation
@@ -842,15 +766,13 @@ DUPL_RETURN OUTPUTMANAGER::DrawMouse(_In_ PTR_INFO* PtrInfo)
// Create mouseshape as texture
HRESULT hr = m_Device->CreateTexture2D(&Desc, &InitData, &MouseTex);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create mouse pointer texture", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Create shader resource from texture
hr = m_Device->CreateShaderResourceView(MouseTex, &SDesc, &ShaderRes);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
MouseTex->Release();
MouseTex = nullptr;
return ProcessFailure(m_Device, L"Failed to create shader resource from mouse pointer texture", L"Error", hr, SystemTransitionsExpectedErrors);
@@ -868,8 +790,7 @@ DUPL_RETURN OUTPUTMANAGER::DrawMouse(_In_ PTR_INFO* PtrInfo)
// Create vertex buffer
hr = m_Device->CreateBuffer(&BDesc, &InitData, &VertexBufferMouse);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
ShaderRes->Release();
ShaderRes = nullptr;
MouseTex->Release();
@@ -893,23 +814,19 @@ DUPL_RETURN OUTPUTMANAGER::DrawMouse(_In_ PTR_INFO* PtrInfo)
m_DeviceContext->Draw(NUMVERTICES, 0);
// Clean
- if (VertexBufferMouse)
- {
+ if (VertexBufferMouse) {
VertexBufferMouse->Release();
VertexBufferMouse = nullptr;
}
- if (ShaderRes)
- {
+ if (ShaderRes) {
ShaderRes->Release();
ShaderRes = nullptr;
}
- if (MouseTex)
- {
+ if (MouseTex) {
MouseTex->Release();
MouseTex = nullptr;
}
- if (InitBuffer)
- {
+ if (InitBuffer) {
delete [] InitBuffer;
InitBuffer = nullptr;
}
@@ -926,28 +843,24 @@ DUPL_RETURN OUTPUTMANAGER::InitShaders()
UINT Size = ARRAYSIZE(g_VS);
hr = m_Device->CreateVertexShader(g_VS, Size, nullptr, &m_VertexShader);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create vertex shader in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
- D3D11_INPUT_ELEMENT_DESC Layout[] =
- {
+ D3D11_INPUT_ELEMENT_DESC Layout[] = {
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0}
};
UINT NumElements = ARRAYSIZE(Layout);
hr = m_Device->CreateInputLayout(Layout, NumElements, g_VS, Size, &m_InputLayout);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create input layout in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
m_DeviceContext->IASetInputLayout(m_InputLayout);
Size = ARRAYSIZE(g_PS);
hr = m_Device->CreatePixelShader(g_PS, Size, nullptr, &m_PixelShader);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create pixel shader in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -962,16 +875,14 @@ DUPL_RETURN OUTPUTMANAGER::MakeRTV()
// Get backbuffer
ID3D11Texture2D* BackBuffer = nullptr;
HRESULT hr = m_SwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&BackBuffer));
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to get backbuffer for making render target view in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Create a render target view
hr = m_Device->CreateRenderTargetView(BackBuffer, nullptr, &m_RTV);
BackBuffer->Release();
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to create render target view in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -1001,8 +912,7 @@ void OUTPUTMANAGER::SetViewPort(UINT Width, UINT Height)
//
DUPL_RETURN OUTPUTMANAGER::ResizeSwapChain()
{
- if (m_RTV)
- {
+ if (m_RTV) {
m_RTV->Release();
m_RTV = nullptr;
}
@@ -1016,15 +926,13 @@ DUPL_RETURN OUTPUTMANAGER::ResizeSwapChain()
DXGI_SWAP_CHAIN_DESC SwapChainDesc;
m_SwapChain->GetDesc(&SwapChainDesc);
HRESULT hr = m_SwapChain->ResizeBuffers(SwapChainDesc.BufferCount, Width, Height, SwapChainDesc.BufferDesc.Format, SwapChainDesc.Flags);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(m_Device, L"Failed to resize swapchain buffers in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Make new render target view
DUPL_RETURN Ret = MakeRTV();
- if (Ret != DUPL_RETURN_SUCCESS)
- {
+ if (Ret != DUPL_RETURN_SUCCESS) {
return Ret;
}
@@ -1039,76 +947,63 @@ DUPL_RETURN OUTPUTMANAGER::ResizeSwapChain()
//
void OUTPUTMANAGER::CleanRefs()
{
- if (m_VertexShader)
- {
+ if (m_VertexShader) {
m_VertexShader->Release();
m_VertexShader = nullptr;
}
- if (m_PixelShader)
- {
+ if (m_PixelShader) {
m_PixelShader->Release();
m_PixelShader = nullptr;
}
- if (m_InputLayout)
- {
+ if (m_InputLayout) {
m_InputLayout->Release();
m_InputLayout = nullptr;
}
- if (m_RTV)
- {
+ if (m_RTV) {
m_RTV->Release();
m_RTV = nullptr;
}
- if (m_SamplerLinear)
- {
+ if (m_SamplerLinear) {
m_SamplerLinear->Release();
m_SamplerLinear = nullptr;
}
- if (m_BlendState)
- {
+ if (m_BlendState) {
m_BlendState->Release();
m_BlendState = nullptr;
}
- if (m_DeviceContext)
- {
+ if (m_DeviceContext) {
m_DeviceContext->Release();
m_DeviceContext = nullptr;
}
- if (m_Device)
- {
+ if (m_Device) {
m_Device->Release();
m_Device = nullptr;
}
- if (m_SwapChain)
- {
+ if (m_SwapChain) {
m_SwapChain->Release();
m_SwapChain = nullptr;
}
- if (m_SharedSurf)
- {
+ if (m_SharedSurf) {
m_SharedSurf->Release();
m_SharedSurf = nullptr;
}
- if (m_KeyMutex)
- {
+ if (m_KeyMutex) {
m_KeyMutex->Release();
m_KeyMutex = nullptr;
}
- if (m_Factory)
- {
- if (m_OcclusionCookie)
- {
+ if (m_Factory) {
+ if (m_OcclusionCookie) {
m_Factory->UnregisterOcclusionStatus(m_OcclusionCookie);
m_OcclusionCookie = 0;
}
diff --git a/plugins/pluginWinDD/internals/OutputManager.h b/plugins/pluginWinDD/internals/OutputManager.h
index cd16e5f..fa1902a 100755
--- a/plugins/pluginWinDD/internals/OutputManager.h
+++ b/plugins/pluginWinDD/internals/OutputManager.h
@@ -19,43 +19,43 @@
//
class OUTPUTMANAGER
{
- public:
- OUTPUTMANAGER();
- ~OUTPUTMANAGER();
- DUPL_RETURN InitOutput(HWND Window, INT SingleOutput, _Out_ UINT* OutCount, _Out_ RECT* DeskBounds);
- DUPL_RETURN UpdateApplicationWindow(_In_ PTR_INFO* PointerInfo, _Inout_ bool* Occluded);
- void CleanRefs();
- HANDLE GetSharedHandle();
- void WindowResize();
+public:
+ OUTPUTMANAGER();
+ ~OUTPUTMANAGER();
+ DUPL_RETURN InitOutput(HWND Window, INT SingleOutput, _Out_ UINT* OutCount, _Out_ RECT* DeskBounds);
+ DUPL_RETURN UpdateApplicationWindow(_In_ PTR_INFO* PointerInfo, _Inout_ bool* Occluded);
+ void CleanRefs();
+ HANDLE GetSharedHandle();
+ void WindowResize();
- private:
+private:
// Methods
- DUPL_RETURN ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInfo, _Out_ INT* PtrWidth, _Out_ INT* PtrHeight, _Out_ INT* PtrLeft, _Out_ INT* PtrTop, _Outptr_result_bytebuffer_(*PtrHeight * *PtrWidth * BPP) BYTE** InitBuffer, _Out_ D3D11_BOX* Box);
- DUPL_RETURN MakeRTV();
- void SetViewPort(UINT Width, UINT Height);
- DUPL_RETURN InitShaders();
- DUPL_RETURN InitGeometry();
- DUPL_RETURN CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCount, _Out_ RECT* DeskBounds);
- DUPL_RETURN DrawFrame();
- DUPL_RETURN DrawMouse(_In_ PTR_INFO* PtrInfo);
- DUPL_RETURN ResizeSwapChain();
+ DUPL_RETURN ProcessMonoMask(bool IsMono, _Inout_ PTR_INFO* PtrInfo, _Out_ INT* PtrWidth, _Out_ INT* PtrHeight, _Out_ INT* PtrLeft, _Out_ INT* PtrTop, _Outptr_result_bytebuffer_(*PtrHeight **PtrWidth * BPP) BYTE** InitBuffer, _Out_ D3D11_BOX* Box);
+ DUPL_RETURN MakeRTV();
+ void SetViewPort(UINT Width, UINT Height);
+ DUPL_RETURN InitShaders();
+ DUPL_RETURN InitGeometry();
+ DUPL_RETURN CreateSharedSurf(INT SingleOutput, _Out_ UINT* OutCount, _Out_ RECT* DeskBounds);
+ DUPL_RETURN DrawFrame();
+ DUPL_RETURN DrawMouse(_In_ PTR_INFO* PtrInfo);
+ DUPL_RETURN ResizeSwapChain();
// Vars
- IDXGISwapChain1* m_SwapChain;
- ID3D11Device* m_Device;
- IDXGIFactory2* m_Factory;
- ID3D11DeviceContext* m_DeviceContext;
- ID3D11RenderTargetView* m_RTV;
- ID3D11SamplerState* m_SamplerLinear;
- ID3D11BlendState* m_BlendState;
- ID3D11VertexShader* m_VertexShader;
- ID3D11PixelShader* m_PixelShader;
- ID3D11InputLayout* m_InputLayout;
- ID3D11Texture2D* m_SharedSurf;
- IDXGIKeyedMutex* m_KeyMutex;
- HWND m_WindowHandle;
- bool m_NeedsResize;
- DWORD m_OcclusionCookie;
+ IDXGISwapChain1* m_SwapChain;
+ ID3D11Device* m_Device;
+ IDXGIFactory2* m_Factory;
+ ID3D11DeviceContext* m_DeviceContext;
+ ID3D11RenderTargetView* m_RTV;
+ ID3D11SamplerState* m_SamplerLinear;
+ ID3D11BlendState* m_BlendState;
+ ID3D11VertexShader* m_VertexShader;
+ ID3D11PixelShader* m_PixelShader;
+ ID3D11InputLayout* m_InputLayout;
+ ID3D11Texture2D* m_SharedSurf;
+ IDXGIKeyedMutex* m_KeyMutex;
+ HWND m_WindowHandle;
+ bool m_NeedsResize;
+ DWORD m_OcclusionCookie;
};
#endif
diff --git a/plugins/pluginWinDD/internals/ThreadManager.cxx b/plugins/pluginWinDD/internals/ThreadManager.cxx
index 68fe757..fcae447 100755
--- a/plugins/pluginWinDD/internals/ThreadManager.cxx
+++ b/plugins/pluginWinDD/internals/ThreadManager.cxx
@@ -10,8 +10,8 @@
DWORD WINAPI DDProc(_In_ void* Param);
THREADMANAGER::THREADMANAGER() : m_ThreadCount(0),
- m_ThreadHandles(nullptr),
- m_ThreadData(nullptr)
+ m_ThreadHandles(nullptr),
+ m_ThreadData(nullptr)
{
RtlZeroMemory(&m_PtrInfo, sizeof(m_PtrInfo));
}
@@ -26,19 +26,15 @@ THREADMANAGER::~THREADMANAGER()
//
void THREADMANAGER::Clean()
{
- if (m_PtrInfo.PtrShapeBuffer)
- {
+ if (m_PtrInfo.PtrShapeBuffer) {
delete [] m_PtrInfo.PtrShapeBuffer;
m_PtrInfo.PtrShapeBuffer = nullptr;
}
RtlZeroMemory(&m_PtrInfo, sizeof(m_PtrInfo));
- if (m_ThreadHandles)
- {
- for (UINT i = 0; i < m_ThreadCount; ++i)
- {
- if (m_ThreadHandles[i])
- {
+ if (m_ThreadHandles) {
+ for (UINT i = 0; i < m_ThreadCount; ++i) {
+ if (m_ThreadHandles[i]) {
CloseHandle(m_ThreadHandles[i]);
}
}
@@ -46,10 +42,8 @@ void THREADMANAGER::Clean()
m_ThreadHandles = nullptr;
}
- if (m_ThreadData)
- {
- for (UINT i = 0; i < m_ThreadCount; ++i)
- {
+ if (m_ThreadData) {
+ for (UINT i = 0; i < m_ThreadCount; ++i) {
CleanDx(&m_ThreadData[i].DxRes);
}
delete [] m_ThreadData;
@@ -64,38 +58,32 @@ void THREADMANAGER::Clean()
//
void THREADMANAGER::CleanDx(_Inout_ DX_RESOURCES* Data)
{
- if (Data->Device)
- {
+ if (Data->Device) {
Data->Device->Release();
Data->Device = nullptr;
}
- if (Data->Context)
- {
+ if (Data->Context) {
Data->Context->Release();
Data->Context = nullptr;
}
- if (Data->VertexShader)
- {
+ if (Data->VertexShader) {
Data->VertexShader->Release();
Data->VertexShader = nullptr;
}
- if (Data->PixelShader)
- {
+ if (Data->PixelShader) {
Data->PixelShader->Release();
Data->PixelShader = nullptr;
}
- if (Data->InputLayout)
- {
+ if (Data->InputLayout) {
Data->InputLayout->Release();
Data->InputLayout = nullptr;
}
- if (Data->SamplerLinear)
- {
+ if (Data->SamplerLinear) {
Data->SamplerLinear->Release();
Data->SamplerLinear = nullptr;
}
@@ -109,15 +97,13 @@ DUPL_RETURN THREADMANAGER::Initialize(INT SingleOutput, UINT OutputCount, HANDLE
m_ThreadCount = OutputCount;
m_ThreadHandles = new (std::nothrow) HANDLE[m_ThreadCount];
m_ThreadData = new (std::nothrow) THREAD_DATA[m_ThreadCount];
- if (!m_ThreadHandles || !m_ThreadData)
- {
+ if (!m_ThreadHandles || !m_ThreadData) {
return ProcessFailure(nullptr, L"Failed to allocate array for threads", L"Error", E_OUTOFMEMORY);
}
// Create appropriate # of threads for duplication
DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
- for (UINT i = 0; i < m_ThreadCount; ++i)
- {
+ for (UINT i = 0; i < m_ThreadCount; ++i) {
m_ThreadData[i].UnexpectedErrorEvent = UnexpectedErrorEvent;
m_ThreadData[i].ExpectedErrorEvent = ExpectedErrorEvent;
m_ThreadData[i].TerminateThreadsEvent = TerminateThreadsEvent;
@@ -126,19 +112,17 @@ DUPL_RETURN THREADMANAGER::Initialize(INT SingleOutput, UINT OutputCount, HANDLE
m_ThreadData[i].OffsetX = DesktopDim->left;
m_ThreadData[i].OffsetY = DesktopDim->top;
m_ThreadData[i].PtrInfo = &m_PtrInfo;
- m_ThreadData[i].Producer = Producer;
+ m_ThreadData[i].Producer = Producer;
RtlZeroMemory(&m_ThreadData[i].DxRes, sizeof(DX_RESOURCES));
Ret = InitializeDx(&m_ThreadData[i].DxRes);
- if (Ret != DUPL_RETURN_SUCCESS)
- {
+ if (Ret != DUPL_RETURN_SUCCESS) {
return Ret;
}
DWORD ThreadId;
m_ThreadHandles[i] = CreateThread(nullptr, 0, DDProc, &m_ThreadData[i], 0, &ThreadId);
- if (m_ThreadHandles[i] == nullptr)
- {
+ if (m_ThreadHandles[i] == nullptr) {
return ProcessFailure(nullptr, L"Failed to create thread", L"Error", E_FAIL);
}
}
@@ -154,8 +138,7 @@ DUPL_RETURN THREADMANAGER::InitializeDx(_Out_ DX_RESOURCES* Data)
HRESULT hr = S_OK;
// Driver types supported
- D3D_DRIVER_TYPE DriverTypes[] =
- {
+ D3D_DRIVER_TYPE DriverTypes[] = {
D3D_DRIVER_TYPE_HARDWARE,
D3D_DRIVER_TYPE_WARP,
D3D_DRIVER_TYPE_REFERENCE,
@@ -163,8 +146,7 @@ DUPL_RETURN THREADMANAGER::InitializeDx(_Out_ DX_RESOURCES* Data)
UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
// Feature levels supported
- D3D_FEATURE_LEVEL FeatureLevels[] =
- {
+ D3D_FEATURE_LEVEL FeatureLevels[] = {
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
@@ -175,39 +157,33 @@ DUPL_RETURN THREADMANAGER::InitializeDx(_Out_ DX_RESOURCES* Data)
D3D_FEATURE_LEVEL FeatureLevel;
// Create device
- for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex)
- {
+ for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) {
hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr, 0, FeatureLevels, NumFeatureLevels,
- D3D11_SDK_VERSION, &Data->Device, &FeatureLevel, &Data->Context);
- if (SUCCEEDED(hr))
- {
+ D3D11_SDK_VERSION, &Data->Device, &FeatureLevel, &Data->Context);
+ if (SUCCEEDED(hr)) {
// Device creation success, no need to loop anymore
break;
}
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(nullptr, L"Failed to create device in InitializeDx", L"Error", hr);
}
// VERTEX shader
UINT Size = ARRAYSIZE(g_VS);
hr = Data->Device->CreateVertexShader(g_VS, Size, nullptr, &Data->VertexShader);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(Data->Device, L"Failed to create vertex shader in InitializeDx", L"Error", hr, SystemTransitionsExpectedErrors);
}
// Input layout
- D3D11_INPUT_ELEMENT_DESC Layout[] =
- {
+ D3D11_INPUT_ELEMENT_DESC Layout[] = {
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0}
};
UINT NumElements = ARRAYSIZE(Layout);
hr = Data->Device->CreateInputLayout(Layout, NumElements, g_VS, Size, &Data->InputLayout);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(Data->Device, L"Failed to create input layout in InitializeDx", L"Error", hr, SystemTransitionsExpectedErrors);
}
Data->Context->IASetInputLayout(Data->InputLayout);
@@ -215,8 +191,7 @@ DUPL_RETURN THREADMANAGER::InitializeDx(_Out_ DX_RESOURCES* Data)
// Pixel shader
Size = ARRAYSIZE(g_PS);
hr = Data->Device->CreatePixelShader(g_PS, Size, nullptr, &Data->PixelShader);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(Data->Device, L"Failed to create pixel shader in InitializeDx", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -231,8 +206,7 @@ DUPL_RETURN THREADMANAGER::InitializeDx(_Out_ DX_RESOURCES* Data)
SampDesc.MinLOD = 0;
SampDesc.MaxLOD = D3D11_FLOAT32_MAX;
hr = Data->Device->CreateSamplerState(&SampDesc, &Data->SamplerLinear);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
return ProcessFailure(Data->Device, L"Failed to create sampler state in InitializeDx", L"Error", hr, SystemTransitionsExpectedErrors);
}
@@ -252,10 +226,9 @@ PTR_INFO* THREADMANAGER::GetPointerInfo()
//
bool THREADMANAGER::WaitForThreadTermination(DWORD timeout /*= INFINITE*/)
{
- bool bRet = true;
- if (m_ThreadCount != 0)
- {
- bRet = (WaitForMultipleObjectsEx(m_ThreadCount, m_ThreadHandles, TRUE, timeout, FALSE) != WAIT_TIMEOUT);
+ bool bRet = true;
+ if (m_ThreadCount != 0) {
+ bRet = (WaitForMultipleObjectsEx(m_ThreadCount, m_ThreadHandles, TRUE, timeout, FALSE) != WAIT_TIMEOUT);
}
- return bRet;
+ return bRet;
}
diff --git a/plugins/pluginWinDD/internals/ThreadManager.h b/plugins/pluginWinDD/internals/ThreadManager.h
index d710998..2a9157f 100755
--- a/plugins/pluginWinDD/internals/ThreadManager.h
+++ b/plugins/pluginWinDD/internals/ThreadManager.h
@@ -12,22 +12,22 @@
class THREADMANAGER
{
- public:
- THREADMANAGER();
- ~THREADMANAGER();
- void Clean();
- DUPL_RETURN Initialize(INT SingleOutput, UINT OutputCount, HANDLE UnexpectedErrorEvent, HANDLE ExpectedErrorEvent, HANDLE TerminateThreadsEvent, HANDLE SharedHandle, _In_ const struct tmedia_producer_s* Producer, _In_ RECT* DesktopDim);
- PTR_INFO* GetPointerInfo();
- bool WaitForThreadTermination(DWORD timeout = INFINITE);
+public:
+ THREADMANAGER();
+ ~THREADMANAGER();
+ void Clean();
+ DUPL_RETURN Initialize(INT SingleOutput, UINT OutputCount, HANDLE UnexpectedErrorEvent, HANDLE ExpectedErrorEvent, HANDLE TerminateThreadsEvent, HANDLE SharedHandle, _In_ const struct tmedia_producer_s* Producer, _In_ RECT* DesktopDim);
+ PTR_INFO* GetPointerInfo();
+ bool WaitForThreadTermination(DWORD timeout = INFINITE);
- private:
- DUPL_RETURN InitializeDx(_Out_ DX_RESOURCES* Data);
- void CleanDx(_Inout_ DX_RESOURCES* Data);
+private:
+ DUPL_RETURN InitializeDx(_Out_ DX_RESOURCES* Data);
+ void CleanDx(_Inout_ DX_RESOURCES* Data);
- PTR_INFO m_PtrInfo;
- UINT m_ThreadCount;
- _Field_size_(m_ThreadCount) HANDLE* m_ThreadHandles;
- _Field_size_(m_ThreadCount) THREAD_DATA* m_ThreadData;
+ PTR_INFO m_PtrInfo;
+ UINT m_ThreadCount;
+ _Field_size_(m_ThreadCount) HANDLE* m_ThreadHandles;
+ _Field_size_(m_ThreadCount) THREAD_DATA* m_ThreadData;
};
#endif
diff --git a/plugins/pluginWinDD/plugin_win_dd_config.h b/plugins/pluginWinDD/plugin_win_dd_config.h
index a1d3855..70e6ce5 100755
--- a/plugins/pluginWinDD/plugin_win_dd_config.h
+++ b/plugins/pluginWinDD/plugin_win_dd_config.h
@@ -48,12 +48,12 @@
# define PLUGIN_WIN_DD_UNDER_X86 1
#endif
-// Guards against C++ name mangling
+// Guards against C++ name mangling
#ifdef __cplusplus
# define PLUGIN_WIN_DD_BEGIN_DECLS extern "C" {
# define PLUGIN_WIN_DD_END_DECLS }
#else
-# define PLUGIN_WIN_DD_BEGIN_DECLS
+# define PLUGIN_WIN_DD_BEGIN_DECLS
# define PLUGIN_WIN_DD_END_DECLS
#endif
diff --git a/plugins/pluginWinDD/plugin_win_dd_producer.cxx b/plugins/pluginWinDD/plugin_win_dd_producer.cxx
index 7c13767..01d857d 100755
--- a/plugins/pluginWinDD/plugin_win_dd_producer.cxx
+++ b/plugins/pluginWinDD/plugin_win_dd_producer.cxx
@@ -55,28 +55,27 @@
//
// plugin_win_dd_producer_t
//
-typedef struct plugin_win_dd_producer_s
-{
- TMEDIA_DECLARE_PRODUCER;
-
- bool bStarted, bPrepared, bMuted, bWindowHooked, bThreadTerminationDelayed;
- tsk_thread_handle_t* ppTread[1];
-
- OUTPUTMANAGER *pOutMgr;
- THREADMANAGER *pThreadMgr;
-
- // Window handles
- HWND hwndPreview;
- WNDPROC wndPreviewProc;
- HWND hwndSrc;
-
- // Synchronization
- HANDLE hlUnexpectedErrorEvent;
- HANDLE hlExpectedErrorEvent;
- HANDLE hlOcclutionEvent;
- HANDLE hlTerminateThreadsEvent;
-
- HCURSOR hcCursor;
+typedef struct plugin_win_dd_producer_s {
+ TMEDIA_DECLARE_PRODUCER;
+
+ bool bStarted, bPrepared, bMuted, bWindowHooked, bThreadTerminationDelayed;
+ tsk_thread_handle_t* ppTread[1];
+
+ OUTPUTMANAGER *pOutMgr;
+ THREADMANAGER *pThreadMgr;
+
+ // Window handles
+ HWND hwndPreview;
+ WNDPROC wndPreviewProc;
+ HWND hwndSrc;
+
+ // Synchronization
+ HANDLE hlUnexpectedErrorEvent;
+ HANDLE hlExpectedErrorEvent;
+ HANDLE hlOcclutionEvent;
+ HANDLE hlTerminateThreadsEvent;
+
+ HCURSOR hcCursor;
}
plugin_win_dd_producer_t;
@@ -97,11 +96,10 @@ static void* TSK_STDCALL DDThread(void *pArg);
//
// Class for progressive waits
//
-typedef struct
-{
- UINT WaitTime;
- UINT WaitCount;
-}WAIT_BAND;
+typedef struct {
+ UINT WaitTime;
+ UINT WaitCount;
+} WAIT_BAND;
#define WAIT_BAND_COUNT 3
#define WAIT_BAND_STOP 0
@@ -109,360 +107,316 @@ typedef struct
class DYNAMIC_WAIT
{
public:
- DYNAMIC_WAIT();
- ~DYNAMIC_WAIT();
+ DYNAMIC_WAIT();
+ ~DYNAMIC_WAIT();
- void Wait();
+ void Wait();
private:
- static const WAIT_BAND m_WaitBands[WAIT_BAND_COUNT];
+ static const WAIT_BAND m_WaitBands[WAIT_BAND_COUNT];
- // Period in seconds that a new wait call is considered part of the same wait sequence
- static const UINT m_WaitSequenceTimeInSeconds = 2;
+ // Period in seconds that a new wait call is considered part of the same wait sequence
+ static const UINT m_WaitSequenceTimeInSeconds = 2;
- UINT m_CurrentWaitBandIdx;
- UINT m_WaitCountInCurrentBand;
- LARGE_INTEGER m_QPCFrequency;
- LARGE_INTEGER m_LastWakeUpTime;
- BOOL m_QPCValid;
+ UINT m_CurrentWaitBandIdx;
+ UINT m_WaitCountInCurrentBand;
+ LARGE_INTEGER m_QPCFrequency;
+ LARGE_INTEGER m_LastWakeUpTime;
+ BOOL m_QPCValid;
};
const WAIT_BAND DYNAMIC_WAIT::m_WaitBands[WAIT_BAND_COUNT] = {
- { 250, 20 },
- { 2000, 60 },
- { 5000, WAIT_BAND_STOP } // Never move past this band
+ { 250, 20 },
+ { 2000, 60 },
+ { 5000, WAIT_BAND_STOP } // Never move past this band
};
/* ============ Video DD Producer Interface ================= */
static int plugin_win_dd_producer_set(tmedia_producer_t *p_self, const tmedia_param_t* pc_param)
{
- int ret = -1;
- plugin_win_dd_producer_t* p_dd = (plugin_win_dd_producer_t*)p_self;
-
- if (!p_dd || !pc_param)
- {
- DD_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pc_param->value_type == tmedia_pvt_int64)
- {
- if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd"))
- {
- HWND hwnd = (HWND)*((int64_t*)pc_param->value);
- ret = SUCCEEDED(HookWindow(p_dd, hwnd)) ? 0 : -1;
- }
- else if (tsk_striequals(pc_param->key, "src-hwnd"))
- {
- p_dd->hwndSrc = (HWND)*((int64_t*)pc_param->value);
- ret = 0;
- }
- }
- else if (pc_param->value_type == tmedia_pvt_int32)
- {
- if (tsk_striequals(pc_param->key, "mute"))
- {
- p_dd->bMuted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0);
- ret = 0;
- }
- }
-
- return ret;
+ int ret = -1;
+ plugin_win_dd_producer_t* p_dd = (plugin_win_dd_producer_t*)p_self;
+
+ if (!p_dd || !pc_param) {
+ DD_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pc_param->value_type == tmedia_pvt_int64) {
+ if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd")) {
+ HWND hwnd = (HWND)*((int64_t*)pc_param->value);
+ ret = SUCCEEDED(HookWindow(p_dd, hwnd)) ? 0 : -1;
+ }
+ else if (tsk_striequals(pc_param->key, "src-hwnd")) {
+ p_dd->hwndSrc = (HWND)*((int64_t*)pc_param->value);
+ ret = 0;
+ }
+ }
+ else if (pc_param->value_type == tmedia_pvt_int32) {
+ if (tsk_striequals(pc_param->key, "mute")) {
+ p_dd->bMuted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0);
+ ret = 0;
+ }
+ }
+
+ return ret;
}
static int plugin_win_dd_producer_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
- HRESULT hr = S_OK;
-
- if (!pSelf || !codec && codec->plugin)
- {
- DD_DEBUG_ERROR("Invalid parameter");
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- if (pSelf->bPrepared)
- {
- DD_DEBUG_WARN("DD video producer already prepared");
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- if (pSelf->bThreadTerminationDelayed)
- {
- DD_DEBUG_INFO("Thread termination was delayed ...cleanup now");
- if (_plugin_win_dd_producer_unprepare(pSelf, true/*cleanup?*/) != 0)
- {
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
- }
-
- TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
- TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
- TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
- TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
-
- DD_DEBUG_INFO("DD video producer: fps=%d, width=%d, height=%d",
- TMEDIA_PRODUCER(pSelf)->video.fps,
- TMEDIA_PRODUCER(pSelf)->video.width,
- TMEDIA_PRODUCER(pSelf)->video.height);
-
- // Event used by the threads to signal an unexpected error and we want to quit the app
- if (!pSelf->hlUnexpectedErrorEvent && !(pSelf->hlUnexpectedErrorEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr)))
- {
- ProcessFailure(nullptr, L"UnexpectedErrorEvent creation failed", L"Error", E_UNEXPECTED);
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- // Event for when a thread encounters an expected error
- if (!pSelf->hlExpectedErrorEvent && !(pSelf->hlExpectedErrorEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr)))
- {
- ProcessFailure(nullptr, L"ExpectedErrorEvent creation failed", L"Error", E_UNEXPECTED);
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- // Event for Occlution
- if (!pSelf->hlOcclutionEvent && !(pSelf->hlOcclutionEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr)))
- {
- ProcessFailure(nullptr, L"OcclutionEvent creation failed", L"Error", E_UNEXPECTED);
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- // Event to tell spawned threads to quit
- if (!pSelf->hlTerminateThreadsEvent && !(pSelf->hlTerminateThreadsEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr)))
- {
- ProcessFailure(nullptr, L"TerminateThreadsEvent creation failed", L"Error", E_UNEXPECTED);
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- // Load simple cursor
- if (!pSelf->hcCursor && !(pSelf->hcCursor = LoadCursor(nullptr, IDC_ARROW)))
- {
- ProcessFailure(nullptr, L"Cursor load failed", L"Error", E_UNEXPECTED);
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- if (!pSelf->pOutMgr && !(pSelf->pOutMgr = new OUTPUTMANAGER()))
- {
- ProcessFailure(nullptr, L"Out manager allocation failed", L"Error", E_OUTOFMEMORY);
- DD_CHECK_HR(hr = E_OUTOFMEMORY);
- }
-
- if (!pSelf->pThreadMgr && !(pSelf->pThreadMgr = new THREADMANAGER()))
- {
- ProcessFailure(nullptr, L"Thread managed allocation failed", L"Error", E_OUTOFMEMORY);
- DD_CHECK_HR(hr = E_OUTOFMEMORY);
- }
+ plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
+ HRESULT hr = S_OK;
+
+ if (!pSelf || !codec || !codec->plugin) {
+ DD_DEBUG_ERROR("Invalid parameter");
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ if (pSelf->bPrepared) {
+ DD_DEBUG_WARN("DD video producer already prepared");
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ if (pSelf->bThreadTerminationDelayed) {
+ DD_DEBUG_INFO("Thread termination was delayed ...cleanup now");
+ if (_plugin_win_dd_producer_unprepare(pSelf, true/*cleanup?*/) != 0) {
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+ }
+
+ TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+ TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+ TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+
+ DD_DEBUG_INFO("DD video producer: fps=%d, width=%d, height=%d",
+ TMEDIA_PRODUCER(pSelf)->video.fps,
+ TMEDIA_PRODUCER(pSelf)->video.width,
+ TMEDIA_PRODUCER(pSelf)->video.height);
+
+ // Event used by the threads to signal an unexpected error, meaning we want to quit the app
+ if (!pSelf->hlUnexpectedErrorEvent && !(pSelf->hlUnexpectedErrorEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr))) {
+ ProcessFailure(nullptr, L"UnexpectedErrorEvent creation failed", L"Error", E_UNEXPECTED);
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ // Event for when a thread encounters an expected error
+ if (!pSelf->hlExpectedErrorEvent && !(pSelf->hlExpectedErrorEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr))) {
+ ProcessFailure(nullptr, L"ExpectedErrorEvent creation failed", L"Error", E_UNEXPECTED);
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ // Event for Occlution
+ if (!pSelf->hlOcclutionEvent && !(pSelf->hlOcclutionEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr))) {
+ ProcessFailure(nullptr, L"OcclutionEvent creation failed", L"Error", E_UNEXPECTED);
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ // Event to tell spawned threads to quit
+ if (!pSelf->hlTerminateThreadsEvent && !(pSelf->hlTerminateThreadsEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr))) {
+ ProcessFailure(nullptr, L"TerminateThreadsEvent creation failed", L"Error", E_UNEXPECTED);
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ // Load simple cursor
+ if (!pSelf->hcCursor && !(pSelf->hcCursor = LoadCursor(nullptr, IDC_ARROW))) {
+ ProcessFailure(nullptr, L"Cursor load failed", L"Error", E_UNEXPECTED);
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ if (!pSelf->pOutMgr && !(pSelf->pOutMgr = new OUTPUTMANAGER())) {
+ ProcessFailure(nullptr, L"Out manager allocation failed", L"Error", E_OUTOFMEMORY);
+ DD_CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ if (!pSelf->pThreadMgr && !(pSelf->pThreadMgr = new THREADMANAGER())) {
+ ProcessFailure(nullptr, L"Thread manager allocation failed", L"Error", E_OUTOFMEMORY);
+ DD_CHECK_HR(hr = E_OUTOFMEMORY);
+ }
bail:
- pSelf->bPrepared = SUCCEEDED(hr);
- return SUCCEEDED(hr) ? 0 : -1;
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_dd_producer_start(tmedia_producer_t* self)
{
- plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
- HRESULT hr = S_OK;
-
- if (!pSelf)
- {
- DD_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pSelf->bStarted)
- {
- DD_DEBUG_INFO("Producer already started");
- goto bail;
- }
- if (!pSelf->bPrepared)
- {
- DD_DEBUG_ERROR("Producer not prepared");
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- DD_CHECK_HR(hr = HookWindow(pSelf, pSelf->hwndPreview));
-
- // Start asynchronous watcher thread
- pSelf->bStarted = true;
- int ret = tsk_thread_create(&pSelf->ppTread[0], DDThread, pSelf);
- if (ret != 0)
- {
- TSK_DEBUG_ERROR("Failed to create thread");
- pSelf->bStarted = false;
- if (pSelf->ppTread[0])
- {
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
+ plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
+ HRESULT hr = S_OK;
+
+ if (!pSelf) {
+ DD_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pSelf->bStarted) {
+ DD_DEBUG_INFO("Producer already started");
+ goto bail;
+ }
+ if (!pSelf->bPrepared) {
+ DD_DEBUG_ERROR("Producer not prepared");
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ DD_CHECK_HR(hr = HookWindow(pSelf, pSelf->hwndPreview));
+
+ // Start asynchronous watcher thread
+ pSelf->bStarted = true;
+ int ret = tsk_thread_create(&pSelf->ppTread[0], DDThread, pSelf);
+ if (ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ pSelf->bStarted = false;
+ if (pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
bail:
- if (FAILED(hr))
- {
- UnhookWindow(pSelf);
- return -1;
- }
- pSelf->bStarted = true;
- return 0;
+ if (FAILED(hr)) {
+ UnhookWindow(pSelf);
+ return -1;
+ }
+ pSelf->bStarted = true;
+ return 0;
}
static int plugin_win_dd_producer_pause(tmedia_producer_t* self)
{
- plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
-
- if (!pSelf)
- {
- DD_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if (!pSelf->bStarted)
- {
- DD_DEBUG_INFO("MF video producer not started");
- }
-
- return 0;
+ plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
+
+ if (!pSelf) {
+ DD_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if (!pSelf->bStarted) {
+ DD_DEBUG_INFO("MF video producer not started");
+ }
+
+ return 0;
}
static int plugin_win_dd_producer_stop(tmedia_producer_t* self)
{
- plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
-
- if (!pSelf)
- {
- DD_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- pSelf->bStarted = false;
-
- UnhookWindow(pSelf);
-
- if (pSelf->hlTerminateThreadsEvent)
- {
- SetEvent(pSelf->hlTerminateThreadsEvent);
- }
- if (pSelf->ppTread[0])
- {
- tsk_thread_join(&pSelf->ppTread[0]);
- }
-
- // next start() will be called after prepare()
- int ret = _plugin_win_dd_producer_unprepare(pSelf);
-
- return ret;
+ plugin_win_dd_producer_t* pSelf = (plugin_win_dd_producer_t*)self;
+
+ if (!pSelf) {
+ DD_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ pSelf->bStarted = false;
+
+ UnhookWindow(pSelf);
+
+ if (pSelf->hlTerminateThreadsEvent) {
+ SetEvent(pSelf->hlTerminateThreadsEvent);
+ }
+ if (pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+
+ // next start() will be called after prepare()
+ int ret = _plugin_win_dd_producer_unprepare(pSelf);
+
+ return ret;
}
static int _plugin_win_dd_producer_unprepare(plugin_win_dd_producer_t* pSelf, bool bCleanup /*= false*/)
{
- HRESULT hr = S_OK;
-
- if (!pSelf)
- {
- DD_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pSelf->bStarted)
- {
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
-
- pSelf->bThreadTerminationDelayed = false;
-
- // Thread manager must be destroyed before the events and output manager
- if (pSelf->pThreadMgr)
- {
- // if we are cleaning the producer then all threads must exit only when all threads are destroyed
- // https://code.google.com/p/sincity/issues/detail?id=7
- if (pSelf->pThreadMgr->WaitForThreadTermination(bCleanup ? INFINITE : DD_DDPROC_THREAD_TIMEOUT) == true)
- {
- delete pSelf->pThreadMgr;
- pSelf->pThreadMgr = nullptr;
- }
- else
- {
- // Thread wait timedout
- DD_DEBUG_WARN("DDProc thread termination delayed");
- pSelf->bThreadTerminationDelayed = true;
- }
- }
-
- if (!pSelf->bThreadTerminationDelayed)
- {
- if (pSelf->hlUnexpectedErrorEvent)
- {
- CloseHandle(pSelf->hlUnexpectedErrorEvent);
- pSelf->hlUnexpectedErrorEvent = nullptr;
- }
- if (pSelf->hlExpectedErrorEvent)
- {
- CloseHandle(pSelf->hlExpectedErrorEvent);
- pSelf->hlExpectedErrorEvent = nullptr;
- }
- if (pSelf->hlOcclutionEvent)
- {
- CloseHandle(pSelf->hlOcclutionEvent);
- pSelf->hlOcclutionEvent = nullptr;
- }
- if (pSelf->hlTerminateThreadsEvent)
- {
- CloseHandle(pSelf->hlTerminateThreadsEvent);
- pSelf->hlTerminateThreadsEvent = nullptr;
- }
-
- if (pSelf->hcCursor)
- {
- DestroyCursor(pSelf->hcCursor);
- pSelf->hcCursor = nullptr;
- }
-
- if (pSelf->pOutMgr)
- {
- delete pSelf->pOutMgr;
- pSelf->pOutMgr = nullptr;
- }
- }
-
- pSelf->bPrepared = false;
+ HRESULT hr = S_OK;
+
+ if (!pSelf) {
+ DD_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pSelf->bStarted) {
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ pSelf->bThreadTerminationDelayed = false;
+
+ // Thread manager must be destroyed before the events and output manager
+ if (pSelf->pThreadMgr) {
+ // if we are cleaning up the producer then we must wait until all threads have exited and been destroyed
+ // https://code.google.com/p/sincity/issues/detail?id=7
+ if (pSelf->pThreadMgr->WaitForThreadTermination(bCleanup ? INFINITE : DD_DDPROC_THREAD_TIMEOUT) == true) {
+ delete pSelf->pThreadMgr;
+ pSelf->pThreadMgr = nullptr;
+ }
+ else {
+ // Thread wait timedout
+ DD_DEBUG_WARN("DDProc thread termination delayed");
+ pSelf->bThreadTerminationDelayed = true;
+ }
+ }
+
+ if (!pSelf->bThreadTerminationDelayed) {
+ if (pSelf->hlUnexpectedErrorEvent) {
+ CloseHandle(pSelf->hlUnexpectedErrorEvent);
+ pSelf->hlUnexpectedErrorEvent = nullptr;
+ }
+ if (pSelf->hlExpectedErrorEvent) {
+ CloseHandle(pSelf->hlExpectedErrorEvent);
+ pSelf->hlExpectedErrorEvent = nullptr;
+ }
+ if (pSelf->hlOcclutionEvent) {
+ CloseHandle(pSelf->hlOcclutionEvent);
+ pSelf->hlOcclutionEvent = nullptr;
+ }
+ if (pSelf->hlTerminateThreadsEvent) {
+ CloseHandle(pSelf->hlTerminateThreadsEvent);
+ pSelf->hlTerminateThreadsEvent = nullptr;
+ }
+
+ if (pSelf->hcCursor) {
+ DestroyCursor(pSelf->hcCursor);
+ pSelf->hcCursor = nullptr;
+ }
+
+ if (pSelf->pOutMgr) {
+ delete pSelf->pOutMgr;
+ pSelf->pOutMgr = nullptr;
+ }
+ }
+
+ pSelf->bPrepared = false;
bail:
- return 0;
+ return 0;
}
static HRESULT HookWindow(struct plugin_win_dd_producer_s *pSelf, HWND hWnd)
{
- HRESULT hr = S_OK;
-
- DD_CHECK_HR(hr = UnhookWindow(pSelf));
-
- if ((pSelf->hwndPreview = hWnd))
- {
- pSelf->wndPreviewProc = (WNDPROC)SetWindowLongPtr(pSelf->hwndPreview, GWLP_WNDPROC, (LONG_PTR)WndProc);
- if (!pSelf->wndPreviewProc)
- {
- DD_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
- DD_CHECK_HR(hr = E_FAIL);
- }
- SetProp(pSelf->hwndPreview, L"Self", pSelf);
- pSelf->bWindowHooked = true;
- }
+ HRESULT hr = S_OK;
+
+ DD_CHECK_HR(hr = UnhookWindow(pSelf));
+
+ if ((pSelf->hwndPreview = hWnd)) {
+ pSelf->wndPreviewProc = (WNDPROC)SetWindowLongPtr(pSelf->hwndPreview, GWLP_WNDPROC, (LONG_PTR)WndProc);
+ if (!pSelf->wndPreviewProc) {
+ DD_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
+ DD_CHECK_HR(hr = E_FAIL);
+ }
+ SetProp(pSelf->hwndPreview, L"Self", pSelf);
+ pSelf->bWindowHooked = true;
+ }
bail:
- return S_OK;
+ return S_OK;
}
static HRESULT UnhookWindow(struct plugin_win_dd_producer_s *pSelf)
{
- if (pSelf->hwndPreview && pSelf->wndPreviewProc)
- {
- SetWindowLongPtr(pSelf->hwndPreview, GWLP_WNDPROC, (LONG_PTR)pSelf->wndPreviewProc);
- pSelf->wndPreviewProc = NULL;
- }
- if (pSelf->hwndPreview)
- {
- ::InvalidateRect(pSelf->hwndPreview, NULL, FALSE);
- }
- pSelf->bWindowHooked = false;
- return S_OK;
+ if (pSelf->hwndPreview && pSelf->wndPreviewProc) {
+ SetWindowLongPtr(pSelf->hwndPreview, GWLP_WNDPROC, (LONG_PTR)pSelf->wndPreviewProc);
+ pSelf->wndPreviewProc = NULL;
+ }
+ if (pSelf->hwndPreview) {
+ ::InvalidateRect(pSelf->hwndPreview, NULL, FALSE);
+ }
+ pSelf->bWindowHooked = false;
+ return S_OK;
}
//
@@ -471,66 +425,61 @@ static HRESULT UnhookWindow(struct plugin_win_dd_producer_s *pSelf)
/* constructor */
static tsk_object_t* plugin_win_dd_producer_ctor(tsk_object_t * self, va_list * app)
{
- plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)self;
- if (pSelf)
- {
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
-
- /* init self with default values*/
- TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
- TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_rgb32;
- TMEDIA_PRODUCER(pSelf)->video.fps = 15;
- TMEDIA_PRODUCER(pSelf)->video.width = 352;
- TMEDIA_PRODUCER(pSelf)->video.height = 288;
-
- DD_DEBUG_INFO("Create Microsoft Desktop Duplication producer");
- }
- return self;
+ plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)self;
+ if (pSelf) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
+
+ /* init self with default values*/
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+ TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_rgb32;
+ TMEDIA_PRODUCER(pSelf)->video.fps = 15;
+ TMEDIA_PRODUCER(pSelf)->video.width = 352;
+ TMEDIA_PRODUCER(pSelf)->video.height = 288;
+
+ DD_DEBUG_INFO("Create Microsoft Desktop Duplication producer");
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_dd_producer_dtor(tsk_object_t * self)
{
- plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)self;
- if (pSelf)
- {
- /* stop */
- if (pSelf->bStarted)
- {
- plugin_win_dd_producer_stop(TMEDIA_PRODUCER(pSelf));
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
- /* deinit self */
- _plugin_win_dd_producer_unprepare(pSelf, true/*cleanup*/);
-
- DD_DEBUG_INFO("*** WinDD producer destroyed ***");
- }
-
- return self;
+ plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)self;
+ if (pSelf) {
+ /* stop */
+ if (pSelf->bStarted) {
+ plugin_win_dd_producer_stop(TMEDIA_PRODUCER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
+ /* deinit self */
+ _plugin_win_dd_producer_unprepare(pSelf, true/*cleanup*/);
+
+ DD_DEBUG_INFO("*** WinDD producer destroyed ***");
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_dd_producer_def_s =
-{
- sizeof(plugin_win_dd_producer_t),
- plugin_win_dd_producer_ctor,
- plugin_win_dd_producer_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_dd_producer_def_s = {
+ sizeof(plugin_win_dd_producer_t),
+ plugin_win_dd_producer_ctor,
+ plugin_win_dd_producer_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t plugin_win_dd_producer_plugin_def_s =
-{
- &plugin_win_dd_producer_def_s,
+static const tmedia_producer_plugin_def_t plugin_win_dd_producer_plugin_def_s = {
+ &plugin_win_dd_producer_def_s,
- tmedia_bfcp_video,
- "Microsoft Windows Desktop Duplication producer (Video)",
+ tmedia_bfcp_video,
+ "Microsoft Windows Desktop Duplication producer (Video)",
- plugin_win_dd_producer_set,
- plugin_win_dd_producer_prepare,
- plugin_win_dd_producer_start,
- plugin_win_dd_producer_pause,
- plugin_win_dd_producer_stop
+ plugin_win_dd_producer_set,
+ plugin_win_dd_producer_prepare,
+ plugin_win_dd_producer_start,
+ plugin_win_dd_producer_pause,
+ plugin_win_dd_producer_stop
};
const tmedia_producer_plugin_def_t *plugin_win_dd_producer_plugin_def_t = &plugin_win_dd_producer_plugin_def_s;
@@ -545,130 +494,115 @@ const tmedia_producer_plugin_def_t *plugin_win_dd_producer_plugin_def_t = &plugi
// These are the errors we expect from general Dxgi API due to a transition
HRESULT SystemTransitionsExpectedErrors[] = {
- DXGI_ERROR_DEVICE_REMOVED,
- DXGI_ERROR_ACCESS_LOST,
- static_cast<HRESULT>(WAIT_ABANDONED),
- S_OK // Terminate list with zero valued HRESULT
+ DXGI_ERROR_DEVICE_REMOVED,
+ DXGI_ERROR_ACCESS_LOST,
+ static_cast<HRESULT>(WAIT_ABANDONED),
+ S_OK // Terminate list with zero valued HRESULT
};
// These are the errors we expect from IDXGIOutput1::DuplicateOutput due to a transition
HRESULT CreateDuplicationExpectedErrors[] = {
- DXGI_ERROR_DEVICE_REMOVED,
- static_cast<HRESULT>(E_ACCESSDENIED),
- DXGI_ERROR_UNSUPPORTED,
- DXGI_ERROR_SESSION_DISCONNECTED,
- S_OK // Terminate list with zero valued HRESULT
+ DXGI_ERROR_DEVICE_REMOVED,
+ static_cast<HRESULT>(E_ACCESSDENIED),
+ DXGI_ERROR_UNSUPPORTED,
+ DXGI_ERROR_SESSION_DISCONNECTED,
+ S_OK // Terminate list with zero valued HRESULT
};
// These are the errors we expect from IDXGIOutputDuplication methods due to a transition
HRESULT FrameInfoExpectedErrors[] = {
- DXGI_ERROR_DEVICE_REMOVED,
- DXGI_ERROR_ACCESS_LOST,
- S_OK // Terminate list with zero valued HRESULT
+ DXGI_ERROR_DEVICE_REMOVED,
+ DXGI_ERROR_ACCESS_LOST,
+ S_OK // Terminate list with zero valued HRESULT
};
// These are the errors we expect from IDXGIAdapter::EnumOutputs methods due to outputs becoming stale during a transition
HRESULT EnumOutputsExpectedErrors[] = {
- DXGI_ERROR_NOT_FOUND,
- S_OK // Terminate list with zero valued HRESULT
+ DXGI_ERROR_NOT_FOUND,
+ S_OK // Terminate list with zero valued HRESULT
};
_Post_satisfies_(return != DUPL_RETURN_SUCCESS)
- DUPL_RETURN ProcessFailure(_In_opt_ ID3D11Device* Device, _In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr, _In_opt_z_ HRESULT* ExpectedErrors)
+DUPL_RETURN ProcessFailure(_In_opt_ ID3D11Device* Device, _In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr, _In_opt_z_ HRESULT* ExpectedErrors)
{
- HRESULT TranslatedHr;
-
- // On an error check if the DX device is lost
- if (Device)
- {
- HRESULT DeviceRemovedReason = Device->GetDeviceRemovedReason();
-
- switch (DeviceRemovedReason)
- {
- case DXGI_ERROR_DEVICE_REMOVED:
- case DXGI_ERROR_DEVICE_RESET:
- case static_cast<HRESULT>(E_OUTOFMEMORY) :
- {
- // Our device has been stopped due to an external event on the GPU so map them all to
- // device removed and continue processing the condition
- TranslatedHr = DXGI_ERROR_DEVICE_REMOVED;
- break;
- }
-
- case S_OK:
- {
- // Device is not removed so use original error
- TranslatedHr = hr;
- break;
- }
-
- default:
- {
- // Device is removed but not a error we want to remap
- TranslatedHr = DeviceRemovedReason;
- }
- }
- }
- else
- {
- TranslatedHr = hr;
- }
-
- // Check if this error was expected or not
- if (ExpectedErrors)
- {
- HRESULT* CurrentResult = ExpectedErrors;
-
- while (*CurrentResult != S_OK)
- {
- if (*(CurrentResult++) == TranslatedHr)
- {
- return DUPL_RETURN_ERROR_EXPECTED;
- }
- }
- }
-
- // Error was not expected so display the message box
- DisplayMsg(Str, Title, TranslatedHr);
-
- return DUPL_RETURN_ERROR_UNEXPECTED;
+ HRESULT TranslatedHr;
+
+ // On an error check if the DX device is lost
+ if (Device) {
+ HRESULT DeviceRemovedReason = Device->GetDeviceRemovedReason();
+
+ switch (DeviceRemovedReason) {
+ case DXGI_ERROR_DEVICE_REMOVED:
+ case DXGI_ERROR_DEVICE_RESET:
+ case static_cast<HRESULT>(E_OUTOFMEMORY) : {
+ // Our device has been stopped due to an external event on the GPU so map them all to
+ // device removed and continue processing the condition
+ TranslatedHr = DXGI_ERROR_DEVICE_REMOVED;
+ break;
+ }
+
+ case S_OK: {
+ // Device is not removed so use original error
+ TranslatedHr = hr;
+ break;
+ }
+
+ default: {
+ // Device is removed but not an error we want to remap
+ TranslatedHr = DeviceRemovedReason;
+ }
+ }
+ }
+ else {
+ TranslatedHr = hr;
+ }
+
+ // Check if this error was expected or not
+ if (ExpectedErrors) {
+ HRESULT* CurrentResult = ExpectedErrors;
+
+ while (*CurrentResult != S_OK) {
+ if (*(CurrentResult++) == TranslatedHr) {
+ return DUPL_RETURN_ERROR_EXPECTED;
+ }
+ }
+ }
+
+ // Error was not expected so display the message box
+ DisplayMsg(Str, Title, TranslatedHr);
+
+ return DUPL_RETURN_ERROR_UNEXPECTED;
}
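The four arrays above are plain HRESULT lists terminated by S_OK, and ProcessFailure() walks them to decide between DUPL_RETURN_ERROR_EXPECTED and DUPL_RETURN_ERROR_UNEXPECTED. A hedged standalone restatement of that scan, useful for quick testing:

    #include <windows.h>

    // True when hr appears in an S_OK-terminated list such as SystemTransitionsExpectedErrors.
    static bool IsExpectedError(HRESULT hr, const HRESULT* expected)
    {
        if (!expected) {
            return false; // no list means nothing is expected
        }
        for (const HRESULT* cur = expected; *cur != S_OK; ++cur) {
            if (*cur == hr) {
                return true;
            }
        }
        return false;
    }

    // Example: IsExpectedError(DXGI_ERROR_ACCESS_LOST, SystemTransitionsExpectedErrors) returns true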
LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
- switch (message)
- {
- case WM_DESTROY:
- {
- PostQuitMessage(0);
- break;
- }
- case WM_SIZE:
- {
- // Tell output manager that window size has changed
- plugin_win_dd_producer_t* pSelf = static_cast<plugin_win_dd_producer_t*>(GetProp(hWnd, L"Self"));
- if (pSelf && pSelf->pOutMgr)
- {
- pSelf->pOutMgr->WindowResize();
- }
- break;
- }
- case OCCLUSION_STATUS_MSG:
- {
- plugin_win_dd_producer_t* pSelf = static_cast<plugin_win_dd_producer_t*>(GetProp(hWnd, L"Self"));
- if (pSelf && pSelf->hlOcclutionEvent)
- {
- SetEvent(pSelf->hlOcclutionEvent);
- }
- break;
- }
- default:
- return DefWindowProc(hWnd, message, wParam, lParam);
- }
-
- return 0;
+ switch (message) {
+ case WM_DESTROY: {
+ PostQuitMessage(0);
+ break;
+ }
+ case WM_SIZE: {
+ // Tell output manager that window size has changed
+ plugin_win_dd_producer_t* pSelf = static_cast<plugin_win_dd_producer_t*>(GetProp(hWnd, L"Self"));
+ if (pSelf && pSelf->pOutMgr) {
+ pSelf->pOutMgr->WindowResize();
+ }
+ break;
+ }
+ case OCCLUSION_STATUS_MSG: {
+ plugin_win_dd_producer_t* pSelf = static_cast<plugin_win_dd_producer_t*>(GetProp(hWnd, L"Self"));
+ if (pSelf && pSelf->hlOcclutionEvent) {
+ SetEvent(pSelf->hlOcclutionEvent);
+ }
+ break;
+ }
+ default:
+ return DefWindowProc(hWnd, message, wParam, lParam);
+ }
+
+ return 0;
}
//
@@ -676,399 +610,357 @@ LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
//
DWORD WINAPI DDProc(_In_ void* Param)
{
- DD_DEBUG_INFO("DDProc (producer) - ENTER");
-
- // Classes
- DISPLAYMANAGER DispMgr;
- DUPLICATIONMANAGER DuplMgr;
-
- // D3D objects
- ID3D11Texture2D* SharedSurf = nullptr;
- IDXGIKeyedMutex* KeyMutex = nullptr;
-
- // Data passed in from thread creation
- THREAD_DATA* TData = reinterpret_cast<THREAD_DATA*>(Param);
-
- // Get desktop
- DUPL_RETURN Ret;
- HDESK CurrentDesktop = nullptr;
- CurrentDesktop = OpenInputDesktop(0, FALSE, GENERIC_ALL);
- if (!CurrentDesktop)
- {
- // We do not have access to the desktop so request a retry
- SetEvent(TData->ExpectedErrorEvent);
- Ret = DUPL_RETURN_ERROR_EXPECTED;
- goto Exit;
- }
-
- // Attach desktop to this thread
- bool DesktopAttached = SetThreadDesktop(CurrentDesktop) != 0;
- CloseDesktop(CurrentDesktop);
- CurrentDesktop = nullptr;
- if (!DesktopAttached)
- {
- // We do not have access to the desktop so request a retry
- Ret = DUPL_RETURN_ERROR_EXPECTED;
- goto Exit;
- }
-
- // New display manager
- DispMgr.InitD3D(&TData->DxRes);
-
- // FPS manager
- uint64_t TimeNow, TimeLastFrame = 0;
- const uint64_t TimeFrameDuration = 1000 / TData->Producer->video.fps;
-
- // Obtain handle to sync shared Surface
- HRESULT hr = TData->DxRes.Device->OpenSharedResource(TData->TexSharedHandle, __uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&SharedSurf));
- if (FAILED(hr))
- {
- Ret = ProcessFailure(TData->DxRes.Device, L"Opening shared texture failed", L"Error", hr, SystemTransitionsExpectedErrors);
- goto Exit;
- }
-
- hr = SharedSurf->QueryInterface(__uuidof(IDXGIKeyedMutex), reinterpret_cast<void**>(&KeyMutex));
- if (FAILED(hr))
- {
- Ret = ProcessFailure(nullptr, L"Failed to get keyed mutex interface in spawned thread", L"Error", hr);
- goto Exit;
- }
-
- // Make duplication manager
- Ret = DuplMgr.InitDupl(TData->DxRes.Device, TData->DxRes.Context ,TData->Output);
- if (Ret != DUPL_RETURN_SUCCESS)
- {
- goto Exit;
- }
-
- // Get output description
- DXGI_OUTPUT_DESC DesktopDesc;
- RtlZeroMemory(&DesktopDesc, sizeof(DXGI_OUTPUT_DESC));
- DuplMgr.GetOutputDesc(&DesktopDesc);
-
- // Main duplication loop
- bool WaitToProcessCurrentFrame = false;
- FRAME_DATA CurrentData;
-
- while (TData->Producer->is_started && (WaitForSingleObjectEx(TData->TerminateThreadsEvent, 0, FALSE) == WAIT_TIMEOUT))
- {
- if (!WaitToProcessCurrentFrame)
- {
- // Get new frame from desktop duplication
- bool TimeOut;
- Ret = DuplMgr.GetFrame(&CurrentData, &TimeOut);
- if (Ret != DUPL_RETURN_SUCCESS)
- {
- // An error occurred getting the next frame drop out of loop which
- // will check if it was expected or not
- break;
- }
-
- // Check for timeout
- if (TimeOut)
- {
- // No new frame at the moment
- continue;
- }
- }
-
- // We have a new frame so try and process it
- // Try to acquire keyed mutex in order to access shared surface
- hr = KeyMutex->AcquireSync(0, 1000);
- if (hr == static_cast<HRESULT>(WAIT_TIMEOUT))
- {
- // Can't use shared surface right now, try again later
- WaitToProcessCurrentFrame = true;
- continue;
- }
- else if (FAILED(hr))
- {
- // Generic unknown failure
- Ret = ProcessFailure(TData->DxRes.Device, L"Unexpected error acquiring KeyMutex", L"Error", hr, SystemTransitionsExpectedErrors);
- DuplMgr.DoneWithFrame();
- break;
- }
-
- // We can now process the current frame
- WaitToProcessCurrentFrame = false;
-
- // Get mouse info
- Ret = DuplMgr.GetMouse(TData->PtrInfo, &(CurrentData.FrameInfo), TData->OffsetX, TData->OffsetY);
- if (Ret != DUPL_RETURN_SUCCESS)
- {
- DuplMgr.DoneWithFrame();
- KeyMutex->ReleaseSync(1);
- break;
- }
-
- // Process new frame
- Ret = DispMgr.ProcessFrame(&CurrentData, SharedSurf, TData->OffsetX, TData->OffsetY, &DesktopDesc);
- if (Ret != DUPL_RETURN_SUCCESS)
- {
- DuplMgr.DoneWithFrame();
- KeyMutex->ReleaseSync(1);
- break;
- }
-
- // Release acquired keyed mutex
- hr = KeyMutex->ReleaseSync(1);
- if (FAILED(hr))
- {
- Ret = ProcessFailure(TData->DxRes.Device, L"Unexpected error releasing the keyed mutex", L"Error", hr, SystemTransitionsExpectedErrors);
- DuplMgr.DoneWithFrame();
- break;
- }
-
- // Send Frame Over the Network
- TimeNow = tsk_time_now();
- if ((TimeNow - TimeLastFrame) > TimeFrameDuration)
- {
- if (!((const plugin_win_dd_producer_t*)TData->Producer)->bMuted)
- {
- hr = DuplMgr.SendData(const_cast<struct tmedia_producer_s*>(TData->Producer), &CurrentData);
- }
- if (SUCCEEDED(hr))
- {
- TimeLastFrame = TimeNow;
- }
- }
+ DD_DEBUG_INFO("DDProc (producer) - ENTER");
+
+ // Classes
+ DISPLAYMANAGER DispMgr;
+ DUPLICATIONMANAGER DuplMgr;
+
+ // D3D objects
+ ID3D11Texture2D* SharedSurf = nullptr;
+ IDXGIKeyedMutex* KeyMutex = nullptr;
+
+ // Data passed in from thread creation
+ THREAD_DATA* TData = reinterpret_cast<THREAD_DATA*>(Param);
+
+ // Get desktop
+ DUPL_RETURN Ret;
+ HDESK CurrentDesktop = nullptr;
+ CurrentDesktop = OpenInputDesktop(0, FALSE, GENERIC_ALL);
+ if (!CurrentDesktop) {
+ // We do not have access to the desktop so request a retry
+ SetEvent(TData->ExpectedErrorEvent);
+ Ret = DUPL_RETURN_ERROR_EXPECTED;
+ goto Exit;
+ }
+
+ // Attach desktop to this thread
+ bool DesktopAttached = SetThreadDesktop(CurrentDesktop) != 0;
+ CloseDesktop(CurrentDesktop);
+ CurrentDesktop = nullptr;
+ if (!DesktopAttached) {
+ // We do not have access to the desktop so request a retry
+ Ret = DUPL_RETURN_ERROR_EXPECTED;
+ goto Exit;
+ }
+
+ // New display manager
+ DispMgr.InitD3D(&TData->DxRes);
+
+ // FPS manager
+ uint64_t TimeNow, TimeLastFrame = 0;
+ const uint64_t TimeFrameDuration = 1000 / TData->Producer->video.fps;
+
+ // Obtain handle to sync shared Surface
+ HRESULT hr = TData->DxRes.Device->OpenSharedResource(TData->TexSharedHandle, __uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&SharedSurf));
+ if (FAILED(hr)) {
+ Ret = ProcessFailure(TData->DxRes.Device, L"Opening shared texture failed", L"Error", hr, SystemTransitionsExpectedErrors);
+ goto Exit;
+ }
+
+ hr = SharedSurf->QueryInterface(__uuidof(IDXGIKeyedMutex), reinterpret_cast<void**>(&KeyMutex));
+ if (FAILED(hr)) {
+ Ret = ProcessFailure(nullptr, L"Failed to get keyed mutex interface in spawned thread", L"Error", hr);
+ goto Exit;
+ }
+
+ // Make duplication manager
+ Ret = DuplMgr.InitDupl(TData->DxRes.Device, TData->DxRes.Context ,TData->Output);
+ if (Ret != DUPL_RETURN_SUCCESS) {
+ goto Exit;
+ }
+
+ // Get output description
+ DXGI_OUTPUT_DESC DesktopDesc;
+ RtlZeroMemory(&DesktopDesc, sizeof(DXGI_OUTPUT_DESC));
+ DuplMgr.GetOutputDesc(&DesktopDesc);
+
+ // Main duplication loop
+ bool WaitToProcessCurrentFrame = false;
+ FRAME_DATA CurrentData;
+
+ while (TData->Producer->is_started && (WaitForSingleObjectEx(TData->TerminateThreadsEvent, 0, FALSE) == WAIT_TIMEOUT)) {
+ if (!WaitToProcessCurrentFrame) {
+ // Get new frame from desktop duplication
+ bool TimeOut;
+ Ret = DuplMgr.GetFrame(&CurrentData, &TimeOut);
+ if (Ret != DUPL_RETURN_SUCCESS) {
+ // An error occurred getting the next frame; drop out of the loop, which
+ // will check whether it was expected or not
+ break;
+ }
+
+ // Check for timeout
+ if (TimeOut) {
+ // No new frame at the moment
+ continue;
+ }
+ }
+
+ // We have a new frame so try and process it
+ // Try to acquire keyed mutex in order to access shared surface
+ hr = KeyMutex->AcquireSync(0, 1000);
+ if (hr == static_cast<HRESULT>(WAIT_TIMEOUT)) {
+ // Can't use shared surface right now, try again later
+ WaitToProcessCurrentFrame = true;
+ continue;
+ }
+ else if (FAILED(hr)) {
+ // Generic unknown failure
+ Ret = ProcessFailure(TData->DxRes.Device, L"Unexpected error acquiring KeyMutex", L"Error", hr, SystemTransitionsExpectedErrors);
+ DuplMgr.DoneWithFrame();
+ break;
+ }
+
+ // We can now process the current frame
+ WaitToProcessCurrentFrame = false;
+
+ // Get mouse info
+ Ret = DuplMgr.GetMouse(TData->PtrInfo, &(CurrentData.FrameInfo), TData->OffsetX, TData->OffsetY);
+ if (Ret != DUPL_RETURN_SUCCESS) {
+ DuplMgr.DoneWithFrame();
+ KeyMutex->ReleaseSync(1);
+ break;
+ }
+
+ // Process new frame
+ Ret = DispMgr.ProcessFrame(&CurrentData, SharedSurf, TData->OffsetX, TData->OffsetY, &DesktopDesc);
+ if (Ret != DUPL_RETURN_SUCCESS) {
+ DuplMgr.DoneWithFrame();
+ KeyMutex->ReleaseSync(1);
+ break;
+ }
+
+ // Release acquired keyed mutex
+ hr = KeyMutex->ReleaseSync(1);
+ if (FAILED(hr)) {
+ Ret = ProcessFailure(TData->DxRes.Device, L"Unexpected error releasing the keyed mutex", L"Error", hr, SystemTransitionsExpectedErrors);
+ DuplMgr.DoneWithFrame();
+ break;
+ }
+
+ // Send Frame Over the Network
+ TimeNow = tsk_time_now();
+ if ((TimeNow - TimeLastFrame) > TimeFrameDuration) {
+ if (!((const plugin_win_dd_producer_t*)TData->Producer)->bMuted) {
+ hr = DuplMgr.SendData(const_cast<struct tmedia_producer_s*>(TData->Producer), &CurrentData);
+ }
+ if (SUCCEEDED(hr)) {
+ TimeLastFrame = TimeNow;
+ }
+ }
#if 0
- else
- {
- DD_DEBUG_INFO("Skip frame");
- }
+ else {
+ DD_DEBUG_INFO("Skip frame");
+ }
#endif
- // Release frame back to desktop duplication
- Ret = DuplMgr.DoneWithFrame();
- if (Ret != DUPL_RETURN_SUCCESS)
- {
- break;
- }
- }
+ // Release frame back to desktop duplication
+ Ret = DuplMgr.DoneWithFrame();
+ if (Ret != DUPL_RETURN_SUCCESS) {
+ break;
+ }
+ }
Exit:
- if (Ret != DUPL_RETURN_SUCCESS)
- {
- if (Ret == DUPL_RETURN_ERROR_EXPECTED)
- {
- // The system is in a transition state so request the duplication be restarted
- SetEvent(TData->ExpectedErrorEvent);
- }
- else
- {
- // Unexpected error so exit the application
- SetEvent(TData->UnexpectedErrorEvent);
- }
- }
-
- if (SharedSurf)
- {
- SharedSurf->Release();
- SharedSurf = nullptr;
- }
-
- if (KeyMutex)
- {
- KeyMutex->Release();
- KeyMutex = nullptr;
- }
-
- DD_DEBUG_INFO("DDProc (producer) - EXIT");
-
- return 0;
+ if (Ret != DUPL_RETURN_SUCCESS) {
+ if (Ret == DUPL_RETURN_ERROR_EXPECTED) {
+ // The system is in a transition state so request the duplication be restarted
+ SetEvent(TData->ExpectedErrorEvent);
+ }
+ else {
+ // Unexpected error so exit the application
+ SetEvent(TData->UnexpectedErrorEvent);
+ }
+ }
+
+ if (SharedSurf) {
+ SharedSurf->Release();
+ SharedSurf = nullptr;
+ }
+
+ if (KeyMutex) {
+ KeyMutex->Release();
+ KeyMutex = nullptr;
+ }
+
+ DD_DEBUG_INFO("DDProc (producer) - EXIT");
+
+ return 0;
}
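DDProc above paces network sends from the configured frame rate (TimeFrameDuration = 1000 / fps) rather than from the duplication rate: early frames are still processed and released, just not sent. A minimal sketch of that throttle; the caller supplies a millisecond timestamp (tsk_time_now() in the real code), and unlike DDProc the sketch does not distinguish muted or failed sends.

    #include <cstdint>

    struct FramePacer {
        uint64_t frameDurationMs; // e.g. 1000 / fps, as computed in DDProc
        uint64_t lastSentMs = 0;

        // True when enough time has elapsed since the last accepted frame
        bool ShouldSend(uint64_t nowMs)
        {
            if ((nowMs - lastSentMs) > frameDurationMs) {
                lastSentMs = nowMs;
                return true;
            }
            return false; // skip this frame; the timestamp of the last sent frame is kept
        }
    };

    // Usage (hypothetical): FramePacer pacer{ 1000 / 15 }; if (pacer.ShouldSend(nowMs)) { /* send */ }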
// Run session async thread
static void* TSK_STDCALL DDThread(void *pArg)
{
- plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)pArg;
- HRESULT hr = S_OK;
- INT SingleOutput = -1;
-
- RECT DeskBounds = {};
- UINT OutputCount = 1;
-
- bool FirstTime = true;
- bool Occluded = true;
- bool PreviewChanged = false;
- DYNAMIC_WAIT DynamicWait;
- HWND hwndPreview = NULL;
-
- DD_DEBUG_INFO("DDThread (producer) - ENTER");
-
- while (pSelf->bStarted)
- {
- DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
-
- // Check if Preview window changed
- PreviewChanged = (hwndPreview != pSelf->hwndPreview);
-
- if (WaitForSingleObjectEx(pSelf->hlOcclutionEvent, 0, FALSE) == WAIT_OBJECT_0)
- {
- Occluded = false;
- }
- if (WaitForSingleObjectEx(pSelf->hlUnexpectedErrorEvent, 0, FALSE) == WAIT_OBJECT_0)
- {
- // Unexpected error occurred so exit the application
- DD_CHECK_HR(hr = E_UNEXPECTED);
- }
- else if (FirstTime || PreviewChanged || WaitForSingleObjectEx(pSelf->hlExpectedErrorEvent, 0, FALSE) == WAIT_OBJECT_0)
- {
- if (PreviewChanged)
- {
- hwndPreview = pSelf->hwndPreview;
- }
-
- if (!FirstTime)
- {
- // Terminate other threads
- SetEvent(pSelf->hlTerminateThreadsEvent);
- pSelf->pThreadMgr->WaitForThreadTermination();
- ResetEvent(pSelf->hlTerminateThreadsEvent);
- ResetEvent(pSelf->hlExpectedErrorEvent);
-
- // Clean up
- pSelf->pThreadMgr->Clean();
- pSelf->pOutMgr->CleanRefs();
-
- // As we have encountered an error due to a system transition we wait before trying again, using this dynamic wait
- // the wait periods will get progressively long to avoid wasting too much system resource if this state lasts a long time
- DynamicWait.Wait();
- }
- else
- {
- // First time through the loop so nothing to clean up
- FirstTime = false;
- }
-
- // Re-initialize
- Ret = pSelf->pOutMgr->InitOutput(hwndPreview, SingleOutput, &OutputCount, &DeskBounds);
- if (Ret == DUPL_RETURN_SUCCESS)
- {
- HANDLE SharedHandle = pSelf->pOutMgr->GetSharedHandle();
- if (SharedHandle)
- {
- Ret = pSelf->pThreadMgr->Initialize(SingleOutput, OutputCount, pSelf->hlUnexpectedErrorEvent, pSelf->hlExpectedErrorEvent, pSelf->hlTerminateThreadsEvent, SharedHandle, TMEDIA_PRODUCER(pSelf), &DeskBounds);
- }
- else
- {
- DisplayMsg(L"Failed to get handle of shared surface", L"Error", S_OK);
- Ret = DUPL_RETURN_ERROR_UNEXPECTED;
- }
- }
-
-
- // We start off in occluded state and we should immediate get a occlusion status window message
- Occluded = true;
- }
- else
- {
- // Nothing else to do, so try to present to write out to window if not occluded
- if (!Occluded || !pSelf->bWindowHooked)
- {
- Ret = pSelf->pOutMgr->UpdateApplicationWindow(pSelf->pThreadMgr->GetPointerInfo(), &Occluded);
- }
- }
-
- // Check if for errors
- if (Ret != DUPL_RETURN_SUCCESS)
- {
- if (Ret == DUPL_RETURN_ERROR_EXPECTED)
- {
- // Some type of system transition is occurring so retry
- SetEvent(pSelf->hlExpectedErrorEvent);
- }
- else
- {
- // Unexpected error so exit
- DD_CHECK_HR(hr = E_UNEXPECTED);
- break;
- }
- }
- }
+ plugin_win_dd_producer_t *pSelf = (plugin_win_dd_producer_t *)pArg;
+ HRESULT hr = S_OK;
+ INT SingleOutput = -1;
+
+ RECT DeskBounds = {};
+ UINT OutputCount = 1;
+
+ bool FirstTime = true;
+ bool Occluded = true;
+ bool PreviewChanged = false;
+ DYNAMIC_WAIT DynamicWait;
+ HWND hwndPreview = NULL;
+
+ DD_DEBUG_INFO("DDThread (producer) - ENTER");
+
+ while (pSelf->bStarted) {
+ DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
+
+ // Check if Preview window changed
+ PreviewChanged = (hwndPreview != pSelf->hwndPreview);
+
+ if (WaitForSingleObjectEx(pSelf->hlOcclutionEvent, 0, FALSE) == WAIT_OBJECT_0) {
+ Occluded = false;
+ }
+ if (WaitForSingleObjectEx(pSelf->hlUnexpectedErrorEvent, 0, FALSE) == WAIT_OBJECT_0) {
+ // Unexpected error occurred so exit the application
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ }
+ else if (FirstTime || PreviewChanged || WaitForSingleObjectEx(pSelf->hlExpectedErrorEvent, 0, FALSE) == WAIT_OBJECT_0) {
+ if (PreviewChanged) {
+ hwndPreview = pSelf->hwndPreview;
+ }
+
+ if (!FirstTime) {
+ // Terminate other threads
+ SetEvent(pSelf->hlTerminateThreadsEvent);
+ pSelf->pThreadMgr->WaitForThreadTermination();
+ ResetEvent(pSelf->hlTerminateThreadsEvent);
+ ResetEvent(pSelf->hlExpectedErrorEvent);
+
+ // Clean up
+ pSelf->pThreadMgr->Clean();
+ pSelf->pOutMgr->CleanRefs();
+
+ // As we have encountered an error due to a system transition, wait before trying again. With this dynamic wait
+ // the wait periods get progressively longer to avoid wasting too many system resources if this state lasts a long time
+ DynamicWait.Wait();
+ }
+ else {
+ // First time through the loop so nothing to clean up
+ FirstTime = false;
+ }
+
+ // Re-initialize
+ Ret = pSelf->pOutMgr->InitOutput(hwndPreview, SingleOutput, &OutputCount, &DeskBounds);
+ if (Ret == DUPL_RETURN_SUCCESS) {
+ HANDLE SharedHandle = pSelf->pOutMgr->GetSharedHandle();
+ if (SharedHandle) {
+ Ret = pSelf->pThreadMgr->Initialize(SingleOutput, OutputCount, pSelf->hlUnexpectedErrorEvent, pSelf->hlExpectedErrorEvent, pSelf->hlTerminateThreadsEvent, SharedHandle, TMEDIA_PRODUCER(pSelf), &DeskBounds);
+ }
+ else {
+ DisplayMsg(L"Failed to get handle of shared surface", L"Error", S_OK);
+ Ret = DUPL_RETURN_ERROR_UNEXPECTED;
+ }
+ }
+
+
+ // We start off in an occluded state and should immediately get an occlusion status window message
+ Occluded = true;
+ }
+ else {
+ // Nothing else to do, so try to present to write out to window if not occluded
+ if (!Occluded || !pSelf->bWindowHooked) {
+ Ret = pSelf->pOutMgr->UpdateApplicationWindow(pSelf->pThreadMgr->GetPointerInfo(), &Occluded);
+ }
+ }
+
+ // Check for errors
+ if (Ret != DUPL_RETURN_SUCCESS) {
+ if (Ret == DUPL_RETURN_ERROR_EXPECTED) {
+ // Some type of system transition is occurring so retry
+ SetEvent(pSelf->hlExpectedErrorEvent);
+ }
+ else {
+ // Unexpected error so exit
+ DD_CHECK_HR(hr = E_UNEXPECTED);
+ break;
+ }
+ }
+ }
bail:
-
- DD_DEBUG_INFO("DDThread (producer) - BAIL");
+
+ DD_DEBUG_INFO("DDThread (producer) - BAIL");
#if 0 // Done by unprepare()
- // Make sure all other threads have exited
- if (SetEvent(pSelf->hlTerminateThreadsEvent))
- {
- ThreadMgr.WaitForThreadTermination();
- }
-
- // Clean up
- CloseHandle(pSelf->hlUnexpectedErrorEvent); pSelf->hlUnexpectedErrorEvent = NULL;
- CloseHandle(pSelf->hlExpectedErrorEvent); pSelf->hlExpectedErrorEvent = NULL;
- CloseHandle(pSelf->hlTerminateThreadsEvent); pSelf->hlTerminateThreadsEvent = NULL;
+ // Make sure all other threads have exited
+ if (SetEvent(pSelf->hlTerminateThreadsEvent)) {
+ ThreadMgr.WaitForThreadTermination();
+ }
+
+ // Clean up
+ CloseHandle(pSelf->hlUnexpectedErrorEvent);
+ pSelf->hlUnexpectedErrorEvent = NULL;
+ CloseHandle(pSelf->hlExpectedErrorEvent);
+ pSelf->hlExpectedErrorEvent = NULL;
+ CloseHandle(pSelf->hlTerminateThreadsEvent);
+ pSelf->hlTerminateThreadsEvent = NULL;
#endif
- DD_DEBUG_INFO("DDThread (producer) - EXIT");
+ DD_DEBUG_INFO("DDThread (producer) - EXIT");
- return NULL;
+ return NULL;
}
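DDThread above coordinates with DDProc purely through manual-reset events that are polled with a zero timeout, so the loop never blocks on them. A small self-contained demonstration of that non-blocking check; the event here is hypothetical, created with the same flags as the producer's events.

    #include <windows.h>
    #include <cstdio>

    // True when the event is currently signaled; never blocks (zero timeout).
    static bool IsSignaled(HANDLE hEvent)
    {
        return WaitForSingleObjectEx(hEvent, 0, FALSE) == WAIT_OBJECT_0;
    }

    int main()
    {
        // Manual-reset event, initially unsignaled: same flags as CreateEvent(nullptr, TRUE, FALSE, nullptr) above
        HANDLE hEvent = CreateEventW(nullptr, TRUE, FALSE, nullptr);
        if (!hEvent) {
            return 1;
        }
        printf("before SetEvent:  %d\n", IsSignaled(hEvent)); // 0
        SetEvent(hEvent);
        printf("after SetEvent:   %d\n", IsSignaled(hEvent)); // 1, and it stays signaled
        ResetEvent(hEvent);
        printf("after ResetEvent: %d\n", IsSignaled(hEvent)); // 0
        CloseHandle(hEvent);
        return 0;
    }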
//
// Displays a message
//
- void DisplayMsg(_In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr)
+void DisplayMsg(_In_ LPCWSTR Str, _In_ LPCWSTR Title, HRESULT hr)
{
- const UINT StringLen = (UINT)(wcslen(Str) + sizeof(" with HRESULT 0x########."));
- wchar_t* OutStr = new wchar_t[StringLen];
- if (!OutStr)
- {
- return;
- }
-
- INT LenWritten = swprintf_s(OutStr, StringLen, L"%s with 0x%X.", Str, hr);
- if (LenWritten != -1)
- {
- DD_DEBUG_ERROR("%ls: %ls", Title, OutStr);
- }
-
- delete[] OutStr;
+ const UINT StringLen = (UINT)(wcslen(Str) + sizeof(" with HRESULT 0x########."));
+ wchar_t* OutStr = new wchar_t[StringLen];
+ if (!OutStr) {
+ return;
+ }
+
+ INT LenWritten = swprintf_s(OutStr, StringLen, L"%s with 0x%X.", Str, hr);
+ if (LenWritten != -1) {
+ DD_DEBUG_ERROR("%ls: %ls", Title, OutStr);
+ }
+
+ delete[] OutStr;
}
- DYNAMIC_WAIT::DYNAMIC_WAIT() : m_CurrentWaitBandIdx(0), m_WaitCountInCurrentBand(0)
- {
- m_QPCValid = QueryPerformanceFrequency(&m_QPCFrequency);
- m_LastWakeUpTime.QuadPart = 0L;
- }
-
- DYNAMIC_WAIT::~DYNAMIC_WAIT()
- {
- }
-
- void DYNAMIC_WAIT::Wait()
- {
- LARGE_INTEGER CurrentQPC = { 0 };
-
- // Is this wait being called with the period that we consider it to be part of the same wait sequence
- QueryPerformanceCounter(&CurrentQPC);
- if (m_QPCValid && (CurrentQPC.QuadPart <= (m_LastWakeUpTime.QuadPart + (m_QPCFrequency.QuadPart * m_WaitSequenceTimeInSeconds))))
- {
- // We are still in the same wait sequence, lets check if we should move to the next band
- if ((m_WaitBands[m_CurrentWaitBandIdx].WaitCount != WAIT_BAND_STOP) && (m_WaitCountInCurrentBand > m_WaitBands[m_CurrentWaitBandIdx].WaitCount))
- {
- m_CurrentWaitBandIdx++;
- m_WaitCountInCurrentBand = 0;
- }
- }
- else
- {
- // Either we could not get the current time or we are starting a new wait sequence
- m_WaitCountInCurrentBand = 0;
- m_CurrentWaitBandIdx = 0;
- }
-
- // Sleep for the required period of time
- Sleep(m_WaitBands[m_CurrentWaitBandIdx].WaitTime);
-
- // Record the time we woke up so we can detect wait sequences
- QueryPerformanceCounter(&m_LastWakeUpTime);
- m_WaitCountInCurrentBand++;
- }
\ No newline at end of file
+DYNAMIC_WAIT::DYNAMIC_WAIT() : m_CurrentWaitBandIdx(0), m_WaitCountInCurrentBand(0)
+{
+ m_QPCValid = QueryPerformanceFrequency(&m_QPCFrequency);
+ m_LastWakeUpTime.QuadPart = 0L;
+}
+
+DYNAMIC_WAIT::~DYNAMIC_WAIT()
+{
+}
+
+void DYNAMIC_WAIT::Wait()
+{
+ LARGE_INTEGER CurrentQPC = { 0 };
+
+ // Is this wait being called within the period that we consider it to be part of the same wait sequence?
+ QueryPerformanceCounter(&CurrentQPC);
+ if (m_QPCValid && (CurrentQPC.QuadPart <= (m_LastWakeUpTime.QuadPart + (m_QPCFrequency.QuadPart * m_WaitSequenceTimeInSeconds)))) {
+ // We are still in the same wait sequence, let's check if we should move to the next band
+ if ((m_WaitBands[m_CurrentWaitBandIdx].WaitCount != WAIT_BAND_STOP) && (m_WaitCountInCurrentBand > m_WaitBands[m_CurrentWaitBandIdx].WaitCount)) {
+ m_CurrentWaitBandIdx++;
+ m_WaitCountInCurrentBand = 0;
+ }
+ }
+ else {
+ // Either we could not get the current time or we are starting a new wait sequence
+ m_WaitCountInCurrentBand = 0;
+ m_CurrentWaitBandIdx = 0;
+ }
+
+ // Sleep for the required period of time
+ Sleep(m_WaitBands[m_CurrentWaitBandIdx].WaitTime);
+
+ // Record the time we woke up so we can detect wait sequences
+ QueryPerformanceCounter(&m_LastWakeUpTime);
+ m_WaitCountInCurrentBand++;
+}
\ No newline at end of file
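DYNAMIC_WAIT treats calls that arrive within two seconds of each other as one failure sequence and walks that sequence through the band table: roughly 20 waits of 250 ms, then 60 waits of 2000 ms, then 5000 ms indefinitely. A simplified sketch of the same band walk, leaving out the QueryPerformanceCounter bookkeeping and resetting explicitly instead:

    #include <windows.h>

    struct Band { UINT waitMs; UINT count; }; // count == 0 means "stay in this band forever"

    static const Band kBands[] = { { 250, 20 }, { 2000, 60 }, { 5000, 0 } };

    class SimpleBackoff {
    public:
        // Call once per retry inside a single failure sequence
        void Wait()
        {
            if (kBands[m_band].count != 0 && m_inBand > kBands[m_band].count) {
                ++m_band; // move to the next, slower band
                m_inBand = 0;
            }
            Sleep(kBands[m_band].waitMs);
            ++m_inBand;
        }
        // Call when a new failure sequence starts (DYNAMIC_WAIT detects this via QPC timestamps)
        void Reset()
        {
            m_band = 0;
            m_inBand = 0;
        }
    private:
        UINT m_band = 0;
        UINT m_inBand = 0;
    };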
diff --git a/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c b/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c
index ac65b94..63b1194 100755
--- a/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c
+++ b/plugins/pluginWinIPSecVista/plugin_win_ipsec_vista.c
@@ -60,11 +60,11 @@ typedef struct plugin_win_ipsec_vista_ctx_s {
tipsec_ctx_t* pc_base;
UINT64 saId_us;
UINT64 saId_uc;
- UINT64 filterId_in_us;
- UINT64 filterId_out_us;
- UINT64 filterId_in_uc;
- UINT64 filterId_out_uc;
- WCHAR filter_name[256];
+ UINT64 filterId_in_us;
+ UINT64 filterId_out_us;
+ UINT64 filterId_in_uc;
+ UINT64 filterId_out_uc;
+ WCHAR filter_name[256];
HANDLE engine;
}
@@ -84,33 +84,33 @@ static tipsec_error_t _plugin_win_ipsec_vista_ctx_init(tipsec_ctx_t* _p_ctx)
{
plugin_win_ipsec_vista_ctx_t* p_ctx = (plugin_win_ipsec_vista_ctx_t*)_p_ctx;
DWORD code;
- UUID uuid;
- RPC_STATUS status;
- static uint64_t __guard = 0;
+ UUID uuid;
+ RPC_STATUS status;
+ static uint64_t __guard = 0;
if (p_ctx->pc_base->initialized) {
TSK_DEBUG_ERROR("Already initialized");
return tipsec_error_invalid_state;
}
- /* Create filter name */
+ /* Create filter name */
status = UuidCreate(&uuid);
if (status == RPC_S_OK) {
- WCHAR* wszUuid = NULL;
+ WCHAR* wszUuid = NULL;
UuidToStringW(&uuid, (RPC_WSTR*)&wszUuid);
- if (!wszUuid) {
- TSK_DEBUG_ERROR("Failed to convert the UUID");
- return tipsec_error_sys;
- }
- swprintf(p_ctx->filter_name, sizeof(p_ctx->filter_name)/sizeof(p_ctx->filter_name[0]), L"%s//%s//%llu", TINYIPSEC_FILTER_NAME, wszUuid, __guard++);
- RpcStringFree((RPC_WSTR*)&wszUuid);
- }
- else {
- TSK_DEBUG_ERROR("Failed to create new UUID");
- return tipsec_error_sys;
- }
-
-
+ if (!wszUuid) {
+ TSK_DEBUG_ERROR("Failed to convert the UUID");
+ return tipsec_error_sys;
+ }
+ swprintf(p_ctx->filter_name, sizeof(p_ctx->filter_name)/sizeof(p_ctx->filter_name[0]), L"%s//%s//%llu", TINYIPSEC_FILTER_NAME, wszUuid, __guard++);
+ RpcStringFree((RPC_WSTR*)&wszUuid);
+ }
+ else {
+ TSK_DEBUG_ERROR("Failed to create new UUID");
+ return tipsec_error_sys;
+ }
+
+
/* Open engine */
if ((code = FwpmEngineOpen0(NULL, RPC_C_AUTHN_WINNT, NULL, NULL, &p_ctx->engine))) {
@@ -172,7 +172,7 @@ static tipsec_error_t _plugin_win_ipsec_vista_ctx_set_local(tipsec_ctx_t* _p_ctx
_p_ctx->port_us = port_us;
// Create SA1: (UC -> PS)
- if ((ret = _vista_createLocalSA(p_ctx, _p_ctx->port_uc, &_p_ctx->spi_uc, &p_ctx->saId_uc, &p_ctx->filterId_in_uc, &p_ctx->filterId_out_uc))) {
+ if ((ret = _vista_createLocalSA(p_ctx, _p_ctx->port_uc, &_p_ctx->spi_uc, &p_ctx->saId_uc, &p_ctx->filterId_in_uc, &p_ctx->filterId_out_uc))) {
return tipsec_error_sys;
}
@@ -312,12 +312,12 @@ static int _vista_createLocalSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx,
IPSEC_GETSPI0 getSpi;
int ret = -1;
FWPM_FILTER_CONDITION0 conds[6];
- UINT32 numFilterConditions = 3;
+ UINT32 numFilterConditions = 3;
*spi = 0;
*saId = 0;
- *filterId_in = 0;
- *filterId_out = 0;
+ *filterId_in = 0;
+ *filterId_out = 0;
conds[0].fieldKey = FWPM_CONDITION_IP_LOCAL_ADDRESS;
conds[0].matchType = FWP_MATCH_EQUAL;
@@ -341,19 +341,19 @@ static int _vista_createLocalSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx,
conds[2].conditionValue.type = FWP_UINT16;
conds[2].conditionValue.uint16 = local_port;
- if (p_ctx->pc_base->ipproto != tipsec_ipproto_all) {
- conds[numFilterConditions].fieldKey = FWPM_CONDITION_IP_PROTOCOL;
- conds[numFilterConditions].matchType = FWP_MATCH_EQUAL;
- conds[numFilterConditions].conditionValue.type = FWP_UINT8;
- conds[numFilterConditions].conditionValue.uint8 = TINYIPSEC_VISTA_GET_IPPROTO(p_ctx->pc_base->ipproto);
- ++numFilterConditions;
- }
+ if (p_ctx->pc_base->ipproto != tipsec_ipproto_all) {
+ conds[numFilterConditions].fieldKey = FWPM_CONDITION_IP_PROTOCOL;
+ conds[numFilterConditions].matchType = FWP_MATCH_EQUAL;
+ conds[numFilterConditions].conditionValue.type = FWP_UINT8;
+ conds[numFilterConditions].conditionValue.uint8 = TINYIPSEC_VISTA_GET_IPPROTO(p_ctx->pc_base->ipproto);
+ ++numFilterConditions;
+ }
// Fill in the common fields shared by both filters.
memset(&filter, 0, sizeof(filter));
// For MUI compatibility, object names should be indirect strings. See
// SHLoadIndirectString for details.
- filter.displayData.name = (PWCHAR)p_ctx->filter_name;
+ filter.displayData.name = (PWCHAR)p_ctx->filter_name;
// Link all objects to our provider. When multiple providers are installed
// on a computer, this makes it easy to determine who added what.
filter.providerKey = (GUID*)TINYIPSEC_PROVIDER_KEY;
@@ -362,7 +362,7 @@ static int _vista_createLocalSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx,
filter.action.type = FWP_ACTION_CALLOUT_TERMINATING;
filter.flags = FWPM_FILTER_FLAG_NONE;
filter.weight.type = FWP_EMPTY;
-
+
// Add the inbound filter.
filter.layerKey = (p_ctx->pc_base->use_ipv6) ? FWPM_LAYER_INBOUND_TRANSPORT_V6 : FWPM_LAYER_INBOUND_TRANSPORT_V4;
if (p_ctx->pc_base->mode == tipsec_mode_tun) {
@@ -427,8 +427,8 @@ static int _vista_createLocalSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx,
}
//// Return the various LUIDs to the caller, so he can clean up.
- *filterId_in = tmpInFilterId;
- *filterId_out = tmpOutFilterId;
+ *filterId_in = tmpInFilterId;
+ *filterId_out = tmpOutFilterId;
*saId = tmpSaId;
CLEANUP:
@@ -449,7 +449,7 @@ static int _vista_boundSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx, __in U
IPSEC_SA0 sa;
IPSEC_SA_BUNDLE0 bundle;
IPSEC_SA_AUTH_INFORMATION0 authInfo; // must be global because use as reference (X = &authInfo)
- IPSEC_SA_AUTH_AND_CIPHER_INFORMATION0 cipherAuthInfo; // must be global because use as reference (X = &cipherAuthInfo)
+ IPSEC_SA_AUTH_AND_CIPHER_INFORMATION0 cipherAuthInfo; // must be global because use as reference (X = &cipherAuthInfo)
PFWP_BYTE_BLOB ik = (PFWP_BYTE_BLOB)p_ctx->pc_base->ik;
PFWP_BYTE_BLOB ck = (PFWP_BYTE_BLOB)p_ctx->pc_base->ck;
@@ -490,7 +490,7 @@ static int _vista_boundSA(__in const plugin_win_ipsec_vista_ctx_t* p_ctx, __in U
else if ( sa.saTransformType == IPSEC_TRANSFORM_ESP_AUTH ) {
sa.espAuthInformation = &authInfo;
}
- else if ( sa.saTransformType == IPSEC_TRANSFORM_ESP_CIPHER ) {
+ else if ( sa.saTransformType == IPSEC_TRANSFORM_ESP_CIPHER ) {
IPSEC_SA_CIPHER_INFORMATION0 cipherInfo;
memset(&cipherInfo, 0, sizeof(cipherInfo));
@@ -540,29 +540,29 @@ CLEANUP:
static int _vista_flushAll(const plugin_win_ipsec_vista_ctx_t* p_ctx)
{
#if 1
- int ret = -1;
- if (p_ctx && p_ctx->engine) {
- DWORD result;
- result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_in_uc);
- if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
- TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
- }
- result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_in_us);
- if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
- TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
- }
- result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_out_uc);
- if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
- TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
- }
- result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_out_us);
- if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
- TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
- }
- return 0;
- }
- //
- return ret;
+ int ret = -1;
+ if (p_ctx && p_ctx->engine) {
+ DWORD result;
+ result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_in_uc);
+ if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
+ TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
+ }
+ result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_in_us);
+ if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
+ TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
+ }
+ result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_out_uc);
+ if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
+ TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
+ }
+ result = FwpmFilterDeleteById0(p_ctx->engine, p_ctx->filterId_out_us);
+ if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
+ TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
+ }
+ return 0;
+ }
+ //
+ return ret;
#else
UINT32 i;
int ret = -1;
@@ -593,18 +593,18 @@ static int _vista_flushAll(const plugin_win_ipsec_vista_ctx_t* p_ctx)
}
}
- TSK_DEBUG_INFO("All SAs have been flushed.");
+ TSK_DEBUG_INFO("All SAs have been flushed.");
ret = 0;
CLEANUP:
- if (entries) {
- FwpmFreeMemory0((void**)entries);
- }
- if (enumHandle) {
- if ((result = IPsecSaDestroyEnumHandle0(p_ctx->engine, enumHandle)) != ERROR_SUCCESS) {
- TSK_DEBUG_ERROR("IPsecSaDestroyEnumHandle0 failed with error code [%x].", result);
- }
- }
+ if (entries) {
+ FwpmFreeMemory0((void**)entries);
+ }
+ if (enumHandle) {
+ if ((result = IPsecSaDestroyEnumHandle0(p_ctx->engine, enumHandle)) != ERROR_SUCCESS) {
+ TSK_DEBUG_ERROR("IPsecSaDestroyEnumHandle0 failed with error code [%x].", result);
+ }
+ }
}
return ret;
@@ -672,15 +672,15 @@ static tsk_object_t* _plugin_win_ipsec_vista_ctx_dtor(tsk_object_t * self)
TSK_FREE(p_ctx->pc_base->addr_local);
TSK_FREE(p_ctx->pc_base->addr_remote);
-
- if (p_ctx->pc_base->ik) {
- TSK_FREE(((PFWP_BYTE_BLOB)p_ctx->pc_base->ik)->data);
- TSK_FREE(p_ctx->pc_base->ik);
- }
+
+ if (p_ctx->pc_base->ik) {
+ TSK_FREE(((PFWP_BYTE_BLOB)p_ctx->pc_base->ik)->data);
+ TSK_FREE(p_ctx->pc_base->ik);
+ }
if (p_ctx->pc_base->ck) {
- TSK_FREE(((PFWP_BYTE_BLOB)p_ctx->pc_base->ck)->data);
- TSK_FREE(p_ctx->pc_base->ck);
- }
+ TSK_FREE(((PFWP_BYTE_BLOB)p_ctx->pc_base->ck)->data);
+ TSK_FREE(p_ctx->pc_base->ck);
+ }
TSK_DEBUG_INFO("*** Windows Vista IPSec plugin (Windows Filtering Platform) context destroyed ***");
}
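
The _vista_flushAll() path above deletes each WFP filter by LUID and deliberately treats FWP_E_FILTER_NOT_FOUND as harmless, so cleanup can safely run more than once. A minimal sketch of that pattern factored into a helper; the helper name is illustrative and not part of this patch:

static int _vista_delete_filter_quietly(HANDLE engine, UINT64 filterId)
{
    // FwpmFilterDeleteById0() removes a filter previously added to the engine.
    // An already-missing filter is expected on repeated cleanup, so it is not an error.
    DWORD result = FwpmFilterDeleteById0(engine, filterId);
    if (result != ERROR_SUCCESS && result != FWP_E_FILTER_NOT_FOUND) {
        TSK_DEBUG_ERROR("FwpmFilterDeleteById0 failed with error code [%x]", result);
        return -1;
    }
    return 0;
}
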
diff --git a/plugins/pluginWinMF/dllmain_mf.cxx b/plugins/pluginWinMF/dllmain_mf.cxx
index aeeb863..99cace8 100755
--- a/plugins/pluginWinMF/dllmain_mf.cxx
+++ b/plugins/pluginWinMF/dllmain_mf.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -71,174 +71,151 @@ PLUGIN_WIN_MF_END_DECLS /* END */
BOOL APIENTRY DllMain( HMODULE hModule,
DWORD ul_reason_for_call,
LPVOID lpReserved
- )
+ )
{
- switch (ul_reason_for_call)
- {
- case DLL_PROCESS_ATTACH:
- break;
- case DLL_THREAD_ATTACH:
- break;
- case DLL_THREAD_DETACH:
- break;
- case DLL_PROCESS_DETACH:
- break;
- }
- return TRUE;
+ switch (ul_reason_for_call) {
+ case DLL_PROCESS_ATTACH:
+ break;
+ case DLL_THREAD_ATTACH:
+ break;
+ case DLL_THREAD_DETACH:
+ break;
+ case DLL_PROCESS_DETACH:
+ break;
+ }
+ return TRUE;
}
-typedef enum PLUGIN_INDEX_E
-{
+typedef enum PLUGIN_INDEX_E {
#if PLUGIN_MF_ENABLE_AUDIO_IO
- PLUGIN_INDEX_AUDIO_CONSUMER,
- PLUGIN_INDEX_AUDIO_PRODUCER,
+ PLUGIN_INDEX_AUDIO_CONSUMER,
+ PLUGIN_INDEX_AUDIO_PRODUCER,
#endif
#if PLUGIN_MF_ENABLE_VIDEO_IO
- PLUGIN_INDEX_VIDEO_PRODUCER,
- PLUGIN_INDEX_VIDEO_CONSUMER,
+ PLUGIN_INDEX_VIDEO_PRODUCER,
+ PLUGIN_INDEX_VIDEO_CONSUMER,
#endif
#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
- PLUGIN_INDEX_VIDEO_CONVERTER,
+ PLUGIN_INDEX_VIDEO_CONVERTER,
#endif
- PLUGIN_INDEX_CODEC_H264_MAIN,
- PLUGIN_INDEX_CODEC_H264_BASE,
-
- PLUGIN_INDEX_COUNT
+ PLUGIN_INDEX_CODEC_H264_MAIN,
+ PLUGIN_INDEX_CODEC_H264_BASE,
+
+ PLUGIN_INDEX_COUNT
}
PLUGIN_INDEX_T;
int __plugin_get_def_count()
{
- int count = PLUGIN_INDEX_COUNT;
- if(!MFUtils::IsLowLatencyH264Supported())
- {
- count -= 2;
- }
- return count;
+ int count = PLUGIN_INDEX_COUNT;
+ if(!MFUtils::IsLowLatencyH264Supported()) {
+ count -= 2;
+ }
+ return count;
}
tsk_plugin_def_type_t __plugin_get_def_type_at(int index)
{
- switch(index){
+ switch(index) {
#if PLUGIN_MF_ENABLE_AUDIO_IO
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return (index == PLUGIN_INDEX_AUDIO_CONSUMER) ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_producer;
- }
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return (index == PLUGIN_INDEX_AUDIO_CONSUMER) ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_producer;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_IO
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- {
- return MFUtils::IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
- }
- case PLUGIN_INDEX_VIDEO_PRODUCER:
- {
- return tsk_plugin_def_type_producer;
- }
+ case PLUGIN_INDEX_VIDEO_CONSUMER: {
+ return MFUtils::IsD3D9Supported() ? tsk_plugin_def_type_consumer : tsk_plugin_def_type_none;
+ }
+ case PLUGIN_INDEX_VIDEO_PRODUCER: {
+ return tsk_plugin_def_type_producer;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
- case PLUGIN_INDEX_VIDEO_CONVERTER:
- {
- return tsk_plugin_def_type_converter;
- }
-#endif
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_type_none;
- }
- }
+ case PLUGIN_INDEX_VIDEO_CONVERTER: {
+ return tsk_plugin_def_type_converter;
+ }
+#endif
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_type_codec : tsk_plugin_def_type_none;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_type_none;
+ }
+ }
}
tsk_plugin_def_media_type_t __plugin_get_def_media_type_at(int index)
{
- switch(index){
+ switch(index) {
#if PLUGIN_MF_ENABLE_AUDIO_IO
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return tsk_plugin_def_media_type_audio;
- }
+ case PLUGIN_INDEX_AUDIO_CONSUMER:
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return tsk_plugin_def_media_type_audio;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_IO
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- {
- return MFUtils::IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
- }
- case PLUGIN_INDEX_VIDEO_PRODUCER:
- {
- return tsk_plugin_def_media_type_video;
- }
+ case PLUGIN_INDEX_VIDEO_CONSUMER: {
+ return MFUtils::IsD3D9Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+ }
+ case PLUGIN_INDEX_VIDEO_PRODUCER: {
+ return tsk_plugin_def_media_type_video;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
- case PLUGIN_INDEX_VIDEO_CONVERTER:
- {
- return tsk_plugin_def_media_type_video;
- }
-#endif
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_plugin_def_media_type_none;
- }
- }
+ case PLUGIN_INDEX_VIDEO_CONVERTER: {
+ return tsk_plugin_def_media_type_video;
+ }
+#endif
+ case PLUGIN_INDEX_CODEC_H264_MAIN:
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return MFUtils::IsLowLatencyH264Supported() ? tsk_plugin_def_media_type_video : tsk_plugin_def_media_type_none;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_plugin_def_media_type_none;
+ }
+ }
}
tsk_plugin_def_ptr_const_t __plugin_get_def_at(int index)
{
- switch(index){
+ switch(index) {
#if PLUGIN_MF_ENABLE_VIDEO_IO
- case PLUGIN_INDEX_VIDEO_PRODUCER:
- {
- return plugin_win_mf_producer_video_plugin_def_t;
- }
- case PLUGIN_INDEX_VIDEO_CONSUMER:
- {
- return MFUtils::IsD3D9Supported() ? plugin_win_mf_consumer_video_plugin_def_t : tsk_null;
- }
+ case PLUGIN_INDEX_VIDEO_PRODUCER: {
+ return plugin_win_mf_producer_video_plugin_def_t;
+ }
+ case PLUGIN_INDEX_VIDEO_CONSUMER: {
+ return MFUtils::IsD3D9Supported() ? plugin_win_mf_consumer_video_plugin_def_t : tsk_null;
+ }
#endif
#if PLUGIN_MF_ENABLE_AUDIO_IO
- case PLUGIN_INDEX_AUDIO_PRODUCER:
- {
- return plugin_win_mf_producer_audio_plugin_def_t;
- }
- case PLUGIN_INDEX_AUDIO_CONSUMER:
- {
- return plugin_win_mf_consumer_audio_plugin_def_t;
- }
+ case PLUGIN_INDEX_AUDIO_PRODUCER: {
+ return plugin_win_mf_producer_audio_plugin_def_t;
+ }
+ case PLUGIN_INDEX_AUDIO_CONSUMER: {
+ return plugin_win_mf_consumer_audio_plugin_def_t;
+ }
#endif
#if PLUGIN_MF_ENABLE_VIDEO_CONVERTER
- case PLUGIN_INDEX_VIDEO_CONVERTER:
- {
- return plugin_win_mf_converter_video_ms_plugin_def_t;
- }
-#endif
- case PLUGIN_INDEX_CODEC_H264_MAIN:
- {
- return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_main_plugin_def_t : tsk_null;
- }
- case PLUGIN_INDEX_CODEC_H264_BASE:
- {
- return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_base_plugin_def_t : tsk_null;
- }
- default:
- {
- TSK_DEBUG_ERROR("No plugin at index %d", index);
- return tsk_null;
- }
- }
+ case PLUGIN_INDEX_VIDEO_CONVERTER: {
+ return plugin_win_mf_converter_video_ms_plugin_def_t;
+ }
+#endif
+ case PLUGIN_INDEX_CODEC_H264_MAIN: {
+ return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_main_plugin_def_t : tsk_null;
+ }
+ case PLUGIN_INDEX_CODEC_H264_BASE: {
+ return MFUtils::IsLowLatencyH264Supported() ? mf_codec_h264_base_plugin_def_t : tsk_null;
+ }
+ default: {
+ TSK_DEBUG_ERROR("No plugin at index %d", index);
+ return tsk_null;
+ }
+ }
}
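
The four exported __plugin_get_def_* functions above form the enumeration contract of this plugin: the host asks how many definitions exist, then queries the type, media type and definition pointer per index, skipping entries that report tsk_null / _none when a feature (D3D9, low-latency H.264) is unavailable. A hedged sketch of a loader consuming that contract; the function name is illustrative and the actual registration step is left as a comment:

static void load_win_mf_plugin_defs()
{
    int count = __plugin_get_def_count();
    for (int i = 0; i < count; ++i) {
        tsk_plugin_def_ptr_const_t def = __plugin_get_def_at(i);
        // Unsupported features come back as tsk_null / tsk_plugin_def_type_none.
        if (!def || __plugin_get_def_type_at(i) == tsk_plugin_def_type_none) {
            continue;
        }
        TSK_DEBUG_INFO("Usable plugin definition at index %d (media type %d)",
                       i, __plugin_get_def_media_type_at(i));
        // ... hand "def" to the host's registration machinery here ...
    }
}
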
diff --git a/plugins/pluginWinMF/internals/mf_codec.cxx b/plugins/pluginWinMF/internals/mf_codec.cxx
index e2968f4..27fb903 100755
--- a/plugins/pluginWinMF/internals/mf_codec.cxx
+++ b/plugins/pluginWinMF/internals/mf_codec.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -40,13 +40,13 @@
#endif
// Make sure this stays buildable with the Win7 SDK while targeting Win8 features
-#if !defined(CODECAPI_AVLowLatencyMode)
+#if !defined(CODECAPI_AVLowLatencyMode)
DEFINE_GUID(CODECAPI_AVLowLatencyMode,
- 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
#endif
#if !defined(CODECAPI_AVDecVideoH264ErrorConcealment)
DEFINE_GUID(CODECAPI_AVDecVideoH264ErrorConcealment,
-0xececace8, 0x3436, 0x462c, 0x92, 0x94, 0xcd, 0x7b, 0xac, 0xd7, 0x58, 0xa9);
+ 0xececace8, 0x3436, 0x462c, 0x92, 0x94, 0xcd, 0x7b, 0xac, 0xd7, 0x58, 0xa9);
#endif
//
@@ -54,117 +54,105 @@ DEFINE_GUID(CODECAPI_AVDecVideoH264ErrorConcealment,
//
MFCodec::MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
-: m_nRefCount(1)
-, m_eId(eId)
-, m_eType(eType)
-, m_pMFT(NULL)
-, m_pCodecAPI(NULL)
-, m_pOutputType(NULL)
-, m_pInputType(NULL)
-, m_dwInputID(0)
-, m_dwOutputID(0)
-, m_rtStart(0)
-, m_rtDuration(0)
-, m_pSampleIn(NULL)
-, m_pSampleOut(NULL)
-, m_pEventGenerator(NULL)
-, m_bIsAsync(FALSE)
-, m_bIsFirstFrame(TRUE)
-, m_bIsBundled(FALSE)
-, m_nMETransformNeedInputCount(0)
-, m_nMETransformHaveOutputCount(0)
-, m_pSampleQueueAsyncInput(NULL)
+ : m_nRefCount(1)
+ , m_eId(eId)
+ , m_eType(eType)
+ , m_pMFT(NULL)
+ , m_pCodecAPI(NULL)
+ , m_pOutputType(NULL)
+ , m_pInputType(NULL)
+ , m_dwInputID(0)
+ , m_dwOutputID(0)
+ , m_rtStart(0)
+ , m_rtDuration(0)
+ , m_pSampleIn(NULL)
+ , m_pSampleOut(NULL)
+ , m_pEventGenerator(NULL)
+ , m_bIsAsync(FALSE)
+ , m_bIsFirstFrame(TRUE)
+ , m_bIsBundled(FALSE)
+ , m_nMETransformNeedInputCount(0)
+ , m_nMETransformHaveOutputCount(0)
+ , m_pSampleQueueAsyncInput(NULL)
{
- MFUtils::Startup();
-
- HRESULT hr = S_OK;
-
- switch(eId)
- {
- case MFCodecId_H264Base:
- case MFCodecId_H264Main:
- {
- m_eMediaType = MFCodecMediaType_Video;
- m_guidCompressedFormat = MFVideoFormat_H264;
- break;
- }
- case MFCodecId_AAC:
- {
- m_eMediaType = MFCodecMediaType_Audio;
- m_guidCompressedFormat = MFAudioFormat_AAC;
- break;
- }
- default:
- {
- assert(false);
- break;
- }
- }
- CHECK_HR(hr = MFCreateMediaType(&m_pOutputType));
- CHECK_HR(hr = MFCreateMediaType(&m_pInputType));
- if(pMFT) // up to the caller to make sure all parameters are corrrect
- {
- m_pMFT = pMFT;
- m_pMFT->AddRef();
- }
- else
- {
- CHECK_HR(hr = MFUtils::GetBestCodec(
- (m_eType == MFCodecType_Encoder) ? TRUE : FALSE, // Encoder ?
- (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio, // Media Type
- (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat/*GUID_NULL*/, // Input
- (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat, // Output
- &m_pMFT));
- }
- hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pCodecAPI));
- if(FAILED(hr) && m_eType == MFCodecType_Encoder) // Required only for Encoders
- {
- CHECK_HR(hr);
- }
-
-
- CHECK_HR(hr = MFUtils::IsAsyncMFT(m_pMFT, &m_bIsAsync));
- if(m_bIsAsync)
- {
- m_pSampleQueueAsyncInput = new MFSampleQueue();
- if(!m_pSampleQueueAsyncInput)
- {
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- CHECK_HR(hr = MFUtils::UnlockAsyncMFT(m_pMFT));
- CHECK_HR(hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pEventGenerator)));
- }
+ MFUtils::Startup();
+
+ HRESULT hr = S_OK;
+
+ switch(eId) {
+ case MFCodecId_H264Base:
+ case MFCodecId_H264Main: {
+ m_eMediaType = MFCodecMediaType_Video;
+ m_guidCompressedFormat = MFVideoFormat_H264;
+ break;
+ }
+ case MFCodecId_AAC: {
+ m_eMediaType = MFCodecMediaType_Audio;
+ m_guidCompressedFormat = MFAudioFormat_AAC;
+ break;
+ }
+ default: {
+ assert(false);
+ break;
+ }
+ }
+ CHECK_HR(hr = MFCreateMediaType(&m_pOutputType));
+ CHECK_HR(hr = MFCreateMediaType(&m_pInputType));
+    if(pMFT) { // up to the caller to make sure all parameters are correct
+ m_pMFT = pMFT;
+ m_pMFT->AddRef();
+ }
+ else {
+ CHECK_HR(hr = MFUtils::GetBestCodec(
+ (m_eType == MFCodecType_Encoder) ? TRUE : FALSE, // Encoder ?
+ (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio, // Media Type
+ (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat/*GUID_NULL*/, // Input
+ (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat, // Output
+ &m_pMFT));
+ }
+ hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pCodecAPI));
+ if(FAILED(hr) && m_eType == MFCodecType_Encoder) { // Required only for Encoders
+ CHECK_HR(hr);
+ }
+
+
+ CHECK_HR(hr = MFUtils::IsAsyncMFT(m_pMFT, &m_bIsAsync));
+ if(m_bIsAsync) {
+ m_pSampleQueueAsyncInput = new MFSampleQueue();
+ if(!m_pSampleQueueAsyncInput) {
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ CHECK_HR(hr = MFUtils::UnlockAsyncMFT(m_pMFT));
+ CHECK_HR(hr = m_pMFT->QueryInterface(IID_PPV_ARGS(&m_pEventGenerator)));
+ }
bail:
- if(FAILED(hr))
- {
- SafeRelease(&m_pMFT);
- SafeRelease(&m_pCodecAPI);
- }
- if(!IsValid())
- {
- TSK_DEBUG_ERROR("Failed to create codec with id = %d", m_eId);
- }
-}
+ if(FAILED(hr)) {
+ SafeRelease(&m_pMFT);
+ SafeRelease(&m_pCodecAPI);
+ }
+ if(!IsValid()) {
+ TSK_DEBUG_ERROR("Failed to create codec with id = %d", m_eId);
+ }
+}
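
The constructor above branches on MFUtils::IsAsyncMFT() / MFUtils::UnlockAsyncMFT(), whose bodies are not part of this diff. Presumably they reduce to the standard MF_TRANSFORM_ASYNC / MF_TRANSFORM_ASYNC_UNLOCK attribute handling sketched below; this is an assumption for illustration, not the plugin's actual helpers:

static HRESULT IsAsyncMFT_Sketch(IMFTransform* pMFT, BOOL* pbIsAsync)
{
    IMFAttributes* pAttributes = NULL;
    HRESULT hr = pMFT->GetAttributes(&pAttributes);
    *pbIsAsync = FALSE;
    if (SUCCEEDED(hr) && pAttributes) {
        // Asynchronous (typically hardware) MFTs advertise MF_TRANSFORM_ASYNC = TRUE.
        *pbIsAsync = (MFGetAttributeUINT32(pAttributes, MF_TRANSFORM_ASYNC, FALSE) != 0);
    }
    SafeRelease(&pAttributes);
    return S_OK; // a transform without attributes is simply treated as synchronous
}

static HRESULT UnlockAsyncMFT_Sketch(IMFTransform* pMFT)
{
    IMFAttributes* pAttributes = NULL;
    HRESULT hr = pMFT->GetAttributes(&pAttributes);
    if (SUCCEEDED(hr)) {
        // Async MFTs reject ProcessInput/ProcessOutput until the caller opts in.
        hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE);
    }
    SafeRelease(&pAttributes);
    return hr;
}
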
MFCodec::~MFCodec()
{
- assert(m_nRefCount == 0);
+ assert(m_nRefCount == 0);
- if(m_bIsAsync && m_pMFT)
- {
- m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
- m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
- }
+ if(m_bIsAsync && m_pMFT) {
+ m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
+ m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
+ }
- SafeRelease(&m_pMFT);
- SafeRelease(&m_pCodecAPI);
+ SafeRelease(&m_pMFT);
+ SafeRelease(&m_pCodecAPI);
SafeRelease(&m_pOutputType);
- SafeRelease(&m_pInputType);
- SafeRelease(&m_pSampleIn);
- SafeRelease(&m_pSampleOut);
- SafeRelease(&m_pEventGenerator);
- SafeRelease(&m_pSampleQueueAsyncInput);
+ SafeRelease(&m_pInputType);
+ SafeRelease(&m_pSampleIn);
+ SafeRelease(&m_pSampleOut);
+ SafeRelease(&m_pEventGenerator);
+ SafeRelease(&m_pSampleQueueAsyncInput);
}
ULONG MFCodec::AddRef()
@@ -175,8 +163,7 @@ ULONG MFCodec::AddRef()
ULONG MFCodec::Release()
{
ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -185,242 +172,210 @@ ULONG MFCodec::Release()
HRESULT MFCodec::QueryInterface(REFIID iid, void** ppv)
{
- if(!IsValid())
- {
- return E_FAIL;
- }
- return m_pMFT->QueryInterface(iid, ppv);
+ if(!IsValid()) {
+ return E_FAIL;
+ }
+ return m_pMFT->QueryInterface(iid, ppv);
}
// IMFAsyncCallback
STDMETHODIMP MFCodec::GetParameters(DWORD *pdwFlags, DWORD *pdwQueue)
{
- return E_NOTIMPL;
+ return E_NOTIMPL;
}
STDMETHODIMP MFCodec::Invoke(IMFAsyncResult *pAsyncResult)
{
- HRESULT hr = S_OK, hrStatus = S_OK;
+ HRESULT hr = S_OK, hrStatus = S_OK;
IMFMediaEvent* pEvent = NULL;
MediaEventType meType = MEUnknown;
-
+
CHECK_HR(hr = m_pEventGenerator->EndGetEvent(pAsyncResult, &pEvent));
- CHECK_HR(hr = pEvent->GetType(&meType));
+ CHECK_HR(hr = pEvent->GetType(&meType));
CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- if (SUCCEEDED(hrStatus))
- {
- switch(meType)
- {
- case METransformNeedInput:
- {
- InterlockedIncrement(&m_nMETransformNeedInputCount);
- break;
- }
-
- case METransformHaveOutput:
- {
- InterlockedIncrement(&m_nMETransformHaveOutputCount);
- break;
- }
- }
- }
-
+ if (SUCCEEDED(hrStatus)) {
+ switch(meType) {
+ case METransformNeedInput: {
+ InterlockedIncrement(&m_nMETransformNeedInputCount);
+ break;
+ }
+
+ case METransformHaveOutput: {
+ InterlockedIncrement(&m_nMETransformHaveOutputCount);
+ break;
+ }
+ }
+ }
+
CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
bail:
- SafeRelease(&pEvent);
+ SafeRelease(&pEvent);
return hr;
}
HRESULT MFCodec::ProcessInput(IMFSample* pSample)
{
- assert(IsReady());
-
- HRESULT hr = S_OK;
-
- if(m_bIsFirstFrame)
- {
- if(m_bIsAsync && !m_bIsBundled)
- {
- CHECK_HR(hr = m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL));
- CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
- }
- m_bIsFirstFrame = FALSE;
- }
-
- if(m_bIsAsync)
- {
- if(m_nMETransformNeedInputCount == 1 && m_pSampleQueueAsyncInput->IsEmpty())
- {
- InterlockedDecrement(&m_nMETransformNeedInputCount);
- return m_pMFT->ProcessInput(m_dwInputID, pSample, 0);
- }
-
- if(m_pSampleQueueAsyncInput->Count() > kMFCodecQueuedFramesMax)
- {
- m_pSampleQueueAsyncInput->Clear();
- CHECK_HR(hr = E_UNEXPECTED);
- }
-
- // Input sample holds shared memory (also used by other samples)
- IMFSample *pSampleCopy = NULL;
- IMFMediaBuffer *pMediaBuffer = NULL, *pMediaBufferCopy = NULL;
- BYTE *pBufferPtr = NULL, *pBufferPtrCopy = NULL;
- DWORD dwDataLength = 0;
- BOOL bMediaBufferLocked = FALSE, bMediaBufferLockedCopy = FALSE;
-
- CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
- hr = pMediaBuffer->GetCurrentLength(&dwDataLength);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- hr = pMediaBuffer->Lock(&pBufferPtr, NULL, NULL);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- bMediaBufferLocked = TRUE;
-
- hr = MFUtils::CreateMediaSample(dwDataLength, &pSampleCopy);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- hr = pSampleCopy->GetBufferByIndex(0, &pMediaBufferCopy);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- hr = pMediaBufferCopy->Lock(&pBufferPtrCopy, NULL, NULL);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
- bMediaBufferLockedCopy = TRUE;
-
- memcpy(pBufferPtrCopy, pBufferPtr, dwDataLength);
- hr = pMediaBufferCopy->SetCurrentLength(dwDataLength);
- if(FAILED(hr))
- {
- goto endofcopy;
- }
-
- LONGLONG hnsSampleTime = 0;
- LONGLONG hnsSampleDuration = 0;
- hr = pSample->GetSampleTime(&hnsSampleTime);
- if(SUCCEEDED(hr))
- {
- hr = pSampleCopy->SetSampleTime(hnsSampleTime);
- }
- hr = pSample->GetSampleDuration(&hnsSampleDuration);
- if(SUCCEEDED(hr))
- {
- hr = pSampleCopy->SetSampleDuration(hnsSampleDuration);
- }
-
- // EnQueue
- hr = m_pSampleQueueAsyncInput->Queue(pSampleCopy);
+ assert(IsReady());
+
+ HRESULT hr = S_OK;
+
+ if(m_bIsFirstFrame) {
+ if(m_bIsAsync && !m_bIsBundled) {
+ CHECK_HR(hr = m_pMFT->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL));
+ CHECK_HR(hr = m_pEventGenerator->BeginGetEvent(this, NULL));
+ }
+ m_bIsFirstFrame = FALSE;
+ }
+
+ if(m_bIsAsync) {
+ if(m_nMETransformNeedInputCount == 1 && m_pSampleQueueAsyncInput->IsEmpty()) {
+ InterlockedDecrement(&m_nMETransformNeedInputCount);
+ return m_pMFT->ProcessInput(m_dwInputID, pSample, 0);
+ }
+
+ if(m_pSampleQueueAsyncInput->Count() > kMFCodecQueuedFramesMax) {
+ m_pSampleQueueAsyncInput->Clear();
+ CHECK_HR(hr = E_UNEXPECTED);
+ }
+
+ // Input sample holds shared memory (also used by other samples)
+ IMFSample *pSampleCopy = NULL;
+ IMFMediaBuffer *pMediaBuffer = NULL, *pMediaBufferCopy = NULL;
+ BYTE *pBufferPtr = NULL, *pBufferPtrCopy = NULL;
+ DWORD dwDataLength = 0;
+ BOOL bMediaBufferLocked = FALSE, bMediaBufferLockedCopy = FALSE;
+
+ CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+ hr = pMediaBuffer->GetCurrentLength(&dwDataLength);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ hr = pMediaBuffer->Lock(&pBufferPtr, NULL, NULL);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ bMediaBufferLocked = TRUE;
+
+ hr = MFUtils::CreateMediaSample(dwDataLength, &pSampleCopy);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ hr = pSampleCopy->GetBufferByIndex(0, &pMediaBufferCopy);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ hr = pMediaBufferCopy->Lock(&pBufferPtrCopy, NULL, NULL);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+ bMediaBufferLockedCopy = TRUE;
+
+ memcpy(pBufferPtrCopy, pBufferPtr, dwDataLength);
+ hr = pMediaBufferCopy->SetCurrentLength(dwDataLength);
+ if(FAILED(hr)) {
+ goto endofcopy;
+ }
+
+ LONGLONG hnsSampleTime = 0;
+ LONGLONG hnsSampleDuration = 0;
+ hr = pSample->GetSampleTime(&hnsSampleTime);
+ if(SUCCEEDED(hr)) {
+ hr = pSampleCopy->SetSampleTime(hnsSampleTime);
+ }
+ hr = pSample->GetSampleDuration(&hnsSampleDuration);
+ if(SUCCEEDED(hr)) {
+ hr = pSampleCopy->SetSampleDuration(hnsSampleDuration);
+ }
+
+ // EnQueue
+ hr = m_pSampleQueueAsyncInput->Queue(pSampleCopy);
endofcopy:
- if(pMediaBuffer && bMediaBufferLocked)
- {
- pMediaBuffer->Unlock();
- }
- if(pMediaBufferCopy && bMediaBufferLockedCopy)
- {
- pMediaBufferCopy->Unlock();
- }
- SafeRelease(&pSampleCopy);
- SafeRelease(&pMediaBuffer);
- CHECK_HR(hr);
-
- while(m_nMETransformNeedInputCount > 0)
- {
- if(m_pSampleQueueAsyncInput->IsEmpty())
- {
- break;
- }
- IMFSample *_pSample = NULL;
- hr = m_pSampleQueueAsyncInput->Dequeue(&_pSample);
- if(SUCCEEDED(hr))
- {
- InterlockedDecrement(&m_nMETransformNeedInputCount);
- hr = m_pMFT->ProcessInput(m_dwInputID, _pSample, 0);
- }
- SafeRelease(&_pSample);
- CHECK_HR(hr);
- }
- }
- else
- {
- CHECK_HR(hr = m_pMFT->ProcessInput(m_dwInputID, pSample, 0));
- }
+ if(pMediaBuffer && bMediaBufferLocked) {
+ pMediaBuffer->Unlock();
+ }
+ if(pMediaBufferCopy && bMediaBufferLockedCopy) {
+ pMediaBufferCopy->Unlock();
+ }
+ SafeRelease(&pSampleCopy);
+ SafeRelease(&pMediaBuffer);
+ CHECK_HR(hr);
+
+ while(m_nMETransformNeedInputCount > 0) {
+ if(m_pSampleQueueAsyncInput->IsEmpty()) {
+ break;
+ }
+ IMFSample *_pSample = NULL;
+ hr = m_pSampleQueueAsyncInput->Dequeue(&_pSample);
+ if(SUCCEEDED(hr)) {
+ InterlockedDecrement(&m_nMETransformNeedInputCount);
+ hr = m_pMFT->ProcessInput(m_dwInputID, _pSample, 0);
+ }
+ SafeRelease(&_pSample);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ CHECK_HR(hr = m_pMFT->ProcessInput(m_dwInputID, pSample, 0));
+ }
bail:
- return hr;
+ return hr;
}
HRESULT MFCodec::ProcessOutput(IMFSample **ppSample)
{
- assert(IsReady());
+ assert(IsReady());
- if(m_bIsAsync)
- {
- if(m_nMETransformHaveOutputCount == 0)
- {
- return S_OK;
- }
- InterlockedDecrement(&m_nMETransformHaveOutputCount);
- }
+ if(m_bIsAsync) {
+ if(m_nMETransformHaveOutputCount == 0) {
+ return S_OK;
+ }
+ InterlockedDecrement(&m_nMETransformHaveOutputCount);
+ }
- *ppSample = NULL;
+ *ppSample = NULL;
IMFMediaBuffer* pBufferOut = NULL;
DWORD dwStatus;
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
MFT_OUTPUT_STREAM_INFO mftStreamInfo = { 0 };
MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
- CHECK_HR(hr = m_pMFT->GetOutputStreamInfo(m_dwOutputID, &mftStreamInfo));
-
- BOOL bOutputStreamProvidesSamples = (mftStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES;
-
- if(!bOutputStreamProvidesSamples)
- {
- if(!m_pSampleOut)
- {
- CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &m_pSampleOut));
- hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut);
- if(FAILED(hr))
- {
- SafeRelease(&m_pSampleOut);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut));
- CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < mftStreamInfo.cbSize)
- {
- CHECK_HR(hr = m_pSampleOut->RemoveAllBuffers());
- SafeRelease(&pBufferOut);
- CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
- CHECK_HR(hr = m_pSampleOut->AddBuffer(pBufferOut));
- }
- }
- }
-
- if(pBufferOut)
- {
- CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
- }
-
+ CHECK_HR(hr = m_pMFT->GetOutputStreamInfo(m_dwOutputID, &mftStreamInfo));
+
+ BOOL bOutputStreamProvidesSamples = (mftStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES;
+
+ if(!bOutputStreamProvidesSamples) {
+ if(!m_pSampleOut) {
+ CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &m_pSampleOut));
+ hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut);
+ if(FAILED(hr)) {
+ SafeRelease(&m_pSampleOut);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = m_pSampleOut->GetBufferByIndex(0, &pBufferOut));
+ CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < mftStreamInfo.cbSize) {
+ CHECK_HR(hr = m_pSampleOut->RemoveAllBuffers());
+ SafeRelease(&pBufferOut);
+ CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
+ CHECK_HR(hr = m_pSampleOut->AddBuffer(pBufferOut));
+ }
+ }
+ }
+
+ if(pBufferOut) {
+ CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
+ }
+
//Set the output sample
mftOutputData.pSample = bOutputStreamProvidesSamples ? NULL : m_pSampleOut;
//Set the output id
@@ -428,167 +383,145 @@ HRESULT MFCodec::ProcessOutput(IMFSample **ppSample)
//Generate the output sample
hr = m_pMFT->ProcessOutput(0, 1, &mftOutputData, &dwStatus);
- if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
- {
+ if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
hr = S_OK;
goto bail;
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
goto bail;
}
*ppSample = mftOutputData.pSample;
- if(*ppSample)
- {
- (*ppSample)->AddRef();
- }
+ if(*ppSample) {
+ (*ppSample)->AddRef();
+ }
bail:
- if(bOutputStreamProvidesSamples)
- {
- SafeRelease(&mftOutputData.pSample);
- }
+ if(bOutputStreamProvidesSamples) {
+ SafeRelease(&mftOutputData.pSample);
+ }
SafeRelease(&pBufferOut);
return hr;
}
bool MFCodec::IsValid()
{
- return (m_pMFT && (m_eType == MFCodecType_Decoder || m_pCodecAPI));
+ return (m_pMFT && (m_eType == MFCodecType_Decoder || m_pCodecAPI));
}
bool MFCodec::IsReady()
{
- return (IsValid() && m_pOutputType && m_pInputType);
+ return (IsValid() && m_pOutputType && m_pInputType);
}
HRESULT MFCodec::Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut)
{
- if(!pcInputPtr || !nInputSize || !ppSampleOut)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return E_INVALIDARG;
- }
-
- *ppSampleOut = NULL;
-
- HRESULT hr = S_OK;
-
- IMFMediaBuffer* pBufferIn = NULL;
- BYTE* pBufferPtr = NULL;
- BOOL bMediaChangeHandled = FALSE; // Endless loop guard
-
- if(!m_pSampleIn)
- {
- CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &m_pSampleIn));
- hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn);
- if(FAILED(hr))
- {
- SafeRelease(&m_pSampleIn);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn));
- CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < nInputSize)
- {
- CHECK_HR(hr = m_pSampleIn->RemoveAllBuffers());
- SafeRelease(&pBufferIn);
- CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
- CHECK_HR(hr = m_pSampleIn->AddBuffer(pBufferIn));
- }
- }
-
- CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
- memcpy(pBufferPtr, pcInputPtr, nInputSize);
- CHECK_HR(hr = pBufferIn->Unlock());
- CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
-
- if(m_eType == MFCodecType_Encoder)
- {
- CHECK_HR(hr = m_pSampleIn->SetSampleDuration(m_rtDuration));
- CHECK_HR(hr = m_pSampleIn->SetSampleTime(m_rtStart)); // FIXME: use clock(), Same for custom source
- }
+ if(!pcInputPtr || !nInputSize || !ppSampleOut) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return E_INVALIDARG;
+ }
+
+ *ppSampleOut = NULL;
+
+ HRESULT hr = S_OK;
+
+ IMFMediaBuffer* pBufferIn = NULL;
+ BYTE* pBufferPtr = NULL;
+ BOOL bMediaChangeHandled = FALSE; // Endless loop guard
+
+ if(!m_pSampleIn) {
+ CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &m_pSampleIn));
+ hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn);
+ if(FAILED(hr)) {
+ SafeRelease(&m_pSampleIn);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = m_pSampleIn->GetBufferByIndex(0, &pBufferIn));
+ CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < nInputSize) {
+ CHECK_HR(hr = m_pSampleIn->RemoveAllBuffers());
+ SafeRelease(&pBufferIn);
+ CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
+ CHECK_HR(hr = m_pSampleIn->AddBuffer(pBufferIn));
+ }
+ }
+
+ CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
+ memcpy(pBufferPtr, pcInputPtr, nInputSize);
+ CHECK_HR(hr = pBufferIn->Unlock());
+ CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
+
+ if(m_eType == MFCodecType_Encoder) {
+ CHECK_HR(hr = m_pSampleIn->SetSampleDuration(m_rtDuration));
+ CHECK_HR(hr = m_pSampleIn->SetSampleTime(m_rtStart)); // FIXME: use clock(), Same for custom source
+ }
Label_ProcessInput:
- hr = ProcessInput(m_pSampleIn);
- while(hr == MF_E_NOTACCEPTING)
- {
- TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
- IMFSample* pSample = NULL;
- hr = ProcessOutput(&pSample);
- if(SUCCEEDED(hr) && pSample)
- {
- SafeRelease(ppSampleOut);
- *ppSampleOut = pSample, pSample = NULL;
-
- hr = m_pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
- hr = ProcessInput(m_pSampleIn);
- }
- }
- if(!*ppSampleOut)
- {
- hr = ProcessOutput(ppSampleOut);
- if(hr == MF_E_TRANSFORM_STREAM_CHANGE) /* Handling Stream Changes: http://msdn.microsoft.com/en-us/library/windows/desktop/ee663587(v=vs.85).aspx */
- {
- TSK_DEBUG_INFO("[MF Codec] Stream changed");
- if(m_eType == MFCodecType_Decoder)
- {
- IMFMediaType *pTypeOut = NULL;
- hr = m_pMFT->GetOutputAvailableType(m_dwOutputID, 0, &pTypeOut);
- if(SUCCEEDED(hr))
- {
- UINT32 uWidth = 0, uHeight = 0;
- hr = MFGetAttributeSize(pTypeOut, MF_MT_FRAME_SIZE, &uWidth, &uHeight);
- if(SUCCEEDED(hr))
- {
- TSK_DEBUG_INFO("[MF Decoder] New size: width=%u, height=%u", uWidth, uHeight);
- hr = m_pMFT->SetOutputType(m_dwOutputID, pTypeOut, 0);
- if(SUCCEEDED(hr))
- {
- SafeRelease(&m_pOutputType);
- pTypeOut->AddRef();
- m_pOutputType = pTypeOut;
- if(m_eMediaType == MFCodecMediaType_Video)
- {
- dynamic_cast<MFCodecVideo*>(this)->m_nWidth = uWidth;
- dynamic_cast<MFCodecVideo*>(this)->m_nHeight = uHeight;
- }
- }
- }
- }
- SafeRelease(&pTypeOut);
- if(SUCCEEDED(hr))
- {
- if(!bMediaChangeHandled)
- {
- bMediaChangeHandled = TRUE;
- goto Label_ProcessInput;
- }
- }
- }
- }
- }
-
- m_rtStart += m_rtDuration;
-
+ hr = ProcessInput(m_pSampleIn);
+ while(hr == MF_E_NOTACCEPTING) {
+ TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
+ IMFSample* pSample = NULL;
+ hr = ProcessOutput(&pSample);
+ if(SUCCEEDED(hr) && pSample) {
+ SafeRelease(ppSampleOut);
+ *ppSampleOut = pSample, pSample = NULL;
+
+ hr = m_pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
+ hr = ProcessInput(m_pSampleIn);
+ }
+ }
+ if(!*ppSampleOut) {
+ hr = ProcessOutput(ppSampleOut);
+ if(hr == MF_E_TRANSFORM_STREAM_CHANGE) { /* Handling Stream Changes: http://msdn.microsoft.com/en-us/library/windows/desktop/ee663587(v=vs.85).aspx */
+ TSK_DEBUG_INFO("[MF Codec] Stream changed");
+ if(m_eType == MFCodecType_Decoder) {
+ IMFMediaType *pTypeOut = NULL;
+ hr = m_pMFT->GetOutputAvailableType(m_dwOutputID, 0, &pTypeOut);
+ if(SUCCEEDED(hr)) {
+ UINT32 uWidth = 0, uHeight = 0;
+ hr = MFGetAttributeSize(pTypeOut, MF_MT_FRAME_SIZE, &uWidth, &uHeight);
+ if(SUCCEEDED(hr)) {
+ TSK_DEBUG_INFO("[MF Decoder] New size: width=%u, height=%u", uWidth, uHeight);
+ hr = m_pMFT->SetOutputType(m_dwOutputID, pTypeOut, 0);
+ if(SUCCEEDED(hr)) {
+ SafeRelease(&m_pOutputType);
+ pTypeOut->AddRef();
+ m_pOutputType = pTypeOut;
+ if(m_eMediaType == MFCodecMediaType_Video) {
+ dynamic_cast<MFCodecVideo*>(this)->m_nWidth = uWidth;
+ dynamic_cast<MFCodecVideo*>(this)->m_nHeight = uHeight;
+ }
+ }
+ }
+ }
+ SafeRelease(&pTypeOut);
+ if(SUCCEEDED(hr)) {
+ if(!bMediaChangeHandled) {
+ bMediaChangeHandled = TRUE;
+ goto Label_ProcessInput;
+ }
+ }
+ }
+ }
+ }
+
+ m_rtStart += m_rtDuration;
+
bail:
- SafeRelease(&pBufferIn);
- return hr;
+ SafeRelease(&pBufferIn);
+ return hr;
}
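
Process() is the single entry point the plugin's codec wrappers call per frame: it copies the caller's buffer into m_pSampleIn, drives ProcessInput()/ProcessOutput(), and may legitimately return S_OK with a NULL output sample when the transform still needs more input. A hedged usage sketch for the decode direction; the function name and buffer variables are illustrative:

static HRESULT DecodeOneFrame_Sketch(MFCodec* pDecoder, const void* pcNalus, UINT32 nNalusSize)
{
    IMFSample* pSampleOut = NULL;
    HRESULT hr = pDecoder->Process(pcNalus, nNalusSize, &pSampleOut);
    if (SUCCEEDED(hr) && pSampleOut) {
        IMFMediaBuffer* pBuffer = NULL;
        if (SUCCEEDED(pSampleOut->GetBufferByIndex(0, &pBuffer))) {
            BYTE* pData = NULL;
            DWORD dwLength = 0;
            if (SUCCEEDED(pBuffer->Lock(&pData, NULL, &dwLength))) {
                // pData/dwLength now hold one uncompressed (NV12) frame.
                pBuffer->Unlock();
            }
        }
        SafeRelease(&pBuffer);
    }
    SafeRelease(&pSampleOut);
    return hr; // S_OK with a NULL sample simply means "feed more input"
}
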
enum tmedia_chroma_e MFCodec::GetUncompressedChroma()
{
- if(kMFCodecUncompressedFormat == MFVideoFormat_NV12)
- {
- return tmedia_chroma_nv12;
- }
- assert(false);
- return tmedia_chroma_none;
+ if(kMFCodecUncompressedFormat == MFVideoFormat_NV12) {
+ return tmedia_chroma_nv12;
+ }
+ assert(false);
+ return tmedia_chroma_none;
}
//
@@ -596,12 +529,12 @@ enum tmedia_chroma_e MFCodec::GetUncompressedChroma()
//
MFCodecVideo::MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
-: MFCodec(eId, eType, pMFT)
-, m_nFrameRate(0)
-, m_nWidth(0)
-, m_nHeight(0)
+ : MFCodec(eId, eType, pMFT)
+ , m_nFrameRate(0)
+ , m_nWidth(0)
+ , m_nHeight(0)
{
- assert(m_eMediaType == MFCodecMediaType_Video);
+ assert(m_eMediaType == MFCodecMediaType_Video);
}
MFCodecVideo::~MFCodecVideo()
@@ -610,279 +543,261 @@ MFCodecVideo::~MFCodecVideo()
}
HRESULT MFCodecVideo::Initialize(
- UINT32 nFrameRate,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nOutputBitRateInBps /*= 0*/
- )
+ UINT32 nFrameRate,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nOutputBitRateInBps /*= 0*/
+)
{
- assert(IsValid());
-
- HRESULT hr = S_OK;
+ assert(IsValid());
- VARIANT var = {0};
+ HRESULT hr = S_OK;
- // make sure identifiers are zero-based (other layouts not supported yet)
- hr = m_pMFT->GetStreamIDs(1, &m_dwInputID, 1, &m_dwOutputID);
- if (hr == E_NOTIMPL)
- {
+ VARIANT var = {0};
+
+ // make sure identifiers are zero-based (other layouts not supported yet)
+ hr = m_pMFT->GetStreamIDs(1, &m_dwInputID, 1, &m_dwOutputID);
+ if (hr == E_NOTIMPL) {
m_dwInputID = 0;
m_dwOutputID = 0;
hr = S_OK;
}
- else if (FAILED(hr))
- {
- TSK_DEBUG_ERROR("The stream identifiers are not zero-based");
+ else if (FAILED(hr)) {
+ TSK_DEBUG_ERROR("The stream identifiers are not zero-based");
return hr;
}
- m_rtStart = 0;
- CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(nFrameRate, 1, &m_rtDuration));
+ m_rtStart = 0;
+ CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(nFrameRate, 1, &m_rtDuration));
+
+ CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
+ CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
- CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
- CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_MAJOR_TYPE, (m_eMediaType == MFCodecMediaType_Video) ? MFMediaType_Video : MFMediaType_Audio));
+ CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat));
+ CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat));
- CHECK_HR(hr = m_pOutputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? m_guidCompressedFormat : kMFCodecUncompressedFormat));
- CHECK_HR(hr = m_pInputType->SetGUID(MF_MT_SUBTYPE, (m_eType == MFCodecType_Encoder) ? kMFCodecUncompressedFormat : m_guidCompressedFormat));
-
- CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
- CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+ CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
+ CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+
+ CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
+ CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
+
+ // Set bitrate
+ // Set (MF_MT_AVG_BITRATE) for MediaType
+ // Set (CODECAPI_AVEncCommonMeanBitRate) for H.264
+ hr = SetBitRate(nOutputBitRateInBps);
- CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? FALSE : TRUE));
- CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, (m_eType == MFCodecType_Encoder) ? TRUE : FALSE));
-
- // Set bitrate
- // Set (MF_MT_AVG_BITRATE) for MediaType
- // Set (CODECAPI_AVEncCommonMeanBitRate) for H.264
- hr = SetBitRate(nOutputBitRateInBps);
-
CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
- CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+ CHECK_HR(hr = m_pInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
CHECK_HR(hr = MFSetAttributeSize(m_pOutputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
- CHECK_HR(hr = MFSetAttributeSize(m_pInputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
+ CHECK_HR(hr = MFSetAttributeSize(m_pInputType, MF_MT_FRAME_SIZE, nWidth, nHeight));
CHECK_HR(hr = MFSetAttributeRatio(m_pOutputType, MF_MT_FRAME_RATE, nFrameRate, 1));
- CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_FRAME_RATE, nFrameRate, 1));
+ CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_FRAME_RATE, nFrameRate, 1));
CHECK_HR(hr = MFSetAttributeRatio(m_pOutputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
- CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
-
- // Encoder: Output format must be set before input
- // Decoder: Input format must be set before output
- if(m_eType == MFCodecType_Encoder)
- {
- CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
- CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
- }
- else
- {
- CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
- CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
- }
-
- if(m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)
- {
- if(m_eType == MFCodecType_Decoder)
- {
- // Only decoder support GetAttributes()
- IMFAttributes* pAttributes = NULL;
- hr = m_pMFT->GetAttributes(&pAttributes);
- if(SUCCEEDED(hr))
- {
- // FIXME: Very strange that "CODECAPI_AVLowLatencyMode" only works with "IMFAttributes->" and not "ICodecAPI->SetValue()"
- hr = pAttributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
- }
- SafeRelease(&pAttributes);
- }
- else
- {
- var.vt = VT_BOOL;
- var.boolVal = VARIANT_TRUE;
- hr = m_pCodecAPI->SetValue(&CODECAPI_AVLowLatencyMode, &var);
-
- var.vt = VT_BOOL;
- var.boolVal = VARIANT_TRUE;
- hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonLowLatency, &var); // Correct for the decoder
-
- // Disable B-Frames
- var.vt = VT_UI4;
- var.ulVal = 0;
- hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVDefaultBPictureCount, &var);
-
- // Constant bitrate (updated using RTCP)
- var.vt = VT_UI4;
- var.ulVal = eAVEncCommonRateControlMode_CBR;
- hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
- }
-
- hr = S_OK; // Not mandatory features
- }
+ CHECK_HR(hr = MFSetAttributeRatio(m_pInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+
+ // Encoder: Output format must be set before input
+ // Decoder: Input format must be set before output
+ if(m_eType == MFCodecType_Encoder) {
+ CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
+ CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
+ }
+ else {
+ CHECK_HR(hr = m_pMFT->SetInputType(m_dwInputID, m_pInputType, 0));
+ CHECK_HR(hr = m_pMFT->SetOutputType(m_dwOutputID, m_pOutputType, 0));
+ }
+
+ if(m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main) {
+ if(m_eType == MFCodecType_Decoder) {
+            // Only the decoder supports GetAttributes()
+ IMFAttributes* pAttributes = NULL;
+ hr = m_pMFT->GetAttributes(&pAttributes);
+ if(SUCCEEDED(hr)) {
+ // FIXME: Very strange that "CODECAPI_AVLowLatencyMode" only works with "IMFAttributes->" and not "ICodecAPI->SetValue()"
+ hr = pAttributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
+ }
+ SafeRelease(&pAttributes);
+ }
+ else {
+ var.vt = VT_BOOL;
+ var.boolVal = VARIANT_TRUE;
+ hr = m_pCodecAPI->SetValue(&CODECAPI_AVLowLatencyMode, &var);
+
+ var.vt = VT_BOOL;
+ var.boolVal = VARIANT_TRUE;
+ hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonLowLatency, &var); // Correct for the decoder
+
+ // Disable B-Frames
+ var.vt = VT_UI4;
+ var.ulVal = 0;
+ hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVDefaultBPictureCount, &var);
+
+ // Constant bitrate (updated using RTCP)
+ var.vt = VT_UI4;
+ var.ulVal = eAVEncCommonRateControlMode_CBR;
+ hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
+ }
+
+ hr = S_OK; // Not mandatory features
+ }
bail:
- if(SUCCEEDED(hr))
- {
- m_nFrameRate = nFrameRate;
- m_nWidth = nWidth;
- m_nHeight = nHeight;
- }
+ if(SUCCEEDED(hr)) {
+ m_nFrameRate = nFrameRate;
+ m_nWidth = nWidth;
+ m_nHeight = nHeight;
+ }
- return hr;
+ return hr;
}
HRESULT MFCodecVideo::SetGOPSize(UINT32 nFramesCount)
{
- assert(IsValid());
+ assert(IsValid());
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(m_eType == MFCodecType_Encoder && (m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
- VARIANT var = {0};
- var.vt = VT_UI4;
- var.ullVal = nFramesCount;
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVGOPSize, &var));
- }
+ if(m_eType == MFCodecType_Encoder && (m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
+ VARIANT var = {0};
+ var.vt = VT_UI4;
+ var.ullVal = nFramesCount;
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncMPVGOPSize, &var));
+ }
bail:
- return hr;
+ return hr;
}
HRESULT MFCodecVideo::SetBitRate(UINT32 nBitRateInBps)
{
- assert(IsValid());
+ assert(IsValid());
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(nBitRateInBps > 0 && m_eType == MFCodecType_Encoder)
- {
- CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_AVG_BITRATE, nBitRateInBps));
+ if(nBitRateInBps > 0 && m_eType == MFCodecType_Encoder) {
+ CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_AVG_BITRATE, nBitRateInBps));
- if((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
- VARIANT var = {0};
+ if((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
+ VARIANT var = {0};
- // Set BitRate
- var.vt = VT_UI4;
- var.ullVal = nBitRateInBps;
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var));
- }
- }
+ // Set BitRate
+ var.vt = VT_UI4;
+ var.ullVal = nBitRateInBps;
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var));
+ }
+ }
bail:
- return hr;
+ return hr;
}
HRESULT MFCodecVideo::IsSetSliceMaxSizeInBytesSupported(BOOL &supported)
{
- HRESULT hr = S_OK;
- supported = FALSE;
+ HRESULT hr = S_OK;
+ supported = FALSE;
- if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
+ if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
#if defined(CODECAPI_AVEncSliceControlMode) && defined(CODECAPI_AVEncSliceControlSize)
- if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
- supported = TRUE;
- }
+ if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
+ supported = TRUE;
+ }
#endif
- }
- return hr;
+ }
+ return hr;
}
HRESULT MFCodecVideo::SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes)
{
- assert(IsValid() && nSliceMaxSizeInBytes > 0);
+ assert(IsValid() && nSliceMaxSizeInBytes > 0);
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
+ if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
#if defined(CODECAPI_AVEncSliceControlMode) && defined(CODECAPI_AVEncSliceControlSize)
- if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
- VARIANT var = { 0 };
- var.vt = VT_UI4;
+ if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlMode) == S_OK && m_pCodecAPI->IsSupported(&CODECAPI_AVEncSliceControlSize) == S_OK) {
+ VARIANT var = { 0 };
+ var.vt = VT_UI4;
- var.ulVal = 1; // Bits
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlMode, &var));
+ var.ulVal = 1; // Bits
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlMode, &var));
- var.ulVal = (nSliceMaxSizeInBytes << 3); // From Bytes to Bits
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlSize, &var));
- }
+ var.ulVal = (nSliceMaxSizeInBytes << 3); // From Bytes to Bits
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncSliceControlSize, &var));
+ }
#else
- CHECK_HR(hr = S_OK);
+ CHECK_HR(hr = S_OK);
#endif
- }
+ }
bail:
- return hr;
+ return hr;
}
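
SetSliceMaxSizeInBytes() above relies on CODECAPI_AVEncSliceControlMode being set to 1, i.e. slice sizes expressed in bits, which is why the byte count is shifted left by three. A quick numeric illustration; the values are only an example, chosen to keep each slice inside a typical RTP payload:

UINT32 nSliceMaxSizeInBytes = 1300;                        // roughly MTU minus IP/UDP/RTP headers
UINT32 nSliceMaxSizeInBits  = (nSliceMaxSizeInBytes << 3); // 1300 * 8 = 10400 bits
// SetSliceMaxSizeInBytes(1300) would therefore write 10400 into
// CODECAPI_AVEncSliceControlSize when the encoder reports slice control as supported.
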
HRESULT MFCodecVideo::RequestKeyFrame()
{
- assert(IsValid());
-
- HRESULT hr = S_OK;
+ assert(IsValid());
+
+ HRESULT hr = S_OK;
- if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main))
- {
+ if ((m_eId == MFCodecId_H264Base || m_eId == MFCodecId_H264Main)) {
#if defined(CODECAPI_AVEncVideoForceKeyFrame)
- if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncVideoForceKeyFrame) == S_OK) {
- VARIANT var = { 0 };
+ if (m_pCodecAPI->IsSupported(&CODECAPI_AVEncVideoForceKeyFrame) == S_OK) {
+ VARIANT var = { 0 };
- var.vt = VT_UI4;
- var.ulVal = 1;
- CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var));
- }
+ var.vt = VT_UI4;
+ var.ulVal = 1;
+ CHECK_HR(hr = m_pCodecAPI->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var));
+ }
#else
- CHECK_HR(hr = S_OK);
+ CHECK_HR(hr = S_OK);
#endif
- }
+ }
bail:
- return hr;
+ return hr;
}
//
// MFCodecVideo
//
MFCodecVideoH264::MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
-: MFCodecVideo(eId, eType, pMFT)
+ : MFCodecVideo(eId, eType, pMFT)
{
- assert(eId == MFCodecId_H264Base || eId == MFCodecId_H264Main);
-
- HRESULT hr = S_OK;
+ assert(eId == MFCodecId_H264Base || eId == MFCodecId_H264Main);
- if(m_pOutputType)
- {
- CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_MPEG2_PROFILE, (m_eId == MFCodecId_H264Base) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
- }
+ HRESULT hr = S_OK;
+
+ if(m_pOutputType) {
+ CHECK_HR(hr = m_pOutputType->SetUINT32(MF_MT_MPEG2_PROFILE, (m_eId == MFCodecId_H264Base) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
+ }
bail:
- assert(SUCCEEDED(hr));
+ assert(SUCCEEDED(hr));
}
MFCodecVideoH264::~MFCodecVideoH264()
{
-
+
}
MFCodecVideoH264* MFCodecVideoH264::CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
{
- MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Base, eType, pMFT);
- if(pCodec && !pCodec->IsValid())
- {
- SafeRelease(&pCodec);
- }
- return pCodec;
+ MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Base, eType, pMFT);
+ if(pCodec && !pCodec->IsValid()) {
+ SafeRelease(&pCodec);
+ }
+ return pCodec;
}
MFCodecVideoH264* MFCodecVideoH264::CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT /*= NULL*/)
{
- MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Main, eType, pMFT);
- if(pCodec && !pCodec->IsValid())
- {
- SafeRelease(&pCodec);
- }
- return pCodec;
+ MFCodecVideoH264* pCodec = new MFCodecVideoH264(MFCodecId_H264Main, eType, pMFT);
+ if(pCodec && !pCodec->IsValid()) {
+ SafeRelease(&pCodec);
+ }
+ return pCodec;
}
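
Taken together, the factory functions plus Initialize(), SetGOPSize(), SetBitRate() and RequestKeyFrame() define the encoder life-cycle used elsewhere in the plugin. A hedged end-to-end sketch; the numeric parameters are arbitrary examples, not values mandated by this code:

MFCodecVideoH264* pEncoder = MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder);
if (pEncoder) {
    HRESULT hr = pEncoder->Initialize(30, 1280, 720, 1000000); // 30 fps, 720p, ~1 Mb/s
    if (SUCCEEDED(hr)) {
        pEncoder->SetGOPSize(30 * 2);   // one IDR roughly every two seconds
        pEncoder->RequestKeyFrame();    // e.g. when the remote peer sends an RTCP FIR/PLI
        // per-frame calls then go through pEncoder->Process(yuvPtr, yuvSize, &pSampleOut)
    }
    SafeRelease(&pEncoder);
}
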
diff --git a/plugins/pluginWinMF/internals/mf_codec.h b/plugins/pluginWinMF/internals/mf_codec.h
index 51b06dc..d4f00c7 100755
--- a/plugins/pluginWinMF/internals/mf_codec.h
+++ b/plugins/pluginWinMF/internals/mf_codec.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -30,125 +30,136 @@
class MFSampleQueue;
-typedef enum MFCodecId_e
-{
- MFCodecId_H264Base,
- MFCodecId_H264Main,
- MFCodecId_AAC
+typedef enum MFCodecId_e {
+ MFCodecId_H264Base,
+ MFCodecId_H264Main,
+ MFCodecId_AAC
}
MFCodecId_t;
-typedef enum MFCodecType_e
-{
- MFCodecType_Encoder,
- MFCodecType_Decoder
+typedef enum MFCodecType_e {
+ MFCodecType_Encoder,
+ MFCodecType_Decoder
}
MFCodecType_t;
-typedef enum MFCodecMediaType_e
-{
- MFCodecMediaType_Audio,
- MFCodecMediaType_Video
+typedef enum MFCodecMediaType_e {
+ MFCodecMediaType_Audio,
+ MFCodecMediaType_Video
}
MFCodecMediaType_t;
-class MFCodec : IMFAsyncCallback
+class MFCodec : IMFAsyncCallback
{
protected:
- MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
- virtual ~MFCodec();
- HRESULT ProcessInput(IMFSample* pSample);
- HRESULT ProcessOutput(IMFSample **ppSample);
+ MFCodec(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ virtual ~MFCodec();
+ HRESULT ProcessInput(IMFSample* pSample);
+ HRESULT ProcessOutput(IMFSample **ppSample);
public:
- virtual bool IsValid();
- virtual bool IsReady();
- virtual HRESULT Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
- static enum tmedia_chroma_e GetUncompressedChroma();
- inline IMFTransform* GetMFT(){ return m_pMFT; }
- inline MFCodecId_t GetId() { return m_eId; }
- inline MFCodecType_t GetType() { return m_eType; }
- inline void setBundled(BOOL bBundled) { m_bIsBundled = bBundled; }
-
- // IUnknown
+ virtual bool IsValid();
+ virtual bool IsReady();
+ virtual HRESULT Process(const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
+ static enum tmedia_chroma_e GetUncompressedChroma();
+ inline IMFTransform* GetMFT() {
+ return m_pMFT;
+ }
+ inline MFCodecId_t GetId() {
+ return m_eId;
+ }
+ inline MFCodecType_t GetType() {
+ return m_eType;
+ }
+ inline void setBundled(BOOL bBundled) {
+ m_bIsBundled = bBundled;
+ }
+
+ // IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
STDMETHODIMP_(ULONG) AddRef();
STDMETHODIMP_(ULONG) Release();
- // IMFAsyncCallback
- STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue);
- STDMETHODIMP Invoke(IMFAsyncResult *pAsyncResult);
+ // IMFAsyncCallback
+ STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue);
+ STDMETHODIMP Invoke(IMFAsyncResult *pAsyncResult);
private:
- long m_nRefCount;
+ long m_nRefCount;
protected:
- MFCodecId_t m_eId; // Codec Id
- MFCodecType_t m_eType; // Codec type.
- MFCodecMediaType_t m_eMediaType; // Codec Media type.
- DWORD m_dwInputID; // Input stream ID.
+ MFCodecId_t m_eId; // Codec Id
+ MFCodecType_t m_eType; // Codec type.
+ MFCodecMediaType_t m_eMediaType; // Codec Media type.
+ DWORD m_dwInputID; // Input stream ID.
DWORD m_dwOutputID; // Output stream ID.
- GUID m_guidCompressedFormat; // Compressed Media format (e.g. MFVideoFormat_H264)
+ GUID m_guidCompressedFormat; // Compressed Media format (e.g. MFVideoFormat_H264)
IMFTransform *m_pMFT; // Pointer to the encoder MFT.
- ICodecAPI *m_pCodecAPI; // Pointer to CodecAPI.
+ ICodecAPI *m_pCodecAPI; // Pointer to CodecAPI.
IMFMediaType *m_pOutputType; // Output media type of the codec.
- IMFMediaType *m_pInputType; // Input media type of the codec.
+ IMFMediaType *m_pInputType; // Input media type of the codec.
- LONGLONG m_rtStart;
+ LONGLONG m_rtStart;
UINT64 m_rtDuration;
- IMFSample *m_pSampleIn;
- IMFSample *m_pSampleOut;
+ IMFSample *m_pSampleIn;
+ IMFSample *m_pSampleOut;
- MFSampleQueue *m_pSampleQueueAsyncInput;
- BOOL m_bIsBundled; // Bundled with a producer or cosumer -> do not monitor events
- BOOL m_bIsAsync;
- IMFMediaEventGenerator *m_pEventGenerator;
- BOOL m_bIsFirstFrame;
- long m_nMETransformNeedInputCount, m_nMETransformHaveOutputCount;
+ MFSampleQueue *m_pSampleQueueAsyncInput;
+    BOOL m_bIsBundled; // Bundled with a producer or consumer -> do not monitor events
+ BOOL m_bIsAsync;
+ IMFMediaEventGenerator *m_pEventGenerator;
+ BOOL m_bIsFirstFrame;
+ long m_nMETransformNeedInputCount, m_nMETransformHaveOutputCount;
};
class MFCodecVideo : public MFCodec
{
- friend class MFCodec;
+ friend class MFCodec;
protected:
- MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
- virtual ~MFCodecVideo();
+ MFCodecVideo(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ virtual ~MFCodecVideo();
public:
- virtual HRESULT Initialize(
- UINT32 nFrameRate,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nOutputBitRateInBps = 0 // Only for encoders
- );
- virtual HRESULT SetGOPSize(UINT32 nFramesCount);
- virtual HRESULT SetBitRate(UINT32 nBitRateInBps);
- virtual HRESULT SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes);
- virtual HRESULT RequestKeyFrame();
-
- virtual HRESULT IsSetSliceMaxSizeInBytesSupported(BOOL &supported);
- virtual inline UINT32 GetFrameRate() { return m_nFrameRate; }
- virtual inline UINT32 GetWidth() { return m_nWidth; }
- virtual inline UINT32 GetHeight() { return m_nHeight; }
+ virtual HRESULT Initialize(
+ UINT32 nFrameRate,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nOutputBitRateInBps = 0 // Only for encoders
+ );
+ virtual HRESULT SetGOPSize(UINT32 nFramesCount);
+ virtual HRESULT SetBitRate(UINT32 nBitRateInBps);
+ virtual HRESULT SetSliceMaxSizeInBytes(UINT32 nSliceMaxSizeInBytes);
+ virtual HRESULT RequestKeyFrame();
+
+ virtual HRESULT IsSetSliceMaxSizeInBytesSupported(BOOL &supported);
+ virtual inline UINT32 GetFrameRate() {
+ return m_nFrameRate;
+ }
+ virtual inline UINT32 GetWidth() {
+ return m_nWidth;
+ }
+ virtual inline UINT32 GetHeight() {
+ return m_nHeight;
+ }
protected:
- UINT32 m_nFrameRate;
- UINT32 m_nWidth;
- UINT32 m_nHeight;
+ UINT32 m_nFrameRate;
+ UINT32 m_nWidth;
+ UINT32 m_nHeight;
};
class MFCodecVideoH264 : public MFCodecVideo
{
protected:
- MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ MFCodecVideoH264(MFCodecId_t eId, MFCodecType_t eType, IMFTransform *pMFT = NULL);
public:
- virtual ~MFCodecVideoH264();
- static MFCodecVideoH264* CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT = NULL);
- static MFCodecVideoH264* CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ virtual ~MFCodecVideoH264();
+ static MFCodecVideoH264* CreateCodecH264Base(MFCodecType_t eType, IMFTransform *pMFT = NULL);
+ static MFCodecVideoH264* CreateCodecH264Main(MFCodecType_t eType, IMFTransform *pMFT = NULL);
protected:
diff --git a/plugins/pluginWinMF/internals/mf_codec_topology.cxx b/plugins/pluginWinMF/internals/mf_codec_topology.cxx
index 1ee2a16..1f63466 100755
--- a/plugins/pluginWinMF/internals/mf_codec_topology.cxx
+++ b/plugins/pluginWinMF/internals/mf_codec_topology.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -25,134 +25,117 @@
// MFCodecTopologySampleGrabberCB
//
-class MFCodecTopologySampleGrabberCB : public IMFSampleGrabberSinkCallback
+class MFCodecTopologySampleGrabberCB : public IMFSampleGrabberSinkCallback
{
long m_cRef;
MFCodecTopology *m_pCodecTopology;
MFCodecTopologySampleGrabberCB(MFCodecTopology *pCodecTopology)
- : m_cRef(1)
- {
- m_pCodecTopology = pCodecTopology;
- m_pCodecTopology->AddRef();
- }
- virtual ~MFCodecTopologySampleGrabberCB()
- {
- SafeRelease(&m_pCodecTopology);
- }
+ : m_cRef(1) {
+ m_pCodecTopology = pCodecTopology;
+ m_pCodecTopology->AddRef();
+ }
+ virtual ~MFCodecTopologySampleGrabberCB() {
+ SafeRelease(&m_pCodecTopology);
+ }
public:
// Create a new instance of the object.
- static HRESULT MFCodecTopologySampleGrabberCB::CreateInstance(MFCodecTopology *pCodecTopology, MFCodecTopologySampleGrabberCB **ppCB)
- {
- *ppCB = new (std::nothrow) MFCodecTopologySampleGrabberCB(pCodecTopology);
-
- if (ppCB == NULL)
- {
- return E_OUTOFMEMORY;
- }
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::QueryInterface(REFIID riid, void** ppv)
- {
- static const QITAB qit[] =
- {
- QITABENT(MFCodecTopologySampleGrabberCB, IMFSampleGrabberSinkCallback),
- QITABENT(MFCodecTopologySampleGrabberCB, IMFClockStateSink),
- { 0 }
- };
- return QISearch(this, qit, riid, ppv);
- }
-
- STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::AddRef()
- {
- return InterlockedIncrement(&m_cRef);
- }
-
- STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::Release()
- {
- ULONG cRef = InterlockedDecrement(&m_cRef);
- if (cRef == 0)
- {
- delete this;
- }
- return cRef;
-
- }
-
- // IMFClockStateSink methods
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStop(MFTIME hnsSystemTime)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockPause(MFTIME hnsSystemTime)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
- return S_OK;
- }
-
- // IMFSampleGrabberSink methods.
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock)
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnSetPresentationClock");
- return S_OK;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnProcessSample(
- REFGUID guidMajorMediaType, DWORD dwSampleFlags,
- LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
- DWORD dwSampleSize)
- {
- HRESULT hr = S_OK;
- IMFSample *pSample = NULL;
- IMFMediaBuffer* pMediaBuffer = NULL;
- BYTE* _pcBufferPtr = NULL;
-
- CHECK_HR(hr = MFUtils::CreateMediaSample(dwSampleSize, &pSample));
- CHECK_HR(hr = pSample->SetSampleTime(llSampleTime));
- CHECK_HR(hr = pSample->SetSampleDuration(llSampleDuration));
- CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
- CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
- memcpy(_pcBufferPtr, pSampleBuffer, dwSampleSize);
- CHECK_HR(hr = pMediaBuffer->SetCurrentLength(dwSampleSize));
- CHECK_HR(hr = pMediaBuffer->Unlock());
-
- m_pCodecTopology->m_SampleQueue.Queue(pSample); // thread-safe
-
+ static HRESULT MFCodecTopologySampleGrabberCB::CreateInstance(MFCodecTopology *pCodecTopology, MFCodecTopologySampleGrabberCB **ppCB) {
+ *ppCB = new (std::nothrow) MFCodecTopologySampleGrabberCB(pCodecTopology);
+
+ if (*ppCB == NULL) {
+ return E_OUTOFMEMORY;
+ }
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::QueryInterface(REFIID riid, void** ppv) {
+ static const QITAB qit[] = {
+ QITABENT(MFCodecTopologySampleGrabberCB, IMFSampleGrabberSinkCallback),
+ QITABENT(MFCodecTopologySampleGrabberCB, IMFClockStateSink),
+ { 0 }
+ };
+ return QISearch(this, qit, riid, ppv);
+ }
+
+ STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::AddRef() {
+ return InterlockedIncrement(&m_cRef);
+ }
+
+ STDMETHODIMP_(ULONG) MFCodecTopologySampleGrabberCB::Release() {
+ ULONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ delete this;
+ }
+ return cRef;
+
+ }
+
+ // IMFClockStateSink methods
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockStop(MFTIME hnsSystemTime) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockPause(MFTIME hnsSystemTime) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
+ return S_OK;
+ }
+
+ // IMFSampleGrabberSink methods.
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock) {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnSetPresentationClock");
+ return S_OK;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnProcessSample(
+ REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+ LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+ DWORD dwSampleSize) {
+ HRESULT hr = S_OK;
+ IMFSample *pSample = NULL;
+ IMFMediaBuffer* pMediaBuffer = NULL;
+ BYTE* _pcBufferPtr = NULL;
+
+ CHECK_HR(hr = MFUtils::CreateMediaSample(dwSampleSize, &pSample));
+ CHECK_HR(hr = pSample->SetSampleTime(llSampleTime));
+ CHECK_HR(hr = pSample->SetSampleDuration(llSampleDuration));
+ CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+ CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
+ memcpy(_pcBufferPtr, pSampleBuffer, dwSampleSize);
+ CHECK_HR(hr = pMediaBuffer->SetCurrentLength(dwSampleSize));
+ CHECK_HR(hr = pMediaBuffer->Unlock());
+
+ m_pCodecTopology->m_SampleQueue.Queue(pSample); // thread-safe
+
bail:
- SafeRelease(&pSample);
- SafeRelease(&pMediaBuffer);
- return hr;
- }
-
- STDMETHODIMP MFCodecTopologySampleGrabberCB::OnShutdown()
- {
- TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnShutdown");
- return S_OK;
- }
+ SafeRelease(&pSample);
+ SafeRelease(&pMediaBuffer);
+ return hr;
+ }
+
+ STDMETHODIMP MFCodecTopologySampleGrabberCB::OnShutdown() {
+ TSK_DEBUG_INFO("MFCodecTopologySampleGrabberCB::OnShutdown");
+ return S_OK;
+ }
};
//
@@ -161,36 +144,36 @@ bail:
MFCodecTopology::MFCodecTopology(MFCodec* pCodec, HRESULT &hr)
-: m_nRefCount(1)
-, m_bInitialized(FALSE)
-, m_bStarted(FALSE)
-, m_pCodec(NULL)
-, m_pSource(NULL)
-, m_pSession(NULL)
-, m_pTopologyFull(NULL)
-, m_pTopologyPartial(NULL)
-, m_pOutputType(NULL)
-, m_pInputType(NULL)
-, m_pGrabberCallback(NULL)
-, m_pGrabberActivate(NULL)
-, m_pTread(NULL)
+ : m_nRefCount(1)
+ , m_bInitialized(FALSE)
+ , m_bStarted(FALSE)
+ , m_pCodec(NULL)
+ , m_pSource(NULL)
+ , m_pSession(NULL)
+ , m_pTopologyFull(NULL)
+ , m_pTopologyPartial(NULL)
+ , m_pOutputType(NULL)
+ , m_pInputType(NULL)
+ , m_pGrabberCallback(NULL)
+ , m_pGrabberActivate(NULL)
+ , m_pTread(NULL)
{
- hr = S_OK;
+ hr = S_OK;
- if(!pCodec)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!pCodec) {
+ CHECK_HR(hr = E_POINTER);
+ }
- m_pCodec = pCodec;
- m_pCodec->AddRef();
+ m_pCodec = pCodec;
+ m_pCodec->AddRef();
-bail: ;
+bail:
+ ;
}
MFCodecTopology::~MFCodecTopology()
{
- DeInitialize();
+ DeInitialize();
}
ULONG MFCodecTopology::AddRef()
@@ -201,8 +184,7 @@ ULONG MFCodecTopology::AddRef()
ULONG MFCodecTopology::Release()
{
ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -211,245 +193,226 @@ ULONG MFCodecTopology::Release()
HRESULT MFCodecTopology::QueryInterface(REFIID iid, void** ppv)
{
- return E_NOTIMPL;
+ return E_NOTIMPL;
}
HRESULT MFCodecTopology::Start()
{
- HRESULT hr = S_OK;
-
- if(m_bStarted)
- {
- return S_OK;
- }
-
- if(!m_bInitialized)
- {
- CHECK_HR(hr = E_FAIL);
- }
-
- CHECK_HR(hr = MFUtils::RunSession(m_pSession, m_pTopologyFull));
-
- // Start asynchronous watcher thread
- m_bStarted = TRUE;
- int ret = tsk_thread_create(&m_pTread, MFCodecTopology::RunSessionThread, this);
- if(ret != 0)
- {
- TSK_DEBUG_ERROR("Failed to create thread");
- m_bStarted = FALSE;
- if(m_pTread)
- {
- tsk_thread_join(&m_pTread);
- }
- MFUtils::ShutdownSession(m_pSession, m_pSource);
- CHECK_HR(hr = E_FAIL);
- }
-
- // FIXME
- Sleep(2000);
+ HRESULT hr = S_OK;
+
+ if(m_bStarted) {
+ return S_OK;
+ }
+
+ if(!m_bInitialized) {
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ CHECK_HR(hr = MFUtils::RunSession(m_pSession, m_pTopologyFull));
+
+ // Start asynchronous watcher thread
+ m_bStarted = TRUE;
+ int ret = tsk_thread_create(&m_pTread, MFCodecTopology::RunSessionThread, this);
+ if(ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ m_bStarted = FALSE;
+ if(m_pTread) {
+ tsk_thread_join(&m_pTread);
+ }
+ MFUtils::ShutdownSession(m_pSession, m_pSource);
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ // FIXME
+ Sleep(2000);
bail:
- return hr;
+ return hr;
}
HRESULT MFCodecTopology::Stop()
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(!m_bStarted)
- {
- return S_OK;
- }
+ if(!m_bStarted) {
+ return S_OK;
+ }
- m_bStarted = FALSE;
+ m_bStarted = FALSE;
hr = MFUtils::ShutdownSession(m_pSession, NULL); // stop session to wakeup the asynchronous thread
- if(m_pTread)
- {
+ if(m_pTread) {
tsk_thread_join(&m_pTread);
}
hr = MFUtils::ShutdownSession(NULL, m_pSource);
-
- return hr;
+
+ return hr;
}
HRESULT MFCodecTopology::Initialize()
{
- HRESULT hr = S_OK;
- IMFAttributes* pSessionAttributes = NULL;
+ HRESULT hr = S_OK;
+ IMFAttributes* pSessionAttributes = NULL;
- if(m_bInitialized)
- {
- CHECK_HR(hr = E_FAIL);
- }
+ if(m_bInitialized) {
+ CHECK_HR(hr = E_FAIL);
+ }
- // Set session attributes
- CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
- CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+ // Set session attributes
+ CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+ CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
- // Get input and output type
- CHECK_HR(hr = m_pCodec->GetInputType(&m_pInputType));
- CHECK_HR(hr = m_pCodec->GetOutputType(&m_pOutputType));
+ // Get input and output type
+ CHECK_HR(hr = m_pCodec->GetInputType(&m_pInputType));
+ CHECK_HR(hr = m_pCodec->GetOutputType(&m_pOutputType));
- // Create custom source
- CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&m_pSource, m_pInputType));
+ // Create custom source
+ CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&m_pSource, m_pInputType));
- // Create the sample grabber sink.
- CHECK_HR(hr = MFCodecTopologySampleGrabberCB::CreateInstance(this, &m_pGrabberCallback));
- CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(m_pOutputType, m_pGrabberCallback, &m_pGrabberActivate));
+ // Create the sample grabber sink.
+ CHECK_HR(hr = MFCodecTopologySampleGrabberCB::CreateInstance(this, &m_pGrabberCallback));
+ CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(m_pOutputType, m_pGrabberCallback, &m_pGrabberActivate));
- // To run as fast as possible, set this attribute (requires Windows 7 or later):
- CHECK_HR(hr = m_pGrabberActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+ // To run as fast as possible, set this attribute (requires Windows 7 or later):
+ CHECK_HR(hr = m_pGrabberActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &m_pSession));
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &m_pSession));
- // Create the topology.
- CHECK_HR(hr = MFUtils::CreateTopology(
- m_pSource,
- m_pCodec->GetMFT(),
- m_pGrabberActivate,
- NULL, // no preview
- m_pOutputType,
- &m_pTopologyPartial));
- // Resolve topology (adds video processors if needed).
- CHECK_HR(hr = MFUtils::ResolveTopology(m_pTopologyPartial, &m_pTopologyFull));
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(
+ m_pSource,
+ m_pCodec->GetMFT(),
+ m_pGrabberActivate,
+ NULL, // no preview
+ m_pOutputType,
+ &m_pTopologyPartial));
+ // Resolve topology (adds video processors if needed).
+ CHECK_HR(hr = MFUtils::ResolveTopology(m_pTopologyPartial, &m_pTopologyFull));
- m_bInitialized = TRUE;
+ m_bInitialized = TRUE;
bail:
- SafeRelease(&pSessionAttributes);
+ SafeRelease(&pSessionAttributes);
- if(FAILED(hr))
- {
- DeInitialize();
- }
+ if(FAILED(hr)) {
+ DeInitialize();
+ }
- return hr;
+ return hr;
}
void* TSK_STDCALL MFCodecTopology::RunSessionThread(void *pArg)
{
- MFCodecTopology *pSelf = (MFCodecTopology *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
- MediaEventType met;
-
- TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - ENTER");
-
- while(pSelf->isStarted())
- {
- CHECK_HR(hr = pSelf->m_pSession->GetEvent(0, &pEvent));
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- CHECK_HR(hr = pEvent->GetType(&met));
-
- if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
- if (met == MESessionEnded)
- {
- break;
- }
- SafeRelease(&pEvent);
- }
+ MFCodecTopology *pSelf = (MFCodecTopology *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - ENTER");
+
+ while(pSelf->isStarted()) {
+ CHECK_HR(hr = pSelf->m_pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded) {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread (MFCodecTopology) - EXIT");
- return NULL;
+ return NULL;
}
HRESULT MFCodecTopology::DeInitialize()
{
- Stop();
-
- SafeRelease(&m_pCodec);
- SafeRelease(&m_pSource);
- SafeRelease(&m_pCodec);
- SafeRelease(&m_pSession);
- SafeRelease(&m_pTopologyFull);
- SafeRelease(&m_pTopologyPartial);
- SafeRelease(&m_pOutputType);
- SafeRelease(&m_pInputType);
- SafeRelease(&m_pGrabberCallback);
- SafeRelease(&m_pGrabberActivate);
-
- if(m_pTread)
- {
- tsk_thread_join(&m_pTread);
- }
+ Stop();
+
+ SafeRelease(&m_pCodec);
+ SafeRelease(&m_pSource);
+ SafeRelease(&m_pCodec);
+ SafeRelease(&m_pSession);
+ SafeRelease(&m_pTopologyFull);
+ SafeRelease(&m_pTopologyPartial);
+ SafeRelease(&m_pOutputType);
+ SafeRelease(&m_pInputType);
+ SafeRelease(&m_pGrabberCallback);
+ SafeRelease(&m_pGrabberActivate);
+
+ if(m_pTread) {
+ tsk_thread_join(&m_pTread);
+ }
- m_SampleQueue.Clear();
+ m_SampleQueue.Clear();
- m_bInitialized = FALSE;
+ m_bInitialized = FALSE;
- return S_OK;
+ return S_OK;
}
HRESULT MFCodecTopology::ProcessInput(IMFSample* pSample)
{
- HRESULT hr = S_OK;
- IMFMediaBuffer* pMediaBuffer = NULL;
- BYTE* _pcBufferPtr = NULL;
-
- if(!pSample)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(m_pCodec->GetMediaType() != MFCodecMediaType_Video)
- {
- CHECK_HR(hr = E_NOTIMPL);
- }
-
- if(!m_bStarted)
- {
- CHECK_HR(hr = Start());
- }
-
- CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
-
- DWORD dwDataLength = 0;
- BOOL bLocked = FALSE;
- CHECK_HR(hr = pMediaBuffer->GetCurrentLength(&dwDataLength));
- bLocked = TRUE;
- if(dwDataLength > 0)
- {
- CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
- CHECK_HR(hr = m_pSource->CopyVideoBuffer(
- dynamic_cast<MFCodecVideo*>(m_pCodec)->GetWidth(),
- dynamic_cast<MFCodecVideo*>(m_pCodec)->GetHeight(),
- _pcBufferPtr, dwDataLength));
- }
+ HRESULT hr = S_OK;
+ IMFMediaBuffer* pMediaBuffer = NULL;
+ BYTE* _pcBufferPtr = NULL;
+
+ if(!pSample) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(m_pCodec->GetMediaType() != MFCodecMediaType_Video) {
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+
+ if(!m_bStarted) {
+ CHECK_HR(hr = Start());
+ }
+
+ CHECK_HR(hr = pSample->GetBufferByIndex(0, &pMediaBuffer));
+
+ DWORD dwDataLength = 0;
+ BOOL bLocked = FALSE;
+ CHECK_HR(hr = pMediaBuffer->GetCurrentLength(&dwDataLength));
+ bLocked = TRUE;
+ if(dwDataLength > 0) {
+ CHECK_HR(hr = pMediaBuffer->Lock(&_pcBufferPtr, NULL, NULL));
+ CHECK_HR(hr = m_pSource->CopyVideoBuffer(
+ dynamic_cast<MFCodecVideo*>(m_pCodec)->GetWidth(),
+ dynamic_cast<MFCodecVideo*>(m_pCodec)->GetHeight(),
+ _pcBufferPtr, dwDataLength));
+ }
bail:
- if(bLocked)
- {
- pMediaBuffer->Unlock();
- }
- SafeRelease(&pMediaBuffer);
- return hr;
+ if(bLocked) {
+ pMediaBuffer->Unlock();
+ }
+ SafeRelease(&pMediaBuffer);
+ return hr;
}
HRESULT MFCodecTopology::ProcessOutput(IMFSample **ppSample)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if(!ppSample)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!ppSample) {
+ CHECK_HR(hr = E_POINTER);
+ }
- if(!m_SampleQueue.IsEmpty())
- {
- CHECK_HR(hr = m_SampleQueue.Dequeue(ppSample)); // thread-safe
- }
+ if(!m_SampleQueue.IsEmpty()) {
+ CHECK_HR(hr = m_SampleQueue.Dequeue(ppSample)); // thread-safe
+ }
bail:
- return hr;
+ return hr;
}
//
@@ -458,11 +421,11 @@ bail:
MFCodecVideoTopology::MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr)
-: MFCodecTopology(pCodec, hr)
-, m_nWidth(0)
-, m_nHeight(0)
+ : MFCodecTopology(pCodec, hr)
+ , m_nWidth(0)
+ , m_nHeight(0)
{
- assert(pCodec->GetMediaType() == MFCodecMediaType_Video);
+ assert(pCodec->GetMediaType() == MFCodecMediaType_Video);
}
MFCodecVideoTopology::~MFCodecVideoTopology()
diff --git a/plugins/pluginWinMF/internals/mf_codec_topology.h b/plugins/pluginWinMF/internals/mf_codec_topology.h
index c5d2f34..6a4bb94 100755
--- a/plugins/pluginWinMF/internals/mf_codec_topology.h
+++ b/plugins/pluginWinMF/internals/mf_codec_topology.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -28,59 +28,63 @@ class MFCodecTopologySampleGrabberCB;
class MFCodecTopology : IUnknown
{
- friend class MFCodecTopologySampleGrabberCB;
+ friend class MFCodecTopologySampleGrabberCB;
public:
- MFCodecTopology(MFCodec* pCodec, HRESULT &hr);
- virtual ~MFCodecTopology();
+ MFCodecTopology(MFCodec* pCodec, HRESULT &hr);
+ virtual ~MFCodecTopology();
- virtual HRESULT Initialize();
- virtual HRESULT DeInitialize();
+ virtual HRESULT Initialize();
+ virtual HRESULT DeInitialize();
- virtual HRESULT ProcessInput(IMFSample* pSample);
- virtual HRESULT ProcessOutput(IMFSample **ppSample);
+ virtual HRESULT ProcessInput(IMFSample* pSample);
+ virtual HRESULT ProcessOutput(IMFSample **ppSample);
- // IUnknown
+ // IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
STDMETHODIMP_(ULONG) AddRef();
STDMETHODIMP_(ULONG) Release();
- inline BOOL isStarted() { return m_bStarted; }
- inline BOOL isInitialized() { return m_bInitialized; }
+ inline BOOL isStarted() {
+ return m_bStarted;
+ }
+ inline BOOL isInitialized() {
+ return m_bInitialized;
+ }
private:
- static void* TSK_STDCALL RunSessionThread(void *pArg);
+ static void* TSK_STDCALL RunSessionThread(void *pArg);
protected:
- HRESULT Start();
- HRESULT Stop();
+ HRESULT Start();
+ HRESULT Stop();
private:
- long m_nRefCount;
+ long m_nRefCount;
protected:
- BOOL m_bInitialized;
- BOOL m_bStarted;
- MFCodec* m_pCodec;
- CMFSource *m_pSource;
- IMFMediaSession *m_pSession;
+ BOOL m_bInitialized;
+ BOOL m_bStarted;
+ MFCodec* m_pCodec;
+ CMFSource *m_pSource;
+ IMFMediaSession *m_pSession;
IMFTopology *m_pTopologyFull;
- IMFTopology *m_pTopologyPartial;
- IMFMediaType *m_pOutputType;
- IMFMediaType *m_pInputType;
- MFCodecTopologySampleGrabberCB *m_pGrabberCallback;
+ IMFTopology *m_pTopologyPartial;
+ IMFMediaType *m_pOutputType;
+ IMFMediaType *m_pInputType;
+ MFCodecTopologySampleGrabberCB *m_pGrabberCallback;
IMFActivate *m_pGrabberActivate;
- tsk_thread_handle_t* m_pTread;
- SampleQueue m_SampleQueue;
+ tsk_thread_handle_t* m_pTread;
+ SampleQueue m_SampleQueue;
};
class MFCodecVideoTopology : public MFCodecTopology
{
public:
- MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr);
- virtual ~MFCodecVideoTopology();
+ MFCodecVideoTopology(MFCodec* pCodec, HRESULT &hr);
+ virtual ~MFCodecVideoTopology();
private:
- UINT32 m_nWidth, m_nHeight;
+ UINT32 m_nWidth, m_nHeight;
};
diff --git a/plugins/pluginWinMF/internals/mf_custom_src.cxx b/plugins/pluginWinMF/internals/mf_custom_src.cxx
index 1de9904..186887e 100755
--- a/plugins/pluginWinMF/internals/mf_custom_src.cxx
+++ b/plugins/pluginWinMF/internals/mf_custom_src.cxx
@@ -1,20 +1,20 @@
-/*
+/*
* Copyright (C) Microsoft Corporation. All rights reserved.
* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -63,7 +63,7 @@ LONGLONG BufferSizeFromAudioDuration(const WAVEFORMATEX *pWav, LONGLONG duration
HRESULT CMFSource_CreateInstance(REFIID iid, void **ppMFT)
{
- return CMFSource::CreateInstance(iid, ppMFT);
+ return CMFSource::CreateInstance(iid, ppMFT);
}
@@ -77,30 +77,26 @@ HRESULT CMFSource_CreateInstance(REFIID iid, void **ppMFT)
HRESULT CMFSource::CreateInstance(REFIID iid, void **ppSource) // Called when source used as plugin
{
- return CreateInstanceEx(iid, ppSource, NULL);
+ return CreateInstanceEx(iid, ppSource, NULL);
}
HRESULT CMFSource::CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType) // Called when source directly called
{
- if (ppSource == NULL)
- {
+ if (ppSource == NULL) {
return E_POINTER;
}
HRESULT hr = S_OK;
CMFSource *pSource = new (std::nothrow) CMFSource(hr, pMediaType); // Created with ref count = 1.
- if (pSource == NULL)
- {
+ if (pSource == NULL) {
return E_OUTOFMEMORY;
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pSource->QueryInterface(iid, ppSource);
- if(SUCCEEDED(hr))
- {
- ((CMFSource*)(*ppSource))->AddRef();
- }
+ if(SUCCEEDED(hr)) {
+ ((CMFSource*)(*ppSource))->AddRef();
+ }
}
SafeRelease(&pSource);
@@ -115,22 +111,21 @@ HRESULT CMFSource::CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *p
//-------------------------------------------------------------------
CMFSource::CMFSource(HRESULT& hr, IMFMediaType *pMediaType)
- : m_nRefCount(1),
- m_pEventQueue(NULL),
- m_pPresentationDescriptor(NULL),
- m_IsShutdown(FALSE),
- m_state(STATE_STOPPED),
- m_pStream(NULL),
- m_pMediaType(NULL)
+ : m_nRefCount(1),
+ m_pEventQueue(NULL),
+ m_pPresentationDescriptor(NULL),
+ m_IsShutdown(FALSE),
+ m_state(STATE_STOPPED),
+ m_pStream(NULL),
+ m_pMediaType(NULL)
{
// Create the media event queue.
hr = MFCreateEventQueue(&m_pEventQueue);
- if(pMediaType)
- {
- m_pMediaType = pMediaType;
- pMediaType->AddRef();
- }
+ if(pMediaType) {
+ m_pMediaType = pMediaType;
+ pMediaType->AddRef();
+ }
InitializeCriticalSection(&m_critSec);
}
@@ -145,7 +140,7 @@ CMFSource::~CMFSource()
{
assert(m_IsShutdown);
assert(m_nRefCount == 0);
- SafeRelease(&m_pMediaType);
+ SafeRelease(&m_pMediaType);
DeleteCriticalSection(&m_critSec);
}
@@ -154,26 +149,22 @@ CMFSource::~CMFSource()
HRESULT CMFSource::CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize)
{
- if(!pBufferPtr)
- {
- TSK_DEBUG_ERROR("Invalid buffer pointer");
- return E_POINTER;
- }
-
- if(!nWidth || !nHeight || !nBufferSize)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return E_INVALIDARG;
- }
- if(m_pStream)
- {
- return m_pStream->CopyVideoBuffer(nWidth, nHeight, pBufferPtr, nBufferSize);
- }
- else
- {
- TSK_DEBUG_ERROR("No stream associated to this source");
- return E_NOT_VALID_STATE;
- }
+ if(!pBufferPtr) {
+ TSK_DEBUG_ERROR("Invalid buffer pointer");
+ return E_POINTER;
+ }
+
+ if(!nWidth || !nHeight || !nBufferSize) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return E_INVALIDARG;
+ }
+ if(m_pStream) {
+ return m_pStream->CopyVideoBuffer(nWidth, nHeight, pBufferPtr, nBufferSize);
+ }
+ else {
+ TSK_DEBUG_ERROR("No stream associated to this source");
+ return E_NOT_VALID_STATE;
+ }
}
// IUnknown methods
@@ -186,8 +177,7 @@ ULONG CMFSource::AddRef()
ULONG CMFSource::Release()
{
ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -196,8 +186,7 @@ ULONG CMFSource::Release()
HRESULT CMFSource::QueryInterface(REFIID iid, void** ppv)
{
- static const QITAB qit[] =
- {
+ static const QITAB qit[] = {
QITABENT(CMFSource, IMFMediaEventGenerator),
QITABENT(CMFSource, IMFMediaSource),
{ 0 }
@@ -220,8 +209,7 @@ HRESULT CMFSource::BeginGetEvent(IMFAsyncCallback* pCallback, IUnknown* punkStat
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->BeginGetEvent(pCallback, punkState);
}
@@ -238,8 +226,7 @@ HRESULT CMFSource::EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** ppEvent)
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->EndGetEvent(pResult, ppEvent);
}
@@ -263,16 +250,14 @@ HRESULT CMFSource::GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent)
// Check shutdown
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
pQueue = m_pEventQueue;
pQueue->AddRef();
}
LeaveCriticalSection(&m_critSec);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pQueue->GetEvent(dwFlags, ppEvent);
}
@@ -288,8 +273,7 @@ HRESULT CMFSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRES
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
}
@@ -309,8 +293,7 @@ HRESULT CMFSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType, HRES
HRESULT CMFSource::CreatePresentationDescriptor(IMFPresentationDescriptor** ppPresentationDescriptor)
{
- if (ppPresentationDescriptor == NULL)
- {
+ if (ppPresentationDescriptor == NULL) {
return E_POINTER;
}
@@ -320,17 +303,14 @@ HRESULT CMFSource::CreatePresentationDescriptor(IMFPresentationDescriptor** ppPr
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
- if (m_pPresentationDescriptor == NULL)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pPresentationDescriptor == NULL) {
hr = CreatePresentationDescriptor();
}
}
// Clone our default presentation descriptor.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pPresentationDescriptor->Clone(ppPresentationDescriptor);
}
@@ -347,8 +327,7 @@ HRESULT CMFSource::CreatePresentationDescriptor(IMFPresentationDescriptor** ppPr
HRESULT CMFSource::GetCharacteristics(DWORD* pdwCharacteristics)
{
- if (pdwCharacteristics == NULL)
- {
+ if (pdwCharacteristics == NULL) {
return E_POINTER;
}
@@ -358,8 +337,7 @@ HRESULT CMFSource::GetCharacteristics(DWORD* pdwCharacteristics)
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
*pdwCharacteristics = MFMEDIASOURCE_CAN_PAUSE | MFMEDIASOURCE_IS_LIVE;
}
@@ -377,7 +355,7 @@ HRESULT CMFSource::Start(
IMFPresentationDescriptor* pPresentationDescriptor,
const GUID* pguidTimeFormat,
const PROPVARIANT* pvarStartPosition
- )
+)
{
HRESULT hr = S_OK;
LONGLONG llStartOffset = 0;
@@ -392,14 +370,12 @@ HRESULT CMFSource::Start(
// Check parameters.
// Start position and presentation descriptor cannot be NULL.
- if (pvarStartPosition == NULL || pPresentationDescriptor == NULL)
- {
+ if (pvarStartPosition == NULL || pPresentationDescriptor == NULL) {
return E_INVALIDARG;
}
// Check the time format. Must be "reference time" units.
- if ((pguidTimeFormat != NULL) && (*pguidTimeFormat != GUID_NULL))
- {
+ if ((pguidTimeFormat != NULL) && (*pguidTimeFormat != GUID_NULL)) {
// Unrecognized time format GUID.
return MF_E_UNSUPPORTED_TIME_FORMAT;
}
@@ -410,33 +386,27 @@ HRESULT CMFSource::Start(
CHECK_HR(hr = CheckShutdown());
// Check the start position.
- if (pvarStartPosition->vt == VT_I8)
- {
+ if (pvarStartPosition->vt == VT_I8) {
// Start position is given in pvarStartPosition in 100-ns units.
llStartOffset = pvarStartPosition->hVal.QuadPart;
- if (m_state != STATE_STOPPED)
- {
+ if (m_state != STATE_STOPPED) {
// Source is running or paused, so this is a seek.
bIsSeek = TRUE;
}
}
- else if (pvarStartPosition->vt == VT_EMPTY)
- {
+ else if (pvarStartPosition->vt == VT_EMPTY) {
// Start position is "current position".
// For stopped, that means 0. Otherwise, use the current position.
- if (m_state == STATE_STOPPED)
- {
+ if (m_state == STATE_STOPPED) {
llStartOffset = 0;
}
- else
- {
+ else {
llStartOffset = GetCurrentPosition();
bIsRestartFromCurrentPosition = TRUE;
}
}
- else
- {
+ else {
// We don't support this time format.
hr = MF_E_UNSUPPORTED_TIME_FORMAT;
goto bail;
@@ -457,12 +427,10 @@ HRESULT CMFSource::Start(
var.hVal.QuadPart = llStartOffset;
// Send the source event.
- if (bIsSeek)
- {
+ if (bIsSeek) {
CHECK_HR(hr = QueueEvent(MESourceSeeked, GUID_NULL, hr, &var));
}
- else
- {
+ else {
// For starting, if we are RESTARTING from the current position and our
// previous state was running/paused, then we need to add the
// MF_EVENT_SOURCE_ACTUAL_START attribute to the event. This requires
@@ -472,8 +440,7 @@ HRESULT CMFSource::Start(
CHECK_HR(hr = MFCreateMediaEvent(MESourceStarted, GUID_NULL, hr, &var, &pEvent));
// For restarts, set the actual start time as an attribute.
- if (bIsRestartFromCurrentPosition)
- {
+ if (bIsRestartFromCurrentPosition) {
CHECK_HR(hr = pEvent->SetUINT64(MF_EVENT_SOURCE_ACTUAL_START, llStartOffset));
}
@@ -484,31 +451,26 @@ HRESULT CMFSource::Start(
bQueuedStartEvent = TRUE;
// Send the stream event.
- if (m_pStream)
- {
- if (bIsSeek)
- {
+ if (m_pStream) {
+ if (bIsSeek) {
CHECK_HR(hr = m_pStream->QueueEvent(MEStreamSeeked, GUID_NULL, hr, &var));
}
- else
- {
+ else {
CHECK_HR(hr = m_pStream->QueueEvent(MEStreamStarted, GUID_NULL, hr, &var));
}
}
- if (bIsSeek)
- {
+ if (bIsSeek) {
// For seek requests, flush any queued samples.
CHECK_HR(hr = m_pStream->Flush());
}
- else
- {
+ else {
// Otherwise, deliver any queued samples.
CHECK_HR(hr = m_pStream->DeliverQueuedSamples());
}
- // Initialize Stream parameters
- CHECK_HR(hr = m_pStream->InitializeParams());
+ // Initialize Stream parameters
+ CHECK_HR(hr = m_pStream->InitializeParams());
m_state = STATE_STARTED;
@@ -522,8 +484,7 @@ bail:
// event (with a success code), then we need to raise an
// MEError event.
- if (FAILED(hr) && bQueuedStartEvent)
- {
+ if (FAILED(hr) && bQueuedStartEvent) {
hr = QueueEvent(MEError, GUID_NULL, hr, &var);
}
@@ -531,7 +492,7 @@ bail:
SafeRelease(&pEvent);
LeaveCriticalSection(&m_critSec);
-
+
return hr;
}
@@ -550,31 +511,25 @@ HRESULT CMFSource::Pause()
hr = CheckShutdown();
// Pause is only allowed from started state.
- if (SUCCEEDED(hr))
- {
- if (m_state != STATE_STARTED)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_state != STATE_STARTED) {
hr = MF_E_INVALID_STATE_TRANSITION;
}
}
// Send the appropriate events.
- if (SUCCEEDED(hr))
- {
- if (m_pStream)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pStream) {
hr = m_pStream->QueueEvent(MEStreamPaused, GUID_NULL, S_OK, NULL);
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = QueueEvent(MESourcePaused, GUID_NULL, S_OK, NULL);
}
// Update our state.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
m_state = STATE_PAUSED;
}
@@ -597,8 +552,7 @@ HRESULT CMFSource::Stop()
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Update our state.
m_state = STATE_STOPPED;
@@ -610,15 +564,12 @@ HRESULT CMFSource::Stop()
// Queue events.
//
- if (SUCCEEDED(hr))
- {
- if (m_pStream)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pStream) {
hr = m_pStream->QueueEvent(MEStreamStopped, GUID_NULL, S_OK, NULL);
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = QueueEvent(MESourceStopped, GUID_NULL, S_OK, NULL);
}
@@ -645,17 +596,14 @@ HRESULT CMFSource::Shutdown()
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Shut down the stream object.
- if (m_pStream)
- {
+ if (m_pStream) {
(void)m_pStream->Shutdown();
}
// Shut down the event queue.
- if (m_pEventQueue)
- {
+ if (m_pEventQueue) {
(void)m_pEventQueue->Shutdown();
}
@@ -694,44 +642,39 @@ HRESULT CMFSource::CreatePresentationDescriptor()
// Create the stream descriptor.
hr = MFCreateStreamDescriptor(
- 0, // stream identifier
- 1, // Number of media types.
- &m_pMediaType, // Array of media types
- &pStreamDescriptor
- );
+ 0, // stream identifier
+ 1, // Number of media types.
+ &m_pMediaType, // Array of media types
+ &pStreamDescriptor
+ );
// Set the default media type on the media type handler.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pStreamDescriptor->GetMediaTypeHandler(&pHandler);
}
-
- if (SUCCEEDED(hr))
- {
- hr = pHandler->SetCurrentMediaType(m_pMediaType);
+
+ if (SUCCEEDED(hr)) {
+ hr = pHandler->SetCurrentMediaType(m_pMediaType);
}
// Create the presentation descriptor.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFCreatePresentationDescriptor(
- 1, // Number of stream descriptors
- &pStreamDescriptor, // Array of stream descriptors
- &m_pPresentationDescriptor
- );
+ 1, // Number of stream descriptors
+ &pStreamDescriptor, // Array of stream descriptors
+ &m_pPresentationDescriptor
+ );
}
// Select the first stream
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pPresentationDescriptor->SelectStream(0);
}
// Set the file/stream duration as an attribute on the presentation descriptor.
- if (SUCCEEDED(hr))
- {
- hr = m_pPresentationDescriptor->SetUINT64(MF_PD_DURATION, (UINT64)ULLONG_MAX);
+ if (SUCCEEDED(hr)) {
+ hr = m_pPresentationDescriptor->SetUINT64(MF_PD_DURATION, (UINT64)ULLONG_MAX);
}
-
+
SafeRelease(&pStreamDescriptor);
SafeRelease(&pHandler);
return hr;
@@ -764,7 +707,7 @@ HRESULT CMFSource::ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD
IMFStreamDescriptor *pStreamDescriptor = NULL;
IMFMediaTypeHandler *pHandler = NULL;
IMFMediaType *pMediaType = NULL;
- GUID majorType;
+ GUID majorType;
DWORD cStreamDescriptors = 0;
BOOL fSelected = FALSE;
@@ -772,85 +715,71 @@ HRESULT CMFSource::ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD
// Make sure there is only one stream.
hr = pPD->GetStreamDescriptorCount(&cStreamDescriptors);
- if (SUCCEEDED(hr))
- {
- if (cStreamDescriptors != 1)
- {
+ if (SUCCEEDED(hr)) {
+ if (cStreamDescriptors != 1) {
hr = MF_E_UNSUPPORTED_REPRESENTATION;
}
}
// Get the stream descriptor.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pStreamDescriptor);
}
// Make sure it's selected. (This media source has only one stream, so it
// is not useful to deselect the only stream.)
- if (SUCCEEDED(hr))
- {
- if (!fSelected)
- {
+ if (SUCCEEDED(hr)) {
+ if (!fSelected) {
hr = MF_E_UNSUPPORTED_REPRESENTATION;
}
}
// Get the media type handler, so that we can get the media type.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pStreamDescriptor->GetMediaTypeHandler(&pHandler);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pHandler->GetCurrentMediaType(&pMediaType);
}
- hr = pMediaType->GetMajorType(&majorType);
-
- if (SUCCEEDED(hr))
- {
- if(majorType == MFMediaType_Video)
- {
- if (SUCCEEDED(hr))
- {
- hr = MFUtils::ValidateVideoFormat(pMediaType);
- }
- }
- else
- {
- WAVEFORMATEX *pFormat = NULL;
- UINT32 cbWaveFormat = 0;
-
- if (SUCCEEDED(hr))
- {
- hr = MFCreateWaveFormatExFromMFMediaType(
- pMediaType,
- &pFormat,
- &cbWaveFormat);
- }
- if (SUCCEEDED(hr))
- {
- /*assert(this->WaveFormat() != NULL);
-
- if (cbWaveFormat < this->WaveFormatSize())
- {
- hr = MF_E_INVALIDMEDIATYPE;
- }*/
- }
-
- if (SUCCEEDED(hr))
- {
- /*if (memcmp(pFormat, WaveFormat(), WaveFormatSize()) != 0)
- {
- hr = MF_E_INVALIDMEDIATYPE;
- }*/
- }
-
- CoTaskMemFree(pFormat);
- }
- }
+ hr = pMediaType->GetMajorType(&majorType);
+
+ if (SUCCEEDED(hr)) {
+ if(majorType == MFMediaType_Video) {
+ if (SUCCEEDED(hr)) {
+ hr = MFUtils::ValidateVideoFormat(pMediaType);
+ }
+ }
+ else {
+ WAVEFORMATEX *pFormat = NULL;
+ UINT32 cbWaveFormat = 0;
+
+ if (SUCCEEDED(hr)) {
+ hr = MFCreateWaveFormatExFromMFMediaType(
+ pMediaType,
+ &pFormat,
+ &cbWaveFormat);
+ }
+ if (SUCCEEDED(hr)) {
+ /*assert(this->WaveFormat() != NULL);
+
+ if (cbWaveFormat < this->WaveFormatSize())
+ {
+ hr = MF_E_INVALIDMEDIATYPE;
+ }*/
+ }
+
+ if (SUCCEEDED(hr)) {
+ /*if (memcmp(pFormat, WaveFormat(), WaveFormatSize()) != 0)
+ {
+ hr = MF_E_INVALIDMEDIATYPE;
+ }*/
+ }
+
+ CoTaskMemFree(pFormat);
+ }
+ }
SafeRelease(&pStreamDescriptor);
SafeRelease(&pHandler);
@@ -882,27 +811,23 @@ HRESULT CMFSource::QueueNewStreamEvent(IMFPresentationDescriptor *pPD)
hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// The stream must be selected, because we don't allow the app
// to de-select the stream. See ValidatePresentationDescriptor.
assert(fSelected);
- if (m_pStream)
- {
+ if (m_pStream) {
// The stream already exists, and is still selected.
// Send the MEUpdatedStream event.
hr = QueueEventWithIUnknown(this, MEUpdatedStream, S_OK, m_pStream);
}
- else
- {
+ else {
// The stream does not exist, and is now selected.
// Create a new stream.
hr = CreateCMFStreamSource(pSD);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// CreateCMFStreamSource creates the stream, so m_pStream is no longer NULL.
assert(m_pStream != NULL);
@@ -926,13 +851,11 @@ HRESULT CMFSource::CreateCMFStreamSource(IMFStreamDescriptor *pSD)
HRESULT hr = S_OK;
m_pStream = new (std::nothrow) CMFStreamSource(this, pSD, hr);
- if (m_pStream == NULL)
- {
+ if (m_pStream == NULL) {
hr = E_OUTOFMEMORY;
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
SafeRelease(&m_pStream);
}
@@ -948,12 +871,10 @@ HRESULT CMFSource::CreateCMFStreamSource(IMFStreamDescriptor *pSD)
LONGLONG CMFSource::GetCurrentPosition() const
{
- if (m_pStream)
- {
+ if (m_pStream) {
return m_pStream->GetCurrentPosition();
}
- else
- {
+ else {
// If no stream is selected, we are at time 0 by definition.
return 0;
}
@@ -977,11 +898,11 @@ CMFStreamSource::CMFStreamSource(CMFSource *pSource, IMFStreamDescriptor *pSD,
m_pEventQueue(NULL),
m_IsShutdown(FALSE),
m_rtCurrentPosition(0),
- m_rtDuration(0),
+ m_rtDuration(0),
m_discontinuity(FALSE),
m_EOS(FALSE),
- m_pMediaBuffer(NULL),
- m_nBufferSize(0)
+ m_pMediaBuffer(NULL),
+ m_nBufferSize(0)
{
m_pSource = pSource;
m_pSource->AddRef();
@@ -992,12 +913,12 @@ CMFStreamSource::CMFStreamSource(CMFSource *pSource, IMFStreamDescriptor *pSD,
// Create the media event queue.
CHECK_HR(hr = MFCreateEventQueue(&m_pEventQueue));
- //CHECK_HR(hr = InitializeParams());
-
+ //CHECK_HR(hr = InitializeParams());
+
InitializeCriticalSection(&m_critSec);
bail:
- return;
+ return;
}
@@ -1010,7 +931,7 @@ CMFStreamSource::~CMFStreamSource()
assert(m_IsShutdown);
assert(m_nRefCount == 0);
- SafeRelease(&m_pMediaBuffer);
+ SafeRelease(&m_pMediaBuffer);
DeleteCriticalSection(&m_critSec);
}
@@ -1020,38 +941,36 @@ CMFStreamSource::~CMFStreamSource()
HRESULT CMFStreamSource::CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize)
{
- // Buffer pointer and size validity already checked by source (caller)
- if(m_guidMajorType != MFMediaType_Video)
- {
- TSK_DEBUG_ERROR("Calling CopyVideoBuffer on no-video stream");
+ // Buffer pointer and size validity already checked by source (caller)
+ if(m_guidMajorType != MFMediaType_Video) {
+ TSK_DEBUG_ERROR("Calling CopyVideoBuffer on no-video stream");
#if defined(E_ILLEGAL_METHOD_CALL)
- return E_ILLEGAL_METHOD_CALL;
+ return E_ILLEGAL_METHOD_CALL;
#else
- return _HRESULT_TYPEDEF_(0x8000000EL);
+ return _HRESULT_TYPEDEF_(0x8000000EL);
#endif
- }
- if(nWidth != m_structVideoParams.nWidth || nHeight != m_structVideoParams.nHeigh || nBufferSize != m_nBufferSize)
- {
- TSK_DEBUG_ERROR("Invalid argument %u#%u or %u#%u or %u#%u. If the call is from a video consumer then, you can safely ignore this message.", nWidth, m_structVideoParams.nWidth, nHeight, m_structVideoParams.nHeigh, nBufferSize, m_nBufferSize);
+ }
+ if(nWidth != m_structVideoParams.nWidth || nHeight != m_structVideoParams.nHeigh || nBufferSize != m_nBufferSize) {
+ TSK_DEBUG_ERROR("Invalid argument %u#%u or %u#%u or %u#%u. If the call is from a video consumer then, you can safely ignore this message.", nWidth, m_structVideoParams.nWidth, nHeight, m_structVideoParams.nHeigh, nBufferSize, m_nBufferSize);
#if defined(E_BOUNDS)
- return E_BOUNDS;
+ return E_BOUNDS;
#else
- return _HRESULT_TYPEDEF_(0x8000000BL);
+ return _HRESULT_TYPEDEF_(0x8000000BL);
#endif
- }
-
- HRESULT hr = S_OK;
-
- BYTE* pMediaBufferPtr = NULL;
- DWORD cbMaxLength = nBufferSize, cbCurrentLength = nBufferSize;
- CHECK_HR(hr = m_pMediaBuffer->Lock(&pMediaBufferPtr, &cbMaxLength, &cbCurrentLength));
-
- memcpy(pMediaBufferPtr, pBufferPtr, nBufferSize);
- CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
- CHECK_HR(hr = m_pMediaBuffer->Unlock());
-
+ }
+
+ HRESULT hr = S_OK;
+
+ BYTE* pMediaBufferPtr = NULL;
+ DWORD cbMaxLength = nBufferSize, cbCurrentLength = nBufferSize;
+ CHECK_HR(hr = m_pMediaBuffer->Lock(&pMediaBufferPtr, &cbMaxLength, &cbCurrentLength));
+
+ memcpy(pMediaBufferPtr, pBufferPtr, nBufferSize);
+ CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
+ CHECK_HR(hr = m_pMediaBuffer->Unlock());
+
bail:
- return hr;
+ return hr;
}
// IUnknown methods
@@ -1064,8 +983,7 @@ ULONG CMFStreamSource::AddRef()
ULONG CMFStreamSource::Release()
{
ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -1074,8 +992,7 @@ ULONG CMFStreamSource::Release()
HRESULT CMFStreamSource::QueryInterface(REFIID iid, void** ppv)
{
- static const QITAB qit[] =
- {
+ static const QITAB qit[] = {
QITABENT(CMFStreamSource, IMFMediaEventGenerator),
QITABENT(CMFStreamSource, IMFMediaStream),
{ 0 }
@@ -1094,8 +1011,7 @@ HRESULT CMFStreamSource::BeginGetEvent(IMFAsyncCallback* pCallback, IUnknown* pu
EnterCriticalSection(&m_critSec);
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->BeginGetEvent(pCallback, punkState);
}
@@ -1110,8 +1026,7 @@ HRESULT CMFStreamSource::EndGetEvent(IMFAsyncResult* pResult, IMFMediaEvent** pp
EnterCriticalSection(&m_critSec);
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->EndGetEvent(pResult, ppEvent);
}
@@ -1129,16 +1044,14 @@ HRESULT CMFStreamSource::GetEvent(DWORD dwFlags, IMFMediaEvent** ppEvent)
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
pQueue = m_pEventQueue;
pQueue->AddRef();
}
LeaveCriticalSection(&m_critSec);
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pQueue->GetEvent(dwFlags, ppEvent);
}
@@ -1153,8 +1066,7 @@ HRESULT CMFStreamSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType
EnterCriticalSection(&m_critSec);
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pEventQueue->QueueEventParamVar(met, guidExtendedType, hrStatus, pvValue);
}
@@ -1173,8 +1085,7 @@ HRESULT CMFStreamSource::QueueEvent(MediaEventType met, REFGUID guidExtendedType
HRESULT CMFStreamSource::GetMediaSource(IMFMediaSource** ppMediaSource)
{
- if (ppMediaSource == NULL)
- {
+ if (ppMediaSource == NULL) {
return E_POINTER;
}
@@ -1187,16 +1098,13 @@ HRESULT CMFStreamSource::GetMediaSource(IMFMediaSource** ppMediaSource)
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
- if (m_pSource == NULL)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pSource == NULL) {
hr = E_UNEXPECTED;
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = m_pSource->QueryInterface(IID_PPV_ARGS(ppMediaSource));
}
@@ -1212,13 +1120,11 @@ HRESULT CMFStreamSource::GetMediaSource(IMFMediaSource** ppMediaSource)
HRESULT CMFStreamSource::GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescriptor)
{
- if (ppStreamDescriptor == NULL)
- {
+ if (ppStreamDescriptor == NULL) {
return E_POINTER;
}
- if (m_pStreamDescriptor == NULL)
- {
+ if (m_pStreamDescriptor == NULL) {
return E_UNEXPECTED;
}
@@ -1228,8 +1134,7 @@ HRESULT CMFStreamSource::GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescr
hr = CheckShutdown();
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
*ppStreamDescriptor = m_pStreamDescriptor;
(*ppStreamDescriptor)->AddRef();
}
@@ -1250,8 +1155,7 @@ HRESULT CMFStreamSource::GetStreamDescriptor(IMFStreamDescriptor** ppStreamDescr
HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
{
- if (m_pSource == NULL)
- {
+ if (m_pSource == NULL) {
return E_UNEXPECTED;
}
@@ -1266,60 +1170,49 @@ HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
hr = CheckShutdown();
// Check if we already reached the end of the stream.
- if (SUCCEEDED(hr))
- {
- if (m_EOS)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_EOS) {
hr = MF_E_END_OF_STREAM;
}
}
// Check the source is stopped.
// GetState does not hold the source's critical section. Safe to call.
- if (SUCCEEDED(hr))
- {
- if (m_pSource->GetState() == CMFSource::STATE_STOPPED)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_pSource->GetState() == CMFSource::STATE_STOPPED) {
hr = MF_E_INVALIDREQUEST;
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Create a new audio sample.
hr = CreateSample(&pSample);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// If the caller provided a token, attach it to the sample as
// an attribute.
// NOTE: If we processed sample requests asynchronously, we would
// need to call AddRef on the token and put the token onto a FIFO
// queue. See documentation for IMFMediaStream::RequestSample.
- if (pToken && pSample)
- {
+ if (pToken && pSample) {
hr = pSample->SetUnknown(MFSampleExtension_Token, pToken);
}
}
// If paused, queue the sample for later delivery. Otherwise, deliver the sample now.
- if (SUCCEEDED(hr) && pSample)
- {
- if (m_pSource->GetState() == CMFSource::STATE_PAUSED)
- {
+ if (SUCCEEDED(hr) && pSample) {
+ if (m_pSource->GetState() == CMFSource::STATE_PAUSED) {
hr = m_sampleQueue.Queue(pSample);
}
- else
- {
+ else {
hr = DeliverSample(pSample);
}
}
// Cache a pointer to the source, prior to leaving the critical section.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
pSource = m_pSource;
pSource->AddRef();
}
@@ -1334,10 +1227,8 @@ HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
// source's critical section while holding the stream's critical section, at
// risk of deadlock.
- if (SUCCEEDED(hr))
- {
- if (m_EOS)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_EOS) {
hr = pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
}
}
@@ -1352,78 +1243,72 @@ HRESULT CMFStreamSource::RequestSample(IUnknown* pToken)
HRESULT CMFStreamSource::InitializeParams()
{
- HRESULT hr = S_OK;
-
- IMFMediaTypeHandler *pMediaTypeHandler = NULL;
- IMFMediaType* pMediaType = NULL;
-
- CHECK_HR(hr = m_pStreamDescriptor->GetMediaTypeHandler(&pMediaTypeHandler));
- CHECK_HR(hr = pMediaTypeHandler->GetCurrentMediaType(&pMediaType));
-
- GUID majorType, subType;
- pMediaType->GetMajorType(&majorType);
- if(majorType == MFMediaType_Video)
- {
- memset(&m_structVideoParams, 0, sizeof(m_structVideoParams));
- CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &m_structVideoParams.nWidth, &m_structVideoParams.nHeigh));
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
-
- m_guidMajorType = MFMediaType_Video;
- m_guidSubType = subType;
-
- // Guess video size
- UINT32 nBufferSize;
- if(subType == MFVideoFormat_RGB32)
- {
- nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
- }
- else if(subType == MFVideoFormat_RGB24)
- {
- nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
- }
- else if(subType == MFVideoFormat_NV12 || subType == MFVideoFormat_I420)
- {
- nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh * 3) >> 1;
- }
- else
- {
- TSK_DEBUG_ERROR("Video subType not supported");
- CHECK_HR(hr = E_NOTIMPL);
- }
-
- // Allocate media buffer
- SafeRelease(&m_pMediaBuffer);
- CHECK_HR(hr = MFCreateMemoryBuffer(nBufferSize, &m_pMediaBuffer));
- m_nBufferSize = nBufferSize;
- {
- //FIXME: DeliverSample() stops if no data
- BYTE* pBuffer = NULL;
- CHECK_HR(hr = m_pMediaBuffer->Lock(&pBuffer, NULL, NULL));
- memset(pBuffer, 0, nBufferSize);
- CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
- CHECK_HR(hr = m_pMediaBuffer->Unlock());
- }
-
- // Retrieve video Frame rate
- UINT32 unNumerator, unDenominator;
- CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &unNumerator, &unDenominator));
- m_structVideoParams.nFps = (unNumerator / unDenominator);
-
- // Retrieve sample duration based on framerate
- m_rtCurrentPosition = 0;
- CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(m_structVideoParams.nFps, 1, &m_rtDuration));
- }
- else
- {
- TSK_DEBUG_ERROR("Only video media type is supported");
- CHECK_HR(hr = E_NOTIMPL);
- }
+ HRESULT hr = S_OK;
+
+ IMFMediaTypeHandler *pMediaTypeHandler = NULL;
+ IMFMediaType* pMediaType = NULL;
+
+ CHECK_HR(hr = m_pStreamDescriptor->GetMediaTypeHandler(&pMediaTypeHandler));
+ CHECK_HR(hr = pMediaTypeHandler->GetCurrentMediaType(&pMediaType));
+
+ GUID majorType, subType;
+ pMediaType->GetMajorType(&majorType);
+ if(majorType == MFMediaType_Video) {
+ memset(&m_structVideoParams, 0, sizeof(m_structVideoParams));
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &m_structVideoParams.nWidth, &m_structVideoParams.nHeigh));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+
+ m_guidMajorType = MFMediaType_Video;
+ m_guidSubType = subType;
+
+ // Guess video size
+ UINT32 nBufferSize;
+ if(subType == MFVideoFormat_RGB32) {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
+ }
+ else if(subType == MFVideoFormat_RGB24) {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh << 2);
+ }
+ else if(subType == MFVideoFormat_NV12 || subType == MFVideoFormat_I420) {
+ nBufferSize = (m_structVideoParams.nWidth * m_structVideoParams.nHeigh * 3) >> 1;
+ }
+ else {
+ TSK_DEBUG_ERROR("Video subType not supported");
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+
+ // Allocate media buffer
+ SafeRelease(&m_pMediaBuffer);
+ CHECK_HR(hr = MFCreateMemoryBuffer(nBufferSize, &m_pMediaBuffer));
+ m_nBufferSize = nBufferSize;
+ {
+ //FIXME: DeliverSample() stops if no data
+ BYTE* pBuffer = NULL;
+ CHECK_HR(hr = m_pMediaBuffer->Lock(&pBuffer, NULL, NULL));
+ memset(pBuffer, 0, nBufferSize);
+ CHECK_HR(hr = m_pMediaBuffer->SetCurrentLength(nBufferSize));
+ CHECK_HR(hr = m_pMediaBuffer->Unlock());
+ }
+
+ // Retrieve video Frame rate
+ UINT32 unNumerator, unDenominator;
+ CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &unNumerator, &unDenominator));
+ m_structVideoParams.nFps = (unNumerator / unDenominator);
+
+ // Retrieve sample duration based on framerate
+ m_rtCurrentPosition = 0;
+ CHECK_HR(hr = MFFrameRateToAverageTimePerFrame(m_structVideoParams.nFps, 1, &m_rtDuration));
+ }
+ else {
+ TSK_DEBUG_ERROR("Only video media type is supported");
+ CHECK_HR(hr = E_NOTIMPL);
+ }
bail:
- SafeRelease(&pMediaTypeHandler);
- SafeRelease(&pMediaType);
+ SafeRelease(&pMediaTypeHandler);
+ SafeRelease(&pMediaType);
- return hr;
+ return hr;
}
// NOTE: Some of these methods hold the stream's critical section
@@ -1436,29 +1321,27 @@ bail:
HRESULT CMFStreamSource::CreateSample(IMFSample **ppSample)
{
- *ppSample = NULL;
+ *ppSample = NULL;
HRESULT hr = S_OK;
IMFSample *pSample = NULL;
- DWORD nCurrentLength = 0;
-
- CHECK_HR(hr = m_pMediaBuffer->GetCurrentLength(&nCurrentLength));
-
- if(nCurrentLength > 0)
- {
- CHECK_HR(hr = MFCreateSample(&pSample));
- CHECK_HR(hr = pSample->SetSampleTime(m_rtCurrentPosition));
- CHECK_HR(hr = pSample->SetSampleDuration(m_rtDuration));
- m_rtCurrentPosition += m_rtDuration;
- CHECK_HR(hr = pSample->AddBuffer(m_pMediaBuffer));
-
- if((*ppSample = pSample))
- {
- (*ppSample)->AddRef();
- }
- }
-
+ DWORD nCurrentLength = 0;
+
+ CHECK_HR(hr = m_pMediaBuffer->GetCurrentLength(&nCurrentLength));
+
+ if(nCurrentLength > 0) {
+ CHECK_HR(hr = MFCreateSample(&pSample));
+ CHECK_HR(hr = pSample->SetSampleTime(m_rtCurrentPosition));
+ CHECK_HR(hr = pSample->SetSampleDuration(m_rtDuration));
+ m_rtCurrentPosition += m_rtDuration;
+ CHECK_HR(hr = pSample->AddBuffer(m_pMediaBuffer));
+
+ if((*ppSample = pSample)) {
+ (*ppSample)->AddRef();
+ }
+ }
+
bail:
SafeRelease(&pSample);
return hr;
@@ -1472,15 +1355,13 @@ HRESULT CMFStreamSource::DeliverSample(IMFSample *pSample)
{
HRESULT hr = S_OK;
- if(pSample)
- {
- // Send the MEMediaSample event with the new sample.
- hr = QueueEventWithIUnknown(this, MEMediaSample, hr, pSample);
- }
+ if(pSample) {
+ // Send the MEMediaSample event with the new sample.
+ hr = QueueEventWithIUnknown(this, MEMediaSample, hr, pSample);
+ }
// See if we reached the end of the stream.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = CheckEndOfStream(); // This method sends MEEndOfStream if needed.
}
@@ -1506,25 +1387,20 @@ HRESULT CMFStreamSource::DeliverQueuedSamples()
// If we already reached the end of the stream, send the MEEndStream
// event again.
- if (m_EOS)
- {
+ if (m_EOS) {
hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Deliver any queued samples.
- while (!m_sampleQueue.IsEmpty())
- {
+ while (!m_sampleQueue.IsEmpty()) {
hr = m_sampleQueue.Dequeue(&pSample);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
break;
}
hr = DeliverSample(pSample);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
break;
}
@@ -1536,10 +1412,8 @@ HRESULT CMFStreamSource::DeliverQueuedSamples()
// If we reached the end of the stream, send the end-of-presentation event from
// the media source.
- if (SUCCEEDED(hr))
- {
- if (m_EOS)
- {
+ if (SUCCEEDED(hr)) {
+ if (m_EOS) {
hr = m_pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
}
}
@@ -1578,8 +1452,7 @@ HRESULT CMFStreamSource::Shutdown()
Flush();
// Shut down the event queue.
- if (m_pEventQueue)
- {
+ if (m_pEventQueue) {
m_pEventQueue->Shutdown();
}
@@ -1602,37 +1475,37 @@ HRESULT CMFStreamSource::SetPosition(LONGLONG rtNewPosition)
{
EnterCriticalSection(&m_critSec);
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
-/*
- // Check if the requested position is beyond the end of the stream.
- LONGLONG duration = AudioDurationFromBufferSize(m_pRiff->Format(), m_pRiff->Chunk().DataSize());
+ /*
+ // Check if the requested position is beyond the end of the stream.
+ LONGLONG duration = AudioDurationFromBufferSize(m_pRiff->Format(), m_pRiff->Chunk().DataSize());
- if (rtNewPosition > duration)
- {
- LeaveCriticalSection(&m_critSec);
+ if (rtNewPosition > duration)
+ {
+ LeaveCriticalSection(&m_critSec);
- return MF_E_INVALIDREQUEST; // Start position is past the end of the presentation.
- }
+ return MF_E_INVALIDREQUEST; // Start position is past the end of the presentation.
+ }
- if (m_rtCurrentPosition != rtNewPosition)
- {
- LONGLONG offset = BufferSizeFromAudioDuration(m_pRiff->Format(), rtNewPosition);
+ if (m_rtCurrentPosition != rtNewPosition)
+ {
+ LONGLONG offset = BufferSizeFromAudioDuration(m_pRiff->Format(), rtNewPosition);
- // The chunk size is a DWORD. So if our calculations are correct, there is no
- // way that the maximum valid seek position can be larger than a DWORD.
- assert(offset <= MAXDWORD);
+ // The chunk size is a DWORD. So if our calculations are correct, there is no
+ // way that the maximum valid seek position can be larger than a DWORD.
+ assert(offset <= MAXDWORD);
- hr = m_pRiff->MoveToChunkOffset((DWORD)offset);
+ hr = m_pRiff->MoveToChunkOffset((DWORD)offset);
- if (SUCCEEDED(hr))
- {
- m_rtCurrentPosition = rtNewPosition;
- m_discontinuity = TRUE;
- m_EOS = FALSE;
+ if (SUCCEEDED(hr))
+ {
+ m_rtCurrentPosition = rtNewPosition;
+ m_discontinuity = TRUE;
+ m_EOS = FALSE;
+ }
}
- }
-*/
+ */
LeaveCriticalSection(&m_critSec);
return hr;
}
@@ -1640,18 +1513,18 @@ HRESULT CMFStreamSource::SetPosition(LONGLONG rtNewPosition)
HRESULT CMFStreamSource::CheckEndOfStream()
{
HRESULT hr = S_OK;
-/*
- if (m_pRiff->BytesRemainingInChunk() < m_pRiff->Format()->nBlockAlign)
- {
- // The remaining data is smaller than the audio block size. (In theory there shouldn't be
- // partial bits of data at the end, so we should reach an even zero bytes, but the file
- // might not be authored correctly.)
- m_EOS = TRUE;
-
- // Send the end-of-stream event,
- hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
- }
- */
+ /*
+ if (m_pRiff->BytesRemainingInChunk() < m_pRiff->Format()->nBlockAlign)
+ {
+ // The remaining data is smaller than the audio block size. (In theory there shouldn't be
+ // partial bits of data at the end, so we should reach an even zero bytes, but the file
+ // might not be authored correctly.)
+ m_EOS = TRUE;
+
+ // Send the end-of-stream event,
+ hr = QueueEvent(MEEndOfStream, GUID_NULL, S_OK, NULL);
+ }
+ */
return hr;
}
@@ -1697,8 +1570,7 @@ LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioData
{
assert(pWav != NULL);
- if (pWav->nAvgBytesPerSec == 0)
- {
+ if (pWav->nAvgBytesPerSec == 0) {
return 0;
}
return (LONGLONG)cbAudioDataSize * 10000000 / pWav->nAvgBytesPerSec;
@@ -1711,8 +1583,7 @@ LONGLONG BufferSizeFromAudioDuration(const WAVEFORMATEX *pWav, LONGLONG duration
ULONG ulRemainder = (ULONG)(cbSize % pWav->nBlockAlign);
// Round up to the next block.
- if(ulRemainder)
- {
+ if(ulRemainder) {
cbSize += pWav->nBlockAlign - ulRemainder;
}
diff --git a/plugins/pluginWinMF/internals/mf_custom_src.h b/plugins/pluginWinMF/internals/mf_custom_src.h
index f9194c9..15d8b90 100755
--- a/plugins/pluginWinMF/internals/mf_custom_src.h
+++ b/plugins/pluginWinMF/internals/mf_custom_src.h
@@ -1,20 +1,20 @@
-/*
+/*
* Copyright (C) Microsoft Corporation. All rights reserved.
* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -50,10 +50,10 @@ class CMFSource : public IMFMediaSource
public:
static HRESULT CreateInstance(REFIID iid, void **ppSource);
- static HRESULT CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType);
+ static HRESULT CreateInstanceEx(REFIID iid, void **ppSource, IMFMediaType *pMediaType);
- // IMFCustomSource
- HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
+ // IMFCustomSource
+ HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
// IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
@@ -80,8 +80,7 @@ public:
private:
- enum State
- {
+ enum State {
STATE_STOPPED,
STATE_PAUSED,
STATE_STARTED
@@ -92,14 +91,11 @@ private:
CMFSource(HRESULT &hr, IMFMediaType *pMediaType);
virtual ~CMFSource();
- HRESULT CheckShutdown() const
- {
- if (m_IsShutdown)
- {
+ HRESULT CheckShutdown() const {
+ if (m_IsShutdown) {
return MF_E_SHUTDOWN;
}
- else
- {
+ else {
return S_OK;
}
}
@@ -110,7 +106,9 @@ private:
HRESULT ValidatePresentationDescriptor(IMFPresentationDescriptor *pPD);
LONGLONG GetCurrentPosition() const;
- State GetState() const { return m_state; }
+ State GetState() const {
+ return m_state;
+ }
IMFMediaEventQueue *m_pEventQueue; // Event generator helper
IMFPresentationDescriptor *m_pPresentationDescriptor; // Default presentation
@@ -122,7 +120,7 @@ private:
BOOL m_IsShutdown; // Flag to indicate if Shutdown() method was called.
State m_state; // Current state (running, stopped, paused)
- IMFMediaType *m_pMediaType; // The supported mediaType
+ IMFMediaType *m_pMediaType; // The supported mediaType
};
@@ -131,22 +129,21 @@ class SampleQueue
protected:
// Nodes in the linked list
- struct Node
- {
+ struct Node {
Node *prev;
Node *next;
IMFSample* item;
- Node() : prev(NULL), next(NULL)
- {
+ Node() : prev(NULL), next(NULL) {
}
- Node(IMFSample* item) : prev(NULL), next(NULL)
- {
+ Node(IMFSample* item) : prev(NULL), next(NULL) {
this->item = item;
}
- IMFSample* Item() const { return item; }
+ IMFSample* Item() const {
+ return item;
+ }
};
@@ -155,27 +152,22 @@ protected:
public:
- SampleQueue()
- {
+ SampleQueue() {
m_anchor.next = &m_anchor;
m_anchor.prev = &m_anchor;
}
- virtual ~SampleQueue()
- {
+ virtual ~SampleQueue() {
Clear();
}
- HRESULT Queue(IMFSample* item)
- {
- if (item == NULL)
- {
+ HRESULT Queue(IMFSample* item) {
+ if (item == NULL) {
return E_POINTER;
}
Node *pNode = new (std::nothrow) Node(item);
- if (pNode == NULL)
- {
+ if (pNode == NULL) {
return E_OUTOFMEMORY;
}
@@ -195,14 +187,11 @@ public:
}
- HRESULT Dequeue(IMFSample* *ppItem)
- {
- if (IsEmpty())
- {
+ HRESULT Dequeue(IMFSample**ppItem) {
+ if (IsEmpty()) {
return E_FAIL;
}
- if (ppItem == NULL)
- {
+ if (ppItem == NULL) {
return E_POINTER;
}
@@ -220,17 +209,16 @@ public:
return S_OK;
}
- BOOL IsEmpty() const { return m_anchor.next == &m_anchor; }
+ BOOL IsEmpty() const {
+ return m_anchor.next == &m_anchor;
+ }
- void Clear()
- {
+ void Clear() {
Node *n = m_anchor.next;
// Delete the nodes
- while (n != &m_anchor)
- {
- if (n->item)
- {
+ while (n != &m_anchor) {
+ if (n->item) {
n->item->Release();
}
@@ -260,8 +248,8 @@ class CMFStreamSource : public IMFMediaStream
public:
- // IMFCustomSource
- HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
+ // IMFCustomSource
+ HRESULT CopyVideoBuffer(UINT32 nWidth, UINT32 nHeight, const void* pBufferPtr, UINT32 nBufferSize);
// IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
@@ -285,26 +273,25 @@ private:
~CMFStreamSource();
- HRESULT CheckShutdown() const
- {
- if (m_IsShutdown)
- {
+ HRESULT CheckShutdown() const {
+ if (m_IsShutdown) {
return MF_E_SHUTDOWN;
}
- else
- {
+ else {
return S_OK;
}
}
- HRESULT InitializeParams();
+ HRESULT InitializeParams();
HRESULT Shutdown();
HRESULT CreateSample(IMFSample **pSample);
HRESULT DeliverSample(IMFSample *pSample);
HRESULT DeliverQueuedSamples();
HRESULT Flush();
- LONGLONG GetCurrentPosition() const { return m_rtCurrentPosition; }
+ LONGLONG GetCurrentPosition() const {
+ return m_rtCurrentPosition;
+ }
HRESULT SetPosition(LONGLONG rtNewPosition);
HRESULT CheckEndOfStream();
@@ -313,7 +300,7 @@ private:
CRITICAL_SECTION m_critSec;
BOOL m_IsShutdown; // Flag to indicate if source's Shutdown() method was called.
LONGLONG m_rtCurrentPosition; // Current position in the stream, in 100-ns units
- UINT64 m_rtDuration; // Sample duration, in 100-ns units
+ UINT64 m_rtDuration; // Sample duration, in 100-ns units
BOOL m_discontinuity; // Is the next sample a discontinuity?
BOOL m_EOS; // Did we reach the end of the stream?
@@ -322,18 +309,17 @@ private:
IMFStreamDescriptor *m_pStreamDescriptor; // Stream descriptor for this stream.
SampleQueue m_sampleQueue; // Queue for samples while paused.
- GUID m_guidMajorType; // major media type (e.g. MFMediaType_Video or MFMediaType_Audio)
- GUID m_guidSubType; // Media subtype (e.g. MFVideoFormat_RGB32 or MFVideoFormat_H264)
- IMFMediaBuffer *m_pMediaBuffer; // Pointer to the data to deliver
- UINT32 m_nBufferSize; // Size of the data to deliver
-
- struct
- {
- UINT32 nWidth;
- UINT32 nHeigh;
- UINT32 nFps;
- }
- m_structVideoParams;
+ GUID m_guidMajorType; // major media type (e.g. MFMediaType_Video or MFMediaType_Audio)
+ GUID m_guidSubType; // Media subtype (e.g. MFVideoFormat_RGB32 or MFVideoFormat_H264)
+ IMFMediaBuffer *m_pMediaBuffer; // Pointer to the data to deliver
+ UINT32 m_nBufferSize; // Size of the data to deliver
+
+ struct {
+ UINT32 nWidth;
+ UINT32 nHeigh;
+ UINT32 nFps;
+ }
+ m_structVideoParams;
};
diff --git a/plugins/pluginWinMF/internals/mf_devices.cxx b/plugins/pluginWinMF/internals/mf_devices.cxx
index 22b862e..49005de 100755
--- a/plugins/pluginWinMF/internals/mf_devices.cxx
+++ b/plugins/pluginWinMF/internals/mf_devices.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -20,25 +20,25 @@
#include "mf_utils.h"
DeviceList::DeviceList()
-: m_ppDevices(NULL)
-, m_cDevices(0)
+ : m_ppDevices(NULL)
+ , m_cDevices(0)
{
-
+
}
DeviceList::~DeviceList()
{
- Clear();
+ Clear();
}
UINT32 DeviceList::Count()const
{
- return m_cDevices;
+ return m_cDevices;
}
void DeviceList::Clear()
{
- for (UINT32 i = 0; i < m_cDevices; i++) {
+ for (UINT32 i = 0; i < m_cDevices; i++) {
SafeRelease(&m_ppDevices[i]);
}
CoTaskMemFree(m_ppDevices);
@@ -49,7 +49,7 @@ void DeviceList::Clear()
HRESULT DeviceList::EnumerateDevices(const GUID& sourceType)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
IMFAttributes *pAttributes = NULL;
Clear();
@@ -60,17 +60,15 @@ HRESULT DeviceList::EnumerateDevices(const GUID& sourceType)
hr = MFCreateAttributes(&pAttributes, 1);
// Ask for source type = video capture devices
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pAttributes->SetGUID(
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- sourceType
- );
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ sourceType
+ );
}
// Enumerate devices.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
}
@@ -81,8 +79,7 @@ HRESULT DeviceList::EnumerateDevices(const GUID& sourceType)
HRESULT DeviceList::GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate)
{
- if (index >= Count())
- {
+ if (index >= Count()) {
return E_INVALIDARG;
}
@@ -94,58 +91,52 @@ HRESULT DeviceList::GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate)
HRESULT DeviceList::GetDeviceBest(IMFActivate **ppActivate, WCHAR *pszName /*= NULL*/)
{
- UINT32 index = 0;
- if(pszName)
- {
- WCHAR *_pszName = NULL;
- BOOL bFound = FALSE;
- for(UINT32 i = 0; i < Count() && !bFound; ++i)
- {
- if((SUCCEEDED(GetDeviceName(i, &_pszName))))
- {
- if(wcscmp(_pszName, pszName) == 0)
- {
- index = i;
- bFound = TRUE;
- // do not break the loop because we need to free(_pszName)
- }
- }
- if(_pszName)
- {
- CoTaskMemFree(_pszName), _pszName = NULL;
- }
- }
- }
- return GetDeviceAtIndex(index, ppActivate);
+ UINT32 index = 0;
+ if(pszName) {
+ WCHAR *_pszName = NULL;
+ BOOL bFound = FALSE;
+ for(UINT32 i = 0; i < Count() && !bFound; ++i) {
+ if((SUCCEEDED(GetDeviceName(i, &_pszName)))) {
+ if(wcscmp(_pszName, pszName) == 0) {
+ index = i;
+ bFound = TRUE;
+ // do not break the loop because we need to free(_pszName)
+ }
+ }
+ if(_pszName) {
+ CoTaskMemFree(_pszName), _pszName = NULL;
+ }
+ }
+ }
+ return GetDeviceAtIndex(index, ppActivate);
}
// The caller must free the memory for the string by calling CoTaskMemFree
HRESULT DeviceList::GetDeviceName(UINT32 index, WCHAR **ppszName)
{
- if (index >= Count())
- {
+ if (index >= Count()) {
return E_INVALIDARG;
}
HRESULT hr = S_OK;
hr = m_ppDevices[index]->GetAllocatedString(
- MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
- ppszName,
- NULL
- );
+ MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
+ ppszName,
+ NULL
+ );
return hr;
}
HRESULT DeviceListAudio::EnumerateDevices()
{
- // call base class function
- return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
+ // call base class function
+ return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
}
HRESULT DeviceListVideo::EnumerateDevices()
{
- // call base class function
- return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
+ // call base class function
+ return DeviceList::EnumerateDevices(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
}
diff --git a/plugins/pluginWinMF/internals/mf_devices.h b/plugins/pluginWinMF/internals/mf_devices.h
index 03d010f..3a16a29 100755
--- a/plugins/pluginWinMF/internals/mf_devices.h
+++ b/plugins/pluginWinMF/internals/mf_devices.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -46,19 +46,19 @@ public:
HRESULT GetDeviceName(UINT32 index, WCHAR **ppszName);
protected:
- HRESULT EnumerateDevices(const GUID& sourceType);
+ HRESULT EnumerateDevices(const GUID& sourceType);
};
class DeviceListAudio : public DeviceList
{
public:
- HRESULT EnumerateDevices();
+ HRESULT EnumerateDevices();
};
class DeviceListVideo : public DeviceList
{
public:
- HRESULT EnumerateDevices();
+ HRESULT EnumerateDevices();
};
#endif /* PLUGIN_WIN_MF_DEVICES_H */
diff --git a/plugins/pluginWinMF/internals/mf_display_watcher.cxx b/plugins/pluginWinMF/internals/mf_display_watcher.cxx
index 62dbc5f..38f3687 100755
--- a/plugins/pluginWinMF/internals/mf_display_watcher.cxx
+++ b/plugins/pluginWinMF/internals/mf_display_watcher.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -24,137 +24,126 @@
#include <assert.h>
DisplayWatcher::DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr)
-: m_pDisplayControl(NULL)
-, m_hWnd(hWnd)
-, m_pWndProc(NULL)
-, m_bStarted(FALSE)
-, m_bFullScreen(FALSE)
+ : m_pDisplayControl(NULL)
+ , m_hWnd(hWnd)
+ , m_pWndProc(NULL)
+ , m_bStarted(FALSE)
+ , m_bFullScreen(FALSE)
{
- IMFGetService *pService = NULL;
+ IMFGetService *pService = NULL;
- CHECK_HR(hr = pMediaSink->QueryInterface(__uuidof(IMFGetService), (void**)&pService));
- CHECK_HR(hr = pService->GetService(MR_VIDEO_RENDER_SERVICE, __uuidof(IMFVideoDisplayControl), (void**)&m_pDisplayControl));
- CHECK_HR(hr = m_pDisplayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture));
+ CHECK_HR(hr = pMediaSink->QueryInterface(__uuidof(IMFGetService), (void**)&pService));
+ CHECK_HR(hr = pService->GetService(MR_VIDEO_RENDER_SERVICE, __uuidof(IMFVideoDisplayControl), (void**)&m_pDisplayControl));
+ CHECK_HR(hr = m_pDisplayControl->SetAspectRatioMode(MFVideoARMode_PreservePicture));
bail:
- SafeRelease(&pService);
+ SafeRelease(&pService);
}
DisplayWatcher::~DisplayWatcher()
{
- Stop();
+ Stop();
- SafeRelease(&m_pDisplayControl);
+ SafeRelease(&m_pDisplayControl);
}
HRESULT DisplayWatcher::Start()
{
- HRESULT hr = S_OK;
- HWND hWnd = m_hWnd; // save()
- CHECK_HR(hr = Stop());
-
- if((m_hWnd = hWnd) && m_pDisplayControl)
- {
- CHECK_HR(hr = m_pDisplayControl->SetVideoWindow(hWnd));
-
- BOOL ret = SetPropA(m_hWnd, "This", this);
- assert(ret);
-
+ HRESULT hr = S_OK;
+ HWND hWnd = m_hWnd; // save the window handle because Stop() resets m_hWnd
+ CHECK_HR(hr = Stop());
+
+ if((m_hWnd = hWnd) && m_pDisplayControl) {
+ CHECK_HR(hr = m_pDisplayControl->SetVideoWindow(hWnd));
+
+ BOOL ret = SetPropA(m_hWnd, "This", this);
+ assert(ret);
+
#if _M_X64
- m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)DisplayWatcher::WndProc);
+ m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)DisplayWatcher::WndProc);
#else
- m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)DisplayWatcher::WndProc);
+ m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)DisplayWatcher::WndProc);
#endif
- UpdatePosition(); // black screen if attached later
- }
- m_bStarted = TRUE;
+ UpdatePosition(); // avoid a black screen when the window is attached later
+ }
+ m_bStarted = TRUE;
bail:
- return hr;
+ return hr;
}
HRESULT DisplayWatcher::SetFullscreen(BOOL bEnabled)
{
- if(m_pDisplayControl)
- {
- HRESULT hr = m_pDisplayControl->SetFullscreen(bEnabled);
- m_bFullScreen = SUCCEEDED(hr);
- return hr;
- }
-
- return E_FAIL;
+ if(m_pDisplayControl) {
+ HRESULT hr = m_pDisplayControl->SetFullscreen(bEnabled);
+ m_bFullScreen = SUCCEEDED(hr);
+ return hr;
+ }
+
+ return E_FAIL;
}
HRESULT DisplayWatcher::SetHwnd(HWND hWnd)
{
- BOOL bWasStarted = m_bStarted;
- Stop();
- m_hWnd = hWnd;
- if(bWasStarted)
- {
- return Start();
- }
- return S_OK;
+ BOOL bWasStarted = m_bStarted;
+ Stop();
+ m_hWnd = hWnd;
+ if(bWasStarted) {
+ return Start();
+ }
+ return S_OK;
}
HRESULT DisplayWatcher::Stop()
{
- if(m_hWnd && m_pWndProc)
- {
- // Restore
-
+ if(m_hWnd && m_pWndProc) {
+ // Restore
+
#if _M_X64
- SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)m_pWndProc);
+ SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)m_pWndProc);
#else
- SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)m_pWndProc);
+ SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)m_pWndProc);
#endif
- }
- m_hWnd = NULL;
- m_pWndProc = NULL;
- m_bStarted = FALSE;
- return S_OK;
+ }
+ m_hWnd = NULL;
+ m_pWndProc = NULL;
+ m_bStarted = FALSE;
+ return S_OK;
}
void DisplayWatcher::UpdatePosition()
{
- if(m_pDisplayControl && m_hWnd)
- {
- RECT rcDst = { 0, 0, 0, 0 };
- GetClientRect(m_hWnd, &rcDst);
- m_pDisplayControl->SetVideoPosition(NULL, &rcDst);
- }
+ if(m_pDisplayControl && m_hWnd) {
+ RECT rcDst = { 0, 0, 0, 0 };
+ GetClientRect(m_hWnd, &rcDst);
+ m_pDisplayControl->SetVideoPosition(NULL, &rcDst);
+ }
}
LRESULT CALLBACK DisplayWatcher::WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- switch(uMsg)
- {
- case WM_CREATE:
- case WM_SIZE:
- case WM_MOVE:
- {
- DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
- if(This)
- {
- This->UpdatePosition();
- }
- break;
- }
-
- case WM_CHAR:
- case WM_KEYUP:
- {
- DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
- if(This)
- {
- if(This->m_bFullScreen && (wParam == 0x1B || wParam == VK_ESCAPE))
- {
- This->SetFullscreen(FALSE);
- }
- }
-
- break;
- }
- }
-
- return DefWindowProc(hWnd, uMsg, wParam, lParam);
+ switch(uMsg) {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE: {
+ DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
+ if(This) {
+ This->UpdatePosition();
+ }
+ break;
+ }
+
+ case WM_CHAR:
+ case WM_KEYUP: {
+ DisplayWatcher* This = dynamic_cast<DisplayWatcher*>((DisplayWatcher*)GetPropA(hWnd, "This"));
+ if(This) {
+ if(This->m_bFullScreen && (wParam == 0x1B || wParam == VK_ESCAPE)) {
+ This->SetFullscreen(FALSE);
+ }
+ }
+
+ break;
+ }
+ }
+
+ return DefWindowProc(hWnd, uMsg, wParam, lParam);
} \ No newline at end of file
diff --git a/plugins/pluginWinMF/internals/mf_display_watcher.h b/plugins/pluginWinMF/internals/mf_display_watcher.h
index d41d6a6..127fb35 100755
--- a/plugins/pluginWinMF/internals/mf_display_watcher.h
+++ b/plugins/pluginWinMF/internals/mf_display_watcher.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,25 +31,25 @@
class DisplayWatcher
{
public:
- DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr);
- virtual ~DisplayWatcher();
+ DisplayWatcher(HWND hWnd, IMFMediaSink* pMediaSink, HRESULT &hr);
+ virtual ~DisplayWatcher();
public:
- HRESULT Start();
- HRESULT SetFullscreen(BOOL bEnabled);
- HRESULT SetHwnd(HWND hWnd);
- HRESULT Stop();
+ HRESULT Start();
+ HRESULT SetFullscreen(BOOL bEnabled);
+ HRESULT SetHwnd(HWND hWnd);
+ HRESULT Stop();
private:
- void UpdatePosition();
- static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+ void UpdatePosition();
+ static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
private:
- IMFVideoDisplayControl *m_pDisplayControl;
- HWND m_hWnd;
- WNDPROC m_pWndProc;
- BOOL m_bStarted;
- BOOL m_bFullScreen;
+ IMFVideoDisplayControl *m_pDisplayControl;
+ HWND m_hWnd;
+ WNDPROC m_pWndProc;
+ BOOL m_bStarted;
+ BOOL m_bFullScreen;
};
#endif /* PLUGIN_WIN_MF_DISPLAY_WATCHER_H */
diff --git a/plugins/pluginWinMF/internals/mf_sample_grabber.cxx b/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
index 87aa6af..8d148a3 100755
--- a/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
+++ b/plugins/pluginWinMF/internals/mf_sample_grabber.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -28,108 +28,105 @@
// Create a new instance of the object.
HRESULT SampleGrabberCB::CreateInstance(const struct tmedia_producer_s* pcWrappedProducer, SampleGrabberCB **ppCB)
{
- assert(pcWrappedProducer);
+ assert(pcWrappedProducer);
- *ppCB = new (std::nothrow) SampleGrabberCB(pcWrappedProducer);
+ *ppCB = new (std::nothrow) SampleGrabberCB(pcWrappedProducer);
- if (ppCB == NULL)
- {
- return E_OUTOFMEMORY;
- }
- return S_OK;
+ if (*ppCB == NULL) {
+ return E_OUTOFMEMORY;
+ }
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::QueryInterface(REFIID riid, void** ppv)
{
- static const QITAB qit[] =
- {
- QITABENT(SampleGrabberCB, IMFSampleGrabberSinkCallback),
- QITABENT(SampleGrabberCB, IMFClockStateSink),
- { 0 }
- };
- return QISearch(this, qit, riid, ppv);
+ static const QITAB qit[] = {
+ QITABENT(SampleGrabberCB, IMFSampleGrabberSinkCallback),
+ QITABENT(SampleGrabberCB, IMFClockStateSink),
+ { 0 }
+ };
+ return QISearch(this, qit, riid, ppv);
}
STDMETHODIMP_(ULONG) SampleGrabberCB::AddRef()
{
- return InterlockedIncrement(&m_cRef);
+ return InterlockedIncrement(&m_cRef);
}
STDMETHODIMP_(ULONG) SampleGrabberCB::Release()
{
- ULONG cRef = InterlockedDecrement(&m_cRef);
- if (cRef == 0)
- {
- delete this;
- }
- return cRef;
+ ULONG cRef = InterlockedDecrement(&m_cRef);
+ if (cRef == 0) {
+ delete this;
+ }
+ return cRef;
}
// IMFClockStateSink methods.
-// In these example, the IMFClockStateSink methods do not perform any actions.
+// In this example, the IMFClockStateSink methods do not perform any actions.
// You can use these methods to track the state of the sample grabber sink.
STDMETHODIMP SampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockStart(%lld, %lld)", hnsSystemTime, llClockStartOffset);
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnClockStop(MFTIME hnsSystemTime)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockStop(%lld)", hnsSystemTime);
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnClockPause(MFTIME hnsSystemTime)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockPause(%lld)", hnsSystemTime);
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnClockRestart(MFTIME hnsSystemTime)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockRestart(%lld)", hnsSystemTime);
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnClockSetRate(%lld, %f)", hnsSystemTime, flRate);
+ return S_OK;
}
// IMFSampleGrabberSink methods.
STDMETHODIMP SampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pClock)
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnSetPresentationClock");
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnSetPresentationClock");
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnProcessSample(
- REFGUID guidMajorMediaType, DWORD dwSampleFlags,
- LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
- DWORD dwSampleSize)
+ REFGUID guidMajorMediaType, DWORD dwSampleFlags,
+ LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+ DWORD dwSampleSize)
{
- if (m_pWrappedProducer && TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback) {
+ if (m_pWrappedProducer && TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback) {
#if 1
- if (m_bMuted) {
- // Send zeros. Do not skip sending data to avoid NAT issues and session deconnection.
- // Some TelePresence systems disconnect the session when the remote peer stops sending video data.
- memset((void*)pSampleBuffer, 0, dwSampleSize);
- }
+ if (m_bMuted) {
+ // Send zeros. Do not skip sending data, to avoid NAT issues and session disconnection.
+ // Some TelePresence systems disconnect the session when the remote peer stops sending video data.
+ memset((void*)pSampleBuffer, 0, dwSampleSize);
+ }
#endif
- TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback_data, pSampleBuffer, dwSampleSize);
- }
+ TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback_data, pSampleBuffer, dwSampleSize);
+ }
- return S_OK;
+ return S_OK;
}
STDMETHODIMP SampleGrabberCB::OnShutdown()
{
- TSK_DEBUG_INFO("SampleGrabberCB::OnShutdown");
- return S_OK;
+ TSK_DEBUG_INFO("SampleGrabberCB::OnShutdown");
+ return S_OK;
} \ No newline at end of file
diff --git a/plugins/pluginWinMF/internals/mf_sample_grabber.h b/plugins/pluginWinMF/internals/mf_sample_grabber.h
index 858f3c1..9ea239d 100755
--- a/plugins/pluginWinMF/internals/mf_sample_grabber.h
+++ b/plugins/pluginWinMF/internals/mf_sample_grabber.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,9 +31,9 @@
// Sample Grabber callback [Declaration]
// http://msdn.microsoft.com/en-us/library/windows/desktop/hh184779(v=vs.85).aspx
//
-class SampleGrabberCB : public IMFSampleGrabberSinkCallback
+class SampleGrabberCB : public IMFSampleGrabberSinkCallback
{
- bool m_bMuted;
+ bool m_bMuted;
long m_cRef;
const struct tmedia_producer_s* m_pWrappedProducer;
@@ -42,7 +42,9 @@ class SampleGrabberCB : public IMFSampleGrabberSinkCallback
public:
static HRESULT CreateInstance(const struct tmedia_producer_s* pcWrappedProducer, SampleGrabberCB **ppCB);
- void SetMute(bool bMuted) { m_bMuted = bMuted; }
+ void SetMute(bool bMuted) {
+ m_bMuted = bMuted;
+ }
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
@@ -59,8 +61,8 @@ public:
// IMFSampleGrabberSinkCallback methods
STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock);
STDMETHODIMP OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
- LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
- DWORD dwSampleSize);
+ LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+ DWORD dwSampleSize);
STDMETHODIMP OnShutdown();
};
diff --git a/plugins/pluginWinMF/internals/mf_sample_queue.cxx b/plugins/pluginWinMF/internals/mf_sample_queue.cxx
index 05c2bc6..32630b8 100755
--- a/plugins/pluginWinMF/internals/mf_sample_queue.cxx
+++ b/plugins/pluginWinMF/internals/mf_sample_queue.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -21,39 +21,38 @@
#include <assert.h>
MFSampleQueue::MFSampleQueue()
-: m_nRefCount(1)
-, m_nCount(0)
+ : m_nRefCount(1)
+ , m_nCount(0)
{
- InitializeCriticalSection(&m_critSec);
+ InitializeCriticalSection(&m_critSec);
- m_anchor.next = &m_anchor;
- m_anchor.prev = &m_anchor;
+ m_anchor.next = &m_anchor;
+ m_anchor.prev = &m_anchor;
}
MFSampleQueue::~MFSampleQueue()
{
- assert(m_nRefCount == 0);
+ assert(m_nRefCount == 0);
- Clear();
+ Clear();
- DeleteCriticalSection(&m_critSec);
+ DeleteCriticalSection(&m_critSec);
}
STDMETHODIMP MFSampleQueue::QueryInterface(REFIID iid, void** ppv)
{
- return E_NOTIMPL;
+ return E_NOTIMPL;
}
STDMETHODIMP_(ULONG) MFSampleQueue::AddRef()
{
- return InterlockedIncrement(&m_nRefCount);
+ return InterlockedIncrement(&m_nRefCount);
}
STDMETHODIMP_(ULONG) MFSampleQueue::Release()
{
- ULONG uCount = InterlockedDecrement(&m_nRefCount);
- if (uCount == 0)
- {
+ ULONG uCount = InterlockedDecrement(&m_nRefCount);
+ if (uCount == 0) {
delete this;
}
// For thread safety, return a temporary variable.
@@ -62,20 +61,18 @@ STDMETHODIMP_(ULONG) MFSampleQueue::Release()
HRESULT MFSampleQueue::Queue(IMFSample* item)
{
- if (item == NULL)
- {
+ if (item == NULL) {
return E_POINTER;
}
Node *pNode = new (std::nothrow) Node(item);
- if (pNode == NULL)
- {
+ if (pNode == NULL) {
return E_OUTOFMEMORY;
}
item->AddRef();
- EnterCriticalSection(&m_critSec);
+ EnterCriticalSection(&m_critSec);
Node *pBefore = m_anchor.prev;
@@ -87,25 +84,23 @@ HRESULT MFSampleQueue::Queue(IMFSample* item)
pNode->prev = pBefore;
pNode->next = pAfter;
- m_nCount++;
+ m_nCount++;
- LeaveCriticalSection(&m_critSec);
+ LeaveCriticalSection(&m_critSec);
return S_OK;
}
-HRESULT MFSampleQueue::Dequeue(IMFSample* *ppItem)
+HRESULT MFSampleQueue::Dequeue(IMFSample**ppItem)
{
- if (ppItem == NULL)
- {
+ if (ppItem == NULL) {
return E_POINTER;
}
- EnterCriticalSection(&m_critSec);
+ EnterCriticalSection(&m_critSec);
- if (IsEmpty())
- {
- LeaveCriticalSection(&m_critSec);
+ if (IsEmpty()) {
+ LeaveCriticalSection(&m_critSec);
return E_FAIL;
}
@@ -120,24 +115,22 @@ HRESULT MFSampleQueue::Dequeue(IMFSample* *ppItem)
*ppItem = pNode->item;
delete pNode;
- m_nCount--;
+ m_nCount--;
- LeaveCriticalSection(&m_critSec);
+ LeaveCriticalSection(&m_critSec);
return S_OK;
}
HRESULT MFSampleQueue::Clear()
{
- EnterCriticalSection(&m_critSec);
+ EnterCriticalSection(&m_critSec);
- Node *n = m_anchor.next;
+ Node *n = m_anchor.next;
// Delete the nodes
- while (n != &m_anchor)
- {
- if (n->item)
- {
+ while (n != &m_anchor) {
+ if (n->item) {
n->item->Release();
}
@@ -150,9 +143,9 @@ HRESULT MFSampleQueue::Clear()
m_anchor.next = &m_anchor;
m_anchor.prev = &m_anchor;
- m_nCount = 0;
+ m_nCount = 0;
- LeaveCriticalSection(&m_critSec);
+ LeaveCriticalSection(&m_critSec);
- return S_OK;
+ return S_OK;
}
diff --git a/plugins/pluginWinMF/internals/mf_sample_queue.h b/plugins/pluginWinMF/internals/mf_sample_queue.h
index b42ecde..b110a06 100755
--- a/plugins/pluginWinMF/internals/mf_sample_queue.h
+++ b/plugins/pluginWinMF/internals/mf_sample_queue.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -32,49 +32,52 @@ class MFSampleQueue : public IUnknown
protected:
// Nodes in the linked list
- struct Node
- {
+ struct Node {
Node *prev;
Node *next;
IMFSample* item;
- Node() : prev(NULL), next(NULL)
- {
+ Node() : prev(NULL), next(NULL) {
}
- Node(IMFSample* item) : prev(NULL), next(NULL)
- {
+ Node(IMFSample* item) : prev(NULL), next(NULL) {
this->item = item;
}
- IMFSample* Item() const { return item; }
+ IMFSample* Item() const {
+ return item;
+ }
};
protected:
Node m_anchor;
- long m_nCount;
- CRITICAL_SECTION m_critSec;
+ long m_nCount;
+ CRITICAL_SECTION m_critSec;
private:
- long m_nRefCount;
+ long m_nRefCount;
public:
MFSampleQueue();
virtual ~MFSampleQueue();
- // IUnknown
+ // IUnknown
STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
STDMETHODIMP_(ULONG) AddRef();
STDMETHODIMP_(ULONG) Release();
HRESULT Queue(IMFSample* item);
- HRESULT Dequeue(IMFSample* *ppItem);
+ HRESULT Dequeue(IMFSample**ppItem);
HRESULT Clear();
- inline BOOL IsEmpty() const { return m_anchor.next == &m_anchor; }
- inline long Count() { return m_nCount; }
+ inline BOOL IsEmpty() const {
+ return m_anchor.next == &m_anchor;
+ }
+ inline long Count() {
+ return m_nCount;
+ }
};
diff --git a/plugins/pluginWinMF/internals/mf_utils.cxx b/plugins/pluginWinMF/internals/mf_utils.cxx
index d1f326c..bcb63f1 100755
--- a/plugins/pluginWinMF/internals/mf_utils.cxx
+++ b/plugins/pluginWinMF/internals/mf_utils.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -74,35 +74,34 @@ const TOPOID MFUtils::g_ullTopoIdSource = 333;
const TOPOID MFUtils::g_ullTopoIdVideoProcessor = 444;
// Preferred VideoSubTypes
-static const VideoSubTypeGuidPair PreferredVideoSubTypeGuidPairs[] =
-{
- { tmedia_chroma_yuv420p, MFVideoFormat_I420 },
- { tmedia_chroma_nv12, MFVideoFormat_NV12 },
- { tmedia_chroma_uyvy422, MFVideoFormat_UYVY },
- { tmedia_chroma_yuyv422, MFVideoFormat_YUY2 },
- /* TODO: Add more YUV formats */
- { tmedia_chroma_rgb565le, MFVideoFormat_RGB565 },
- { tmedia_chroma_bgr24, MFVideoFormat_RGB24 },
- { tmedia_chroma_rgb32, MFVideoFormat_RGB32 },
+static const VideoSubTypeGuidPair PreferredVideoSubTypeGuidPairs[] = {
+ { tmedia_chroma_yuv420p, MFVideoFormat_I420 },
+ { tmedia_chroma_nv12, MFVideoFormat_NV12 },
+ { tmedia_chroma_uyvy422, MFVideoFormat_UYVY },
+ { tmedia_chroma_yuyv422, MFVideoFormat_YUY2 },
+ /* TODO: Add more YUV formats */
+ { tmedia_chroma_rgb565le, MFVideoFormat_RGB565 },
+ { tmedia_chroma_bgr24, MFVideoFormat_RGB24 },
+ { tmedia_chroma_rgb32, MFVideoFormat_RGB32 },
};
static const tsk_size_t PreferredVideoSubTypeGuidPairsCount = sizeof(PreferredVideoSubTypeGuidPairs)/sizeof(PreferredVideoSubTypeGuidPairs[0]);
// Video Processor
-DEFINE_GUID(CLSID_VideoProcessorMFT,
- 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, 0xc9, 0x82);
+DEFINE_GUID(CLSID_VideoProcessorMFT,
+ 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, 0xc9, 0x82);
// {4BE8D3C0-0515-4A37-AD55-E4BAE19AF471}
DEFINE_GUID(CLSID_MF_INTEL_H264EncFilter, // Intel Quick Sync Encoder
-0x4be8d3c0, 0x0515, 0x4a37, 0xad, 0x55, 0xe4, 0xba, 0xe1, 0x9a, 0xf4, 0x71);
+ 0x4be8d3c0, 0x0515, 0x4a37, 0xad, 0x55, 0xe4, 0xba, 0xe1, 0x9a, 0xf4, 0x71);
// {0855C9AC-BC6F-4371-8954-671CCD4EC16F}
DEFINE_GUID(CLSID_MF_INTEL_H264DecFilter, // Intel Quick Sync Decoder
-0x0855c9ac, 0xbc6f, 0x4371, 0x89, 0x54, 0x67, 0x1c, 0xcd, 0x4e, 0xc1, 0x6f);
+ 0x0855c9ac, 0xbc6f, 0x4371, 0x89, 0x54, 0x67, 0x1c, 0xcd, 0x4e, 0xc1, 0x6f);
#if WINVER < 0x0602/* From "sdkddkver.h" and defines the SDK version not the host */
// 6ca50344-051a-4ded-9779-a43305165e35
DEFINE_GUID(CLSID_CMSH264EncoderMFT, // MS H.264 encoder
-0x6ca50344, 0x051a, 0x4ded, 0x97, 0x79, 0xa4, 0x33, 0x05, 0x16, 0x5e, 0x35);
+ 0x6ca50344, 0x051a, 0x4ded, 0x97, 0x79, 0xa4, 0x33, 0x05, 0x16, 0x5e, 0x35);
#endif /* WINVER */
#define IsWin7_OrLater(dwMajorVersion, dwMinorVersion) ( (dwMajorVersion > 6) || ( (dwMajorVersion == 6) && (dwMinorVersion >= 1) ) )
@@ -111,249 +110,231 @@ DEFINE_GUID(CLSID_CMSH264EncoderMFT, // MS H.264 encoder
HRESULT MFUtils::Startup()
{
- if(!g_bStarted)
- {
- HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
- if(SUCCEEDED(hr) || hr == 0x80010106) // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
- {
- hr = MFStartup(MF_VERSION);
- }
- g_bStarted = SUCCEEDED(hr);
-
- OSVERSIONINFO osvi;
- ZeroMemory(&osvi, sizeof(OSVERSIONINFO));
- osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
- GetVersionEx(&osvi);
- g_dwMajorVersion = osvi.dwMajorVersion;
- g_dwMinorVersion = osvi.dwMinorVersion;
-
- return hr;
- }
- return S_OK;
+ if(!g_bStarted) {
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+ if(SUCCEEDED(hr) || hr == 0x80010106) { // 0x80010106 when called from managed code (e.g. Boghe) - More info: http://support.microsoft.com/kb/824480
+ hr = MFStartup(MF_VERSION);
+ }
+ g_bStarted = SUCCEEDED(hr);
+
+ OSVERSIONINFO osvi;
+ ZeroMemory(&osvi, sizeof(OSVERSIONINFO));
+ osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+ GetVersionEx(&osvi);
+ g_dwMajorVersion = osvi.dwMajorVersion;
+ g_dwMinorVersion = osvi.dwMinorVersion;
+
+ return hr;
+ }
+ return S_OK;
}
HRESULT MFUtils::Shutdown()
{
- if(g_bStarted)
- {
- g_bStarted = false;
- return S_OK;
- }
- return S_OK;
+ if(g_bStarted) {
+ g_bStarted = false;
+ return S_OK;
+ }
+ return S_OK;
}
BOOL MFUtils::IsD3D9Supported()
{
- if (MFUtils::g_bD3D9Checked)
- {
- return MFUtils::g_bD3D9Supported;
- }
- MFUtils::g_bD3D9Checked = TRUE;
- HRESULT hr = S_OK;
- IDirect3D9* pD3D = NULL;
- D3DDISPLAYMODE mode = { 0 };
- D3DPRESENT_PARAMETERS pp = {0};
- IDirect3DDevice9* pDevice = NULL;
-
- CHECK_HR(hr = MFUtils::Startup());
-
- if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
- {
+ if (MFUtils::g_bD3D9Checked) {
+ return MFUtils::g_bD3D9Supported;
+ }
+ MFUtils::g_bD3D9Checked = TRUE;
+ HRESULT hr = S_OK;
+ IDirect3D9* pD3D = NULL;
+ D3DDISPLAYMODE mode = { 0 };
+ D3DPRESENT_PARAMETERS pp = {0};
+ IDirect3DDevice9* pDevice = NULL;
+
+ CHECK_HR(hr = MFUtils::Startup());
+
+ if (!(pD3D = Direct3DCreate9(D3D_SDK_VERSION))) {
CHECK_HR(hr = E_OUTOFMEMORY);
}
hr = pD3D->GetAdapterDisplayMode(
- D3DADAPTER_DEFAULT,
- &mode
- );
- if (FAILED(hr))
- {
- goto bail;
- }
+ D3DADAPTER_DEFAULT,
+ &mode
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
hr = pD3D->CheckDeviceType(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- mode.Format,
- D3DFMT_X8R8G8B8,
- TRUE // windowed
- );
- if (FAILED(hr))
- {
- goto bail;
- }
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ mode.Format,
+ D3DFMT_X8R8G8B8,
+ TRUE // windowed
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
pp.BackBufferFormat = D3DFMT_X8R8G8B8;
pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
- pp.Windowed = TRUE;
- pp.hDeviceWindow = GetDesktopWindow();
+ pp.Windowed = TRUE;
+ pp.hDeviceWindow = GetDesktopWindow();
hr = pD3D->CreateDevice(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- pp.hDeviceWindow,
- D3DCREATE_HARDWARE_VERTEXPROCESSING,
- &pp,
- &pDevice
- );
- if (FAILED(hr))
- {
- goto bail;
- }
-
- // Everythings is OK
- MFUtils::g_bD3D9Supported = TRUE;
- TSK_DEBUG_INFO("D3D9 supported");
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ pp.hDeviceWindow,
+ D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &pp,
+ &pDevice
+ );
+ if (FAILED(hr)) {
+ goto bail;
+ }
+
+ // Everythings is OK
+ MFUtils::g_bD3D9Supported = TRUE;
+ TSK_DEBUG_INFO("D3D9 supported");
bail:
- if (!MFUtils::g_bD3D9Supported) {
- TSK_DEBUG_WARN("D3D9 not supported");
- }
- SafeRelease(&pDevice);
- SafeRelease(&pD3D);
- return MFUtils::g_bD3D9Supported;
+ if (!MFUtils::g_bD3D9Supported) {
+ TSK_DEBUG_WARN("D3D9 not supported");
+ }
+ SafeRelease(&pDevice);
+ SafeRelease(&pD3D);
+ return MFUtils::g_bD3D9Supported;
}
BOOL MFUtils::IsLowLatencyH264Supported()
{
- if(MFUtils::g_bLowLatencyH264Checked)
- {
- return MFUtils::g_bLowLatencyH264Supported;
- }
+ if(MFUtils::g_bLowLatencyH264Checked) {
+ return MFUtils::g_bLowLatencyH264Supported;
+ }
#if PLUGIN_MF_DISABLE_CODECS
- MFUtils::g_bLowLatencyH264Checked = TRUE;
- MFUtils::g_bLowLatencyH264Supported = FALSE;
+ MFUtils::g_bLowLatencyH264Checked = TRUE;
+ MFUtils::g_bLowLatencyH264Supported = FALSE;
#else
- Startup();
-
- HRESULT hr = S_OK;
- IMFTransform *pEncoderMFT = NULL;
- IMFTransform *pDecoderMFT = NULL;
- MFCodecVideoH264* pEncoderCodec = NULL;
- MFCodecVideoH264* pDecoderCodec = NULL;
-
- static const BOOL IsEncoderYes = TRUE;
-
- // Encoder
- hr = MFUtils::GetBestCodec(IsEncoderYes, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pEncoderMFT);
- if(FAILED(hr))
- {
- TSK_DEBUG_INFO("No low latency H.264 encoder");
- goto bail;
- }
-
- // Decoder
- hr = MFUtils::GetBestCodec(!IsEncoderYes, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pDecoderMFT);
- if(FAILED(hr))
- {
- TSK_DEBUG_INFO("No low latency H.264 decoder");
- goto bail;
- }
-
- // Make sure both encoder and decoder are working well. Check encoding/decoding 1080p@30 would work.
-
- TSK_DEBUG_INFO("Probing H.264 MFT encoder...");
- pEncoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder, pEncoderMFT);
- if(!pEncoderCodec)
- {
- CHECK_HR(hr = E_FAIL);
- }
- CHECK_HR(hr = pEncoderCodec->Initialize(
- 30, // FPS
- 1920, // WIDTH
- 1080, // HEIGHT
- tmedia_get_video_bandwidth_kbps_2(1920, 1080, 30) * 1024) // BITRATE
- );
- CHECK_HR(pEncoderCodec->IsSetSliceMaxSizeInBytesSupported(MFUtils::g_bLowLatencyH264SupportsMaxSliceSize));
-
- TSK_DEBUG_INFO("Probing H.264 MFT decoder...");
- pDecoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder, pDecoderMFT);
- if(!pDecoderCodec)
- {
- CHECK_HR(hr = E_FAIL);
- }
- CHECK_HR(hr = pDecoderCodec->Initialize(
- 30, // FPS
- 1920, // WIDTH
- 1080 // HEIGHT
- ));
+ Startup();
+
+ HRESULT hr = S_OK;
+ IMFTransform *pEncoderMFT = NULL;
+ IMFTransform *pDecoderMFT = NULL;
+ MFCodecVideoH264* pEncoderCodec = NULL;
+ MFCodecVideoH264* pDecoderCodec = NULL;
+
+ static const BOOL IsEncoderYes = TRUE;
+
+ // Encoder
+ hr = MFUtils::GetBestCodec(IsEncoderYes, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pEncoderMFT);
+ if(FAILED(hr)) {
+ TSK_DEBUG_INFO("No low latency H.264 encoder");
+ goto bail;
+ }
+
+ // Decoder
+ hr = MFUtils::GetBestCodec(!IsEncoderYes, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pDecoderMFT);
+ if(FAILED(hr)) {
+ TSK_DEBUG_INFO("No low latency H.264 decoder");
+ goto bail;
+ }
+
+ // Make sure both encoder and decoder are working well. Check that encoding/decoding 1080p@30 works.
+
+ TSK_DEBUG_INFO("Probing H.264 MFT encoder...");
+ pEncoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder, pEncoderMFT);
+ if(!pEncoderCodec) {
+ CHECK_HR(hr = E_FAIL);
+ }
+ CHECK_HR(hr = pEncoderCodec->Initialize(
+ 30, // FPS
+ 1920, // WIDTH
+ 1080, // HEIGHT
+ tmedia_get_video_bandwidth_kbps_2(1920, 1080, 30) * 1024) // BITRATE
+ );
+ CHECK_HR(pEncoderCodec->IsSetSliceMaxSizeInBytesSupported(MFUtils::g_bLowLatencyH264SupportsMaxSliceSize));
+
+ TSK_DEBUG_INFO("Probing H.264 MFT decoder...");
+ pDecoderCodec = MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder, pDecoderMFT);
+ if(!pDecoderCodec) {
+ CHECK_HR(hr = E_FAIL);
+ }
+ CHECK_HR(hr = pDecoderCodec->Initialize(
+ 30, // FPS
+ 1920, // WIDTH
+ 1080 // HEIGHT
+ ));
bail:
- MFUtils::g_bLowLatencyH264Checked = TRUE;
- MFUtils::g_bLowLatencyH264Supported = SUCCEEDED(hr) ? TRUE : FALSE;
- SafeRelease(&pEncoderMFT);
- SafeRelease(&pEncoderCodec);
- SafeRelease(&pDecoderMFT);
- SafeRelease(&pDecoderCodec);
+ MFUtils::g_bLowLatencyH264Checked = TRUE;
+ MFUtils::g_bLowLatencyH264Supported = SUCCEEDED(hr) ? TRUE : FALSE;
+ SafeRelease(&pEncoderMFT);
+ SafeRelease(&pEncoderCodec);
+ SafeRelease(&pDecoderMFT);
+ SafeRelease(&pDecoderCodec);
#endif /* PLUGIN_MF_DISABLE_CODECS */
- return MFUtils::g_bLowLatencyH264Supported;
+ return MFUtils::g_bLowLatencyH264Supported;
}
BOOL MFUtils::IsLowLatencyH264SupportsMaxSliceSize()
{
- return MFUtils::IsLowLatencyH264Supported() && MFUtils::g_bLowLatencyH264SupportsMaxSliceSize;
+ return MFUtils::IsLowLatencyH264Supported() && MFUtils::g_bLowLatencyH264SupportsMaxSliceSize;
}
HRESULT MFUtils::IsAsyncMFT(
- IMFTransform *pMFT, // The MFT to check
- BOOL* pbIsAsync // Whether the MFT is Async
- )
+ IMFTransform *pMFT, // The MFT to check
+ BOOL* pbIsAsync // Whether the MFT is Async
+)
{
- if(!pbIsAsync || !pMFT)
- {
- return E_POINTER;
- }
+ if(!pbIsAsync || !pMFT) {
+ return E_POINTER;
+ }
- IMFAttributes *pAttributes = NULL;
- UINT32 nIsAsync = 0;
- HRESULT hr = S_OK;
+ IMFAttributes *pAttributes = NULL;
+ UINT32 nIsAsync = 0;
+ HRESULT hr = S_OK;
hr = pMFT->GetAttributes(&pAttributes);
- if(SUCCEEDED(hr))
- {
- hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nIsAsync);
- }
-
- // Never fails: just say not Async
- CHECK_HR(hr = S_OK);
+ if(SUCCEEDED(hr)) {
+ hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nIsAsync);
+ }
+
+ // Never fails: just say not Async
+ CHECK_HR(hr = S_OK);
- *pbIsAsync = !!nIsAsync;
+ *pbIsAsync = !!nIsAsync;
bail:
- return hr;
+ return hr;
}
HRESULT MFUtils::UnlockAsyncMFT(
- IMFTransform *pMFT // The MFT to unlock
- )
+ IMFTransform *pMFT // The MFT to unlock
+)
{
- IMFAttributes *pAttributes = NULL;
- UINT32 nValue = 0;
- HRESULT hr = S_OK;
+ IMFAttributes *pAttributes = NULL;
+ UINT32 nValue = 0;
+ HRESULT hr = S_OK;
hr = pMFT->GetAttributes(&pAttributes);
- if(FAILED(hr))
- {
- hr = S_OK;
- goto bail;
- }
-
- hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nValue);
- if(FAILED(hr))
- {
- hr = S_OK;
- goto bail;
- }
-
- if(nValue == TRUE)
- {
- CHECK_HR(hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE));
- }
-
+ if(FAILED(hr)) {
+ hr = S_OK;
+ goto bail;
+ }
+
+ hr = pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &nValue);
+ if(FAILED(hr)) {
+ hr = S_OK;
+ goto bail;
+ }
+
+ if(nValue == TRUE) {
+ CHECK_HR(hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE));
+ }
+
bail:
- SafeRelease(&pAttributes);
- return hr;
+ SafeRelease(&pAttributes);
+ return hr;
}
//-------------------------------------------------------------------
// CreatePCMAudioType
@@ -367,7 +348,7 @@ HRESULT MFUtils::CreatePCMAudioType(
UINT32 bitsPerSample, // Bits per sample
UINT32 cChannels, // Number of channels
IMFMediaType **ppType // Receives a pointer to the media type.
- )
+)
{
HRESULT hr = S_OK;
@@ -381,48 +362,39 @@ HRESULT MFUtils::CreatePCMAudioType(
hr = MFCreateMediaType(&pType);
// Set attributes on the type.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, cChannels);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sampleRate);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, blockAlign);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, bytesPerSecond);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
// Return the type to the caller.
*ppType = pType;
(*ppType)->AddRef();
@@ -440,13 +412,13 @@ HRESULT MFUtils::CreatePCMAudioType(
// format.
//-------------------------------------------------------------------
HRESULT MFUtils::CreateVideoType(
- const GUID* subType, // video subType
- IMFMediaType **ppType, // Receives a pointer to the media type.
- UINT32 unWidth, // Video width (0 to ignore)
- UINT32 unHeight // Video height (0 to ignore)
- )
+ const GUID* subType, // video subType
+ IMFMediaType **ppType, // Receives a pointer to the media type.
+ UINT32 unWidth, // Video width (0 to ignore)
+ UINT32 unHeight // Video height (0 to ignore)
+)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
IMFMediaType *pType = NULL;
@@ -456,17 +428,16 @@ HRESULT MFUtils::CreateVideoType(
CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, *subType));
- CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // UnCompressed
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // Uncompressed
- CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE)); // UnCompressed
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE)); // Uncompressed
- CHECK_HR(hr = pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+ CHECK_HR(hr = pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+ if(unWidth > 0 && unHeight > 0) {
+ CHECK_HR(hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, unWidth, unHeight));
+ }
- if(unWidth > 0 && unHeight > 0)
- {
- CHECK_HR(hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, unWidth, unHeight));
- }
-
*ppType = pType;
(*ppType)->AddRef();
@@ -481,7 +452,7 @@ bail:
//-------------------------------------------------------------------
HRESULT MFUtils::ValidateVideoFormat(IMFMediaType *pmt)
{
- GUID major_type = GUID_NULL;
+ GUID major_type = GUID_NULL;
GUID subtype = GUID_NULL;
MFVideoInterlaceMode interlace = MFVideoInterlace_Unknown;
UINT32 val = 0;
@@ -492,8 +463,7 @@ HRESULT MFUtils::ValidateVideoFormat(IMFMediaType *pmt)
// Major type must be video.
CHECK_HR(hr = pmt->GetGUID(MF_MT_MAJOR_TYPE, &major_type));
- if (major_type != MFMediaType_Video)
- {
+ if (major_type != MFMediaType_Video) {
CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
}
@@ -504,24 +474,20 @@ HRESULT MFUtils::ValidateVideoFormat(IMFMediaType *pmt)
#if 0
// Look for the subtype in our list of accepted types.
- for (DWORD i = 0; i < g_NumVideoSubtypes; i++)
- {
- if (subtype == *g_VideoSubtypes[i])
- {
+ for (DWORD i = 0; i < g_NumVideoSubtypes; i++) {
+ if (subtype == *g_VideoSubtypes[i]) {
bFoundMatchingSubtype = TRUE;
break;
}
}
- if (!bFoundMatchingSubtype)
- {
+ if (!bFoundMatchingSubtype) {
CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
}
#endif
// Video must be progressive frames.
CHECK_HR(hr = pmt->GetUINT32(MF_MT_INTERLACE_MODE, (UINT32*)&interlace));
- if (interlace != MFVideoInterlace_Progressive)
- {
+ if (interlace != MFVideoInterlace_Progressive) {
CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
}
@@ -533,9 +499,9 @@ HRESULT MFUtils::ConvertVideoTypeToUncompressedType(
IMFMediaType *pType, // Pointer to an encoded video type.
const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
IMFMediaType **ppType // Receives a matching uncompressed video type.
- )
+)
{
- IMFMediaType *pTypeUncomp = NULL;
+ IMFMediaType *pTypeUncomp = NULL;
HRESULT hr = S_OK;
GUID majortype = { 0 };
@@ -543,59 +509,51 @@ HRESULT MFUtils::ConvertVideoTypeToUncompressedType(
hr = pType->GetMajorType(&majortype);
- if (majortype != MFMediaType_Video)
- {
+ if (majortype != MFMediaType_Video) {
return MF_E_INVALIDMEDIATYPE;
}
// Create a new media type and copy over all of the items.
// This ensures that extended color information is retained.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFCreateMediaType(&pTypeUncomp);
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pType->CopyAllItems(pTypeUncomp);
}
// Set the subtype.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pTypeUncomp->SetGUID(MF_MT_SUBTYPE, subtype);
}
// Uncompressed means all samples are independent.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pTypeUncomp->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
}
// Fix up PAR if not set on the original type.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFGetAttributeRatio(
- pTypeUncomp,
- MF_MT_PIXEL_ASPECT_RATIO,
- (UINT32*)&par.Numerator,
- (UINT32*)&par.Denominator
- );
+ pTypeUncomp,
+ MF_MT_PIXEL_ASPECT_RATIO,
+ (UINT32*)&par.Numerator,
+ (UINT32*)&par.Denominator
+ );
// Default to square pixels.
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
hr = MFSetAttributeRatio(
- pTypeUncomp,
- MF_MT_PIXEL_ASPECT_RATIO,
- 1, 1
- );
+ pTypeUncomp,
+ MF_MT_PIXEL_ASPECT_RATIO,
+ 1, 1
+ );
}
}
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
*ppType = pTypeUncomp;
(*ppType)->AddRef();
}
@@ -605,21 +563,21 @@ HRESULT MFUtils::ConvertVideoTypeToUncompressedType(
}
HRESULT MFUtils::CreateMediaSample(
- DWORD cbData, // Maximum buffer size
- IMFSample **ppSample // Receives the sample
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
)
{
- assert(ppSample);
+ assert(ppSample);
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
IMFSample *pSample = NULL;
IMFMediaBuffer *pBuffer = NULL;
- CHECK_HR(hr = MFCreateSample(&pSample));
- CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
+ CHECK_HR(hr = MFCreateSample(&pSample));
+ CHECK_HR(hr = MFCreateMemoryBuffer(cbData, &pBuffer));
CHECK_HR(hr = pSample->AddBuffer(pBuffer));
-
+
*ppSample = pSample;
(*ppSample)->AddRef();
@@ -631,222 +589,198 @@ bail:
// Gets the best encoder or decoder. It is up to the caller to release the returned pointer.
HRESULT MFUtils::GetBestCodec(
- BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
- const GUID& mediaType, // The MediaType
- const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
- const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
- IMFTransform **ppMFT // Receives the decoder/encoder transform
- )
+ BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
+ const GUID& mediaType, // The MediaType
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
+ IMFTransform **ppMFT // Receives the decoder/encoder transform
+)
{
- assert(ppMFT);
- assert(mediaType == MFMediaType_Video || mediaType == MFMediaType_Audio); // only audio and video codecs are support for now
-
- *ppMFT = NULL;
-
- HRESULT hr = S_OK;
-
- if(outputFormat == MFVideoFormat_H264 || inputFormat == MFVideoFormat_H264)
- {
- if(bEncoder)
- {
- // Force using Intel Quick Sync Encoder
- hr = CoCreateInstance(CLSID_MF_INTEL_H264EncFilter, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
- if(SUCCEEDED(hr) && *ppMFT)
- {
- TSK_DEBUG_INFO("Using Intel Quick Sync encoder :)");
- return hr;
- }
- TSK_DEBUG_INFO("Not using Intel Quick Sync encoder :(");
- }
- else
- {
+ assert(ppMFT);
+    assert(mediaType == MFMediaType_Video || mediaType == MFMediaType_Audio); // only audio and video codecs are supported for now
+
+ *ppMFT = NULL;
+
+ HRESULT hr = S_OK;
+
+ if(outputFormat == MFVideoFormat_H264 || inputFormat == MFVideoFormat_H264) {
+ if(bEncoder) {
+ // Force using Intel Quick Sync Encoder
+ hr = CoCreateInstance(CLSID_MF_INTEL_H264EncFilter, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
+ if(SUCCEEDED(hr) && *ppMFT) {
+ TSK_DEBUG_INFO("Using Intel Quick Sync encoder :)");
+ return hr;
+ }
+ TSK_DEBUG_INFO("Not using Intel Quick Sync encoder :(");
+ }
+ else {
#if !PLUGIN_MF_DISABLE_ASYNC_DECODERS // Intel Quick Sync decoder is asynchronous
- // Force using Intel Quick Sync Decoder
- hr = CoCreateInstance(CLSID_MF_INTEL_H264DecFilter, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
+ // Force using Intel Quick Sync Decoder
+ hr = CoCreateInstance(CLSID_MF_INTEL_H264DecFilter, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(ppMFT));
#endif
- if(SUCCEEDED(hr) && *ppMFT)
- {
- TSK_DEBUG_INFO("Using Intel Quick Sync decoder :)");
- return hr;
- }
- TSK_DEBUG_INFO("Not using Intel Quick Sync decoder :(");
- }
- }
-
- UINT32 count = 0;
- BOOL bAsync = FALSE;
- GUID guidActivateCLSID = GUID_NULL;
-
- IMFActivate **ppActivate = NULL;
-
- MFT_REGISTER_TYPE_INFO infoInput = { mediaType, inputFormat };
- MFT_REGISTER_TYPE_INFO infoOutput = { mediaType, outputFormat };
-
- UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
- MFT_ENUM_FLAG_SYNCMFT |
- MFT_ENUM_FLAG_ASYNCMFT |
- MFT_ENUM_FLAG_LOCALMFT |
- MFT_ENUM_FLAG_TRANSCODE_ONLY | // Otherwise Intel Quick Sync will not be listed
- MFT_ENUM_FLAG_SORTANDFILTER;
-
- hr = MFTEnumEx(
- (mediaType == MFMediaType_Video) ? (bEncoder ? MFT_CATEGORY_VIDEO_ENCODER : MFT_CATEGORY_VIDEO_DECODER) : (bEncoder ? MFT_CATEGORY_AUDIO_ENCODER : MFT_CATEGORY_AUDIO_DECODER),
- unFlags,
- (inputFormat == GUID_NULL) ? NULL : &infoInput, // Input type
- (outputFormat == GUID_NULL) ? NULL : &infoOutput, // Output type
- &ppActivate,
- &count
- );
-
- for(UINT32 i = 0; i < count; ++i)
- {
- SafeRelease(ppMFT);
- hr = ppActivate[i]->GetGUID(MFT_TRANSFORM_CLSID_Attribute, &guidActivateCLSID);
- if(FAILED(hr))
- {
- continue;
- }
-
- if(bEncoder)
- {
- // Encoder
- if(guidActivateCLSID == CLSID_CMSH264EncoderMFT) // MS H.264 encoder ?
- {
- if(PLUGIN_MF_DISABLE_MS_H264_ENCODER)
- {
- // Microsoft H.264 encoder is disabled
- TSK_DEBUG_INFO("MS H.264 encoder is disabled...skipping");
- continue;
- }
- if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion))
- {
- // Microsoft H.264 encoder doesn't support low latency on Win7.
- TSK_DEBUG_INFO("MS H.264 encoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
- continue;
- }
- }
- }
- else
- {
- // Decoder
- if(guidActivateCLSID == CLSID_CMSH264DecoderMFT) // MS H.264 decoder ?
- {
- if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion))
- {
- // Microsoft H.264 decoder doesn't support low latency on Win7.
- TSK_DEBUG_INFO("MS H.264 decoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
- continue;
- }
- }
- }
-
- hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppMFT));
- if(SUCCEEDED(hr) && *ppMFT) // For now we just get the first one. FIXME: Give HW encoders/decoders higher priority.
- {
- if(bEncoder)
- {
- // Encoder
-
- }
- else
- {
- // Decoder
+ if(SUCCEEDED(hr) && *ppMFT) {
+ TSK_DEBUG_INFO("Using Intel Quick Sync decoder :)");
+ return hr;
+ }
+ TSK_DEBUG_INFO("Not using Intel Quick Sync decoder :(");
+ }
+ }
+
+ UINT32 count = 0;
+ BOOL bAsync = FALSE;
+ GUID guidActivateCLSID = GUID_NULL;
+
+ IMFActivate **ppActivate = NULL;
+
+ MFT_REGISTER_TYPE_INFO infoInput = { mediaType, inputFormat };
+ MFT_REGISTER_TYPE_INFO infoOutput = { mediaType, outputFormat };
+
+ UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
+ MFT_ENUM_FLAG_SYNCMFT |
+ MFT_ENUM_FLAG_ASYNCMFT |
+ MFT_ENUM_FLAG_LOCALMFT |
+ MFT_ENUM_FLAG_TRANSCODE_ONLY | // Otherwise Intel Quick Sync will not be listed
+ MFT_ENUM_FLAG_SORTANDFILTER;
+
+ hr = MFTEnumEx(
+ (mediaType == MFMediaType_Video) ? (bEncoder ? MFT_CATEGORY_VIDEO_ENCODER : MFT_CATEGORY_VIDEO_DECODER) : (bEncoder ? MFT_CATEGORY_AUDIO_ENCODER : MFT_CATEGORY_AUDIO_DECODER),
+ unFlags,
+ (inputFormat == GUID_NULL) ? NULL : &infoInput, // Input type
+ (outputFormat == GUID_NULL) ? NULL : &infoOutput, // Output type
+ &ppActivate,
+ &count
+ );
+
+ for(UINT32 i = 0; i < count; ++i) {
+ SafeRelease(ppMFT);
+ hr = ppActivate[i]->GetGUID(MFT_TRANSFORM_CLSID_Attribute, &guidActivateCLSID);
+ if(FAILED(hr)) {
+ continue;
+ }
+
+ if(bEncoder) {
+ // Encoder
+ if(guidActivateCLSID == CLSID_CMSH264EncoderMFT) { // MS H.264 encoder ?
+ if(PLUGIN_MF_DISABLE_MS_H264_ENCODER) {
+ // Microsoft H.264 encoder is disabled
+ TSK_DEBUG_INFO("MS H.264 encoder is disabled...skipping");
+ continue;
+ }
+ if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion)) {
+ // Microsoft H.264 encoder doesn't support low latency on Win7.
+ TSK_DEBUG_INFO("MS H.264 encoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
+ continue;
+ }
+ }
+ }
+ else {
+ // Decoder
+ if(guidActivateCLSID == CLSID_CMSH264DecoderMFT) { // MS H.264 decoder ?
+ if(!IsWin8_OrLater(g_dwMajorVersion, g_dwMinorVersion)) {
+ // Microsoft H.264 decoder doesn't support low latency on Win7.
+ TSK_DEBUG_INFO("MS H.264 decoder doesn't support low delay on (%ld, %ld)...skipping", g_dwMajorVersion, g_dwMinorVersion);
+ continue;
+ }
+ }
+ }
+
+ hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppMFT));
+ if(SUCCEEDED(hr) && *ppMFT) { // For now we just get the first one. FIXME: Give HW encoders/decoders higher priority.
+ if(bEncoder) {
+ // Encoder
+
+ }
+ else {
+ // Decoder
#if PLUGIN_MF_DISABLE_ASYNC_DECODERS
- hr = IsAsyncMFT(*ppMFT, &bAsync);
- if(bAsync)
- {
- TSK_DEBUG_INFO("Skipping async decoder because not supported yet");
- continue; // Async decoders not supported yet
- }
+ hr = IsAsyncMFT(*ppMFT, &bAsync);
+ if(bAsync) {
+ TSK_DEBUG_INFO("Skipping async decoder because not supported yet");
+ continue; // Async decoders not supported yet
+ }
#endif
- }
- break;
- }
- }
-
- for (UINT32 i = 0; i < count; i++)
- {
- ppActivate[i]->Release();
- }
- CoTaskMemFree(ppActivate);
-
- return *ppMFT ? S_OK : MF_E_NOT_FOUND;
+ }
+ break;
+ }
+ }
+
+ for (UINT32 i = 0; i < count; i++) {
+ ppActivate[i]->Release();
+ }
+ CoTaskMemFree(ppActivate);
+
+ return *ppMFT ? S_OK : MF_E_NOT_FOUND;
}
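
// Usage sketch (hypothetical helper, shown only for illustration): request the
// best H.264 encoder that accepts NV12 input. Assumes Media Foundation has
// already been started and that the caller releases the returned MFT.
static HRESULT Example_GetH264Encoder(IMFTransform **ppEncoder)
{
    *ppEncoder = NULL;
    return MFUtils::GetBestCodec(
               TRUE,                // bEncoder: we want an encoder
               MFMediaType_Video,   // media type
               MFVideoFormat_NV12,  // input format
               MFVideoFormat_H264,  // output format
               ppEncoder);          // receives the transform (caller releases)
}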
HRESULT MFUtils::IsVideoProcessorSupported(BOOL *pbSupported)
{
- HRESULT hr = S_OK;
- IMFTransform *pTransform = NULL;
-
- if(!pbSupported)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pTransform));
- *pbSupported = SUCCEEDED(hr);
- if(FAILED(hr))
- {
- hr = S_OK; // not an error
- }
+ HRESULT hr = S_OK;
+ IMFTransform *pTransform = NULL;
+
+ if(!pbSupported) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pTransform));
+ *pbSupported = SUCCEEDED(hr);
+ if(FAILED(hr)) {
+ hr = S_OK; // not an error
+ }
bail:
- SafeRelease(&pTransform);
- return hr;
+ SafeRelease(&pTransform);
+ return hr;
}
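
// Usage sketch (hypothetical helper): probe once whether the Video Processor
// MFT (CLSID_VideoProcessorMFT, available on Windows 8 and later) can be
// instantiated, so callers can choose between the single MFT and the separate
// DMO converters used in CreateTopology().
static BOOL Example_HaveVideoProcessor()
{
    BOOL bSupported = FALSE;
    if (FAILED(MFUtils::IsVideoProcessorSupported(&bSupported))) {
        return FALSE; // only fails on E_POINTER; stay defensive anyway
    }
    return bSupported;
}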
HRESULT MFUtils::GetBestVideoProcessor(
- const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
- const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
- IMFTransform **ppProcessor // Receives the video processor
- )
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
+ IMFTransform **ppProcessor // Receives the video processor
+)
{
- assert(ppProcessor);
-
- *ppProcessor = NULL;
-
- HRESULT hr = S_OK;
- UINT32 count = 0;
-
- IMFActivate **ppActivate = NULL;
-
- MFT_REGISTER_TYPE_INFO infoInput = { MFMediaType_Video, inputFormat };
- MFT_REGISTER_TYPE_INFO infoOutput = { MFMediaType_Video, outputFormat };
-
- UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
- MFT_ENUM_FLAG_SYNCMFT |
- MFT_ENUM_FLAG_LOCALMFT |
- MFT_ENUM_FLAG_SORTANDFILTER;
-
- hr = MFTEnumEx(
- MFT_CATEGORY_VIDEO_PROCESSOR,
- unFlags,
- &infoInput, // Input type
- &infoOutput, // Output type
- &ppActivate,
- &count
- );
-
- for(UINT32 i = 0; i < count; ++i)
- {
- hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppProcessor));
- if(SUCCEEDED(hr) && *ppProcessor)
- {
- break;
- }
- SafeRelease(ppProcessor);
- }
-
- for (UINT32 i = 0; i < count; i++)
- {
- ppActivate[i]->Release();
- }
- CoTaskMemFree(ppActivate);
-
- return *ppProcessor ? S_OK : MF_E_NOT_FOUND;
+ assert(ppProcessor);
+
+ *ppProcessor = NULL;
+
+ HRESULT hr = S_OK;
+ UINT32 count = 0;
+
+ IMFActivate **ppActivate = NULL;
+
+ MFT_REGISTER_TYPE_INFO infoInput = { MFMediaType_Video, inputFormat };
+ MFT_REGISTER_TYPE_INFO infoOutput = { MFMediaType_Video, outputFormat };
+
+ UINT32 unFlags = MFT_ENUM_FLAG_HARDWARE |
+ MFT_ENUM_FLAG_SYNCMFT |
+ MFT_ENUM_FLAG_LOCALMFT |
+ MFT_ENUM_FLAG_SORTANDFILTER;
+
+ hr = MFTEnumEx(
+ MFT_CATEGORY_VIDEO_PROCESSOR,
+ unFlags,
+ &infoInput, // Input type
+ &infoOutput, // Output type
+ &ppActivate,
+ &count
+ );
+
+ for(UINT32 i = 0; i < count; ++i) {
+ hr = ppActivate[i]->ActivateObject(IID_PPV_ARGS(ppProcessor));
+ if(SUCCEEDED(hr) && *ppProcessor) {
+ break;
+ }
+ SafeRelease(ppProcessor);
+ }
+
+ for (UINT32 i = 0; i < count; i++) {
+ ppActivate[i]->Release();
+ }
+ CoTaskMemFree(ppActivate);
+
+ return *ppProcessor ? S_OK : MF_E_NOT_FOUND;
}
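
// Usage sketch (hypothetical helper): enumerate a processor able to convert
// I420 capture frames to the NV12 layout most H.264 encoders expect. The two
// formats are just an example pair; the caller still has to set the concrete
// input/output media types on the returned transform and release it.
static HRESULT Example_GetI420ToNV12Processor(IMFTransform **ppProcessor)
{
    return MFUtils::GetBestVideoProcessor(
               MFVideoFormat_I420,  // input format
               MFVideoFormat_NV12,  // output format
               ppProcessor);        // receives the video processor
}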
// Add a transform node to a topology.
@@ -855,26 +789,26 @@ HRESULT MFUtils::AddTransformNode(
IMFTransform *pMFT, // MFT.
DWORD dwId, // Identifier of the stream sink.
IMFTopologyNode **ppNode // Receives the node pointer.
- )
+)
{
- *ppNode = NULL;
+ *ppNode = NULL;
IMFTopologyNode *pNode = NULL;
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
// Create the node.
CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &pNode));
// Set the object pointer.
CHECK_HR(hr = pNode->SetObject(pMFT));
- CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
- CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
// Add the node to the topology.
CHECK_HR(hr = pTopology->AddNode(pNode));
// Return the pointer to the caller.
*ppNode = pNode;
(*ppNode)->AddRef();
-
+
bail:
SafeRelease(&pNode);
return hr;
@@ -882,13 +816,13 @@ bail:
// Sets the IMFStreamSink pointer on an output node.
HRESULT MFUtils::BindOutputNode(
- IMFTopologyNode *pNode // The Node
- )
+ IMFTopologyNode *pNode // The Node
+)
{
- assert(pNode);
+ assert(pNode);
- HRESULT hr = S_OK;
- IUnknown *pNodeObject = NULL;
+ HRESULT hr = S_OK;
+ IUnknown *pNodeObject = NULL;
IMFActivate *pActivate = NULL;
IMFStreamSink *pStream = NULL;
IMFMediaSink *pSink = NULL;
@@ -905,44 +839,38 @@ HRESULT MFUtils::BindOutputNode(
// First, check if it's an activation object.
CHECK_HR(hr = pNodeObject->QueryInterface(IID_PPV_ARGS(&pActivate)));
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
DWORD dwStreamID = 0;
- // The object pointer is an activation object.
-
+ // The object pointer is an activation object.
+
// Try to create the media sink.
hr = pActivate->ActivateObject(IID_PPV_ARGS(&pSink));
// Look up the stream ID. (Default to zero.)
- if (SUCCEEDED(hr))
- {
- dwStreamID = MFGetAttributeUINT32(pNode, MF_TOPONODE_STREAMID, 0);
+ if (SUCCEEDED(hr)) {
+ dwStreamID = MFGetAttributeUINT32(pNode, MF_TOPONODE_STREAMID, 0);
}
// Now try to get or create the stream sink.
// Check if the media sink already has a stream sink with the requested ID.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pSink->GetStreamSinkById(dwStreamID, &pStream);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
// Try to add a new stream sink.
hr = pSink->AddStreamSink(dwStreamID, NULL, &pStream);
}
}
- // Replace the node's object pointer with the stream sink.
- if (SUCCEEDED(hr))
- {
+ // Replace the node's object pointer with the stream sink.
+ if (SUCCEEDED(hr)) {
hr = pNode->SetObject(pStream);
}
}
- else
- {
+ else {
// Not an activation object. Is it a stream sink?
hr = pNodeObject->QueryInterface(IID_PPV_ARGS(&pStream));
}
@@ -957,54 +885,54 @@ bail:
// Add an output node to a topology.
HRESULT MFUtils::AddOutputNode(
- IMFTopology *pTopology, // Topology.
- IMFActivate *pActivate, // Media sink activation object.
- DWORD dwId, // Identifier of the stream sink.
- IMFTopologyNode **ppNode) // Receives the node pointer
+ IMFTopology *pTopology, // Topology.
+ IMFActivate *pActivate, // Media sink activation object.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode) // Receives the node pointer
{
- IMFTopologyNode *pNode = NULL;
+ IMFTopologyNode *pNode = NULL;
- HRESULT hr = S_OK;
- CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode));
- CHECK_HR(hr = pNode->SetObject(pActivate));
- CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
- CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
- CHECK_HR(hr = pTopology->AddNode(pNode));
+ HRESULT hr = S_OK;
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode));
+ CHECK_HR(hr = pNode->SetObject(pActivate));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
+ CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
+ CHECK_HR(hr = pTopology->AddNode(pNode));
- // Return the pointer to the caller.
- *ppNode = pNode;
- (*ppNode)->AddRef();
+ // Return the pointer to the caller.
+ *ppNode = pNode;
+ (*ppNode)->AddRef();
bail:
- SafeRelease(&pNode);
- return hr;
+ SafeRelease(&pNode);
+ return hr;
}
// Add a source node to a topology
HRESULT MFUtils::AddSourceNode(
- IMFTopology *pTopology, // Topology.
- IMFMediaSource *pSource, // Media source.
- IMFPresentationDescriptor *pPD, // Presentation descriptor.
- IMFStreamDescriptor *pSD, // Stream descriptor.
- IMFTopologyNode **ppNode // Receives the node pointer.
- )
+ IMFTopology *pTopology, // Topology.
+ IMFMediaSource *pSource, // Media source.
+ IMFPresentationDescriptor *pPD, // Presentation descriptor.
+ IMFStreamDescriptor *pSD, // Stream descriptor.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+)
{
- IMFTopologyNode *pNode = NULL;
+ IMFTopologyNode *pNode = NULL;
- HRESULT hr = S_OK;
- CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &pNode));
- CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource));
- CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD));
- CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD));
- CHECK_HR(hr = pTopology->AddNode(pNode));
+ HRESULT hr = S_OK;
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &pNode));
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource));
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD));
+ CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD));
+ CHECK_HR(hr = pTopology->AddNode(pNode));
- // Return the pointer to the caller.
- *ppNode = pNode;
- (*ppNode)->AddRef();
+ // Return the pointer to the caller.
+ *ppNode = pNode;
+ (*ppNode)->AddRef();
bail:
- SafeRelease(&pNode);
- return hr;
+ SafeRelease(&pNode);
+ return hr;
}
// Create the topology
@@ -1013,386 +941,345 @@ bail:
// \-> (SinkPreview)
//
HRESULT MFUtils::CreateTopology(
- IMFMediaSource *pSource, // Media source
- IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
- IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
- IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
- IMFMediaType *pIputTypeMain, // Main sink input MediaType
- IMFTopology **ppTopo // Receives the newly created topology
- )
+ IMFMediaSource *pSource, // Media source
+ IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
+ IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
+ IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
+ IMFMediaType *pIputTypeMain, // Main sink input MediaType
+ IMFTopology **ppTopo // Receives the newly created topology
+)
{
- IMFTopology *pTopology = NULL;
- IMFPresentationDescriptor *pPD = NULL;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
- IMFTopologyNode *pNodeSource = NULL;
- IMFTopologyNode *pNodeSinkMain = NULL;
- IMFTopologyNode *pNodeSinkPreview = NULL;
- IMFTopologyNode *pNodeTransform = NULL;
- IMFTopologyNode *pNodeTee = NULL;
- IMFMediaType *pMediaType = NULL;
+ IMFTopology *pTopology = NULL;
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFTopologyNode *pNodeSource = NULL;
+ IMFTopologyNode *pNodeSinkMain = NULL;
+ IMFTopologyNode *pNodeSinkPreview = NULL;
+ IMFTopologyNode *pNodeTransform = NULL;
+ IMFTopologyNode *pNodeTee = NULL;
+ IMFMediaType *pMediaType = NULL;
IMFTransform *pVideoProcessor = NULL;
IMFTopologyNode *pNodeVideoProcessor = NULL;
- IMFTransform *pConvFrameRate = NULL;
- IMFTransform *pConvSize = NULL;
- IMFTransform *pConvColor = NULL;
- IMFTopologyNode *pNodeConvFrameRate = NULL;
- IMFTopologyNode *pNodeConvSize = NULL;
- IMFTopologyNode *pNodeConvColor = NULL;
- IMFMediaType *pTransformInputType = NULL;
- IMFMediaType *pSinkMainInputType = NULL;
- const IMFTopologyNode *pcNodeBeforeSinkMain = NULL;
-
- HRESULT hr = S_OK;
- DWORD cStreams = 0;
- BOOL bSourceFound = FALSE;
- BOOL bSupportedSize = FALSE;
- BOOL bSupportedFps = FALSE;
- BOOL bSupportedFormat = FALSE;
- BOOL bVideoProcessorSupported = FALSE;
- GUID inputMajorType, inputSubType;
-
- CHECK_HR(hr = IsVideoProcessorSupported(&bVideoProcessorSupported));
- CHECK_HR(hr = pIputTypeMain->GetMajorType(&inputMajorType));
-
- CHECK_HR(hr = MFCreateTopology(&pTopology));
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
- for (DWORD i = 0; i < cStreams; i++)
- {
- BOOL fSelected = FALSE;
- GUID majorType;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetMajorType(&majorType));
-
- if (majorType == inputMajorType && fSelected)
- {
- CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pNodeSource));
- CHECK_HR(hr = pNodeSource->SetTopoNodeID(MFUtils::g_ullTopoIdSource));
- CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivateMain, 0, &pNodeSinkMain));
- CHECK_HR(hr = pNodeSinkMain->SetTopoNodeID(MFUtils::g_ullTopoIdSinkMain));
- CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkMain)); // To avoid MF_E_TOPO_SINK_ACTIVATES_UNSUPPORTED
-
- //
- // Create preview
- //
-
- if(pSinkActivatePreview)
- {
- CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivatePreview, 0, &pNodeSinkPreview));
- CHECK_HR(hr = pNodeSinkPreview->SetTopoNodeID(MFUtils::g_ullTopoIdSinkPreview));
- CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkPreview));
-
- CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TEE_NODE, &pNodeTee));
- CHECK_HR(hr = pTopology->AddNode(pNodeTee));
- }
-
- //
- // Create converters
- //
-
- if(majorType == MFMediaType_Video)
- {
- // Even when size matches the topology could add a resizer which doesn't keep ratio when resizing while video processor does.
- if(!bVideoProcessorSupported)
- {
- hr = IsSupported(
- pPD,
- i,
- pIputTypeMain,
- &bSupportedSize,
- &bSupportedFps,
- &bSupportedFormat);
- }
-
- CHECK_HR(hr = pIputTypeMain->GetGUID(MF_MT_SUBTYPE, &inputSubType));
-
- if(!bSupportedSize || !bSupportedFps || !bSupportedFormat)
- {
- // Use video processor single MFT or 3 different MFTs
- if(!pVideoProcessor)
- {
- hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pVideoProcessor));
- }
- if(!pVideoProcessor)
- {
- // Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) supports I420 only
- if(!bSupportedSize && !pConvSize && inputSubType == MFVideoFormat_I420)
- {
- hr = CoCreateInstance(CLSID_CResizerDMO, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvSize));
- }
- // Frame Rate Converter DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) supports neither NV12 nor I420
- /*if(!bSupportedFps && !pConvFrameRate)
- {
- hr = CoCreateInstance(CLSID_CFrameRateConvertDmo, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvFrameRate));
- }*/
- // Color Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819079(v=vs.85).aspx) supports both NV12 and I420
- if(!bSupportedFormat && !pConvColor)
- {
- hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvColor));
- }
- }
- }
- else
- {
- // MediaType supported
- CHECK_HR(hr = pHandler->SetCurrentMediaType(pIputTypeMain));
- }
-
- if(pVideoProcessor && !pNodeVideoProcessor)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pVideoProcessor, 0, &pNodeVideoProcessor));
- CHECK_HR(hr = pNodeVideoProcessor->SetTopoNodeID(MFUtils::g_ullTopoIdVideoProcessor));
- }
- if(pConvColor && !pNodeConvColor)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pConvColor, 0, &pNodeConvColor));
- }
- if(pConvFrameRate && !pNodeConvFrameRate)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pConvFrameRate, 0, &pNodeConvFrameRate));
- }
- if(pConvSize && !pNodeConvSize)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pConvSize, 0, &pNodeConvSize));
- }
- } // if(majorType == MFMediaType_Video)
-
-
- //
- // Set media type
- //
-
- if(pTransform)
- {
- CHECK_HR(hr = AddTransformNode(pTopology, pTransform, 0, &pNodeTransform));
- hr = pTransform->GetInputCurrentType(0, &pTransformInputType);
- if(FAILED(hr))
- {
- pTransformInputType = pIputTypeMain;
- pTransformInputType->AddRef();
- hr = S_OK;
- }
- if(pVideoProcessor)
- {
- CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pTransformInputType, 0));
- }
- else
- {
- if(pConvColor)
- {
- /*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pTransformInputType, 0));
- }
- if(pConvFrameRate)
- {
- /*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pTransformInputType, 0));
- }
- if(pConvSize)
- {
- // Transform requires NV12
- //Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) doesn't support NV12
- //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pTransformInputType, 0));
- }
- }
- }
- else
- {
- hr = pNodeSinkMain->GetInputPrefType(0, &pSinkMainInputType);
- if(FAILED(hr))
- {
- pSinkMainInputType = pIputTypeMain;
- pSinkMainInputType->AddRef();
- hr = S_OK;
- }
- if(SUCCEEDED(hr))
- {
- if(pVideoProcessor)
- {
- CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pSinkMainInputType, 0));
- }
- else
- {
- //!\ MUST NOT SET OUTPUT TYPE
- if(pConvColor)
- {
- //*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pSinkMainInputType, 0));
- }
- if(pConvFrameRate)
- {
- //*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pSinkMainInputType, 0));
- }
- if(pConvSize)
- {
- //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pSinkMainInputType, 0));
- }
- }
- }
- }
-
- //
- // Connect
- //
-
- if(pNodeTee)
- {
- // Connect(Source -> Tee)
- CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeTee, 0));
-
- // Connect(Tee -> SinkPreview)
- CHECK_HR(hr = pNodeTee->ConnectOutput(1, pNodeSinkPreview, 0));
-
- // Connect(Tee ->(Processors)
- if(pVideoProcessor)
- {
- CHECK_HR(hr = pNodeTee->ConnectOutput(0, pNodeVideoProcessor, 0));
- pcNodeBeforeSinkMain = pNodeVideoProcessor;
- }
- else if(pNodeConvFrameRate || pNodeConvSize || pNodeConvColor)
- {
- CHECK_HR(hr = ConnectConverters(
- pNodeTee,
- 0,
- pNodeConvFrameRate,
- pNodeConvColor,
- pNodeConvSize
- ));
- pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
- }
- else
- {
- pcNodeBeforeSinkMain = pNodeTee;
- }
- }
- else
- {
- // Connect(Source -> (Processors))
- if(pVideoProcessor)
- {
- CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeVideoProcessor, 0));
- pcNodeBeforeSinkMain = pNodeVideoProcessor;
- }
- else if(pNodeConvFrameRate || pNodeConvFrameRate || pNodeConvColor)
- {
- CHECK_HR(hr = ConnectConverters(
- pNodeSource,
- 0,
- pNodeConvFrameRate,
- pNodeConvSize,
- pNodeConvColor
- ));
- pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
- }
- else
- {
- pcNodeBeforeSinkMain = pNodeSource;
- }
- }
-
-
- if(pNodeTransform)
- {
- // Connect(X->Transform)
- CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeTransform, 0));
- pcNodeBeforeSinkMain = pNodeTransform;
- }
-
- // Connect(X -> SinkMain)
- CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeSinkMain, 0));
-
- bSourceFound = TRUE;
- break;
- }
- else
- {
- CHECK_HR(hr = pPD->DeselectStream(i));
- }
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- }
-
- *ppTopo = pTopology;
- (*ppTopo)->AddRef();
+ IMFTransform *pConvFrameRate = NULL;
+ IMFTransform *pConvSize = NULL;
+ IMFTransform *pConvColor = NULL;
+ IMFTopologyNode *pNodeConvFrameRate = NULL;
+ IMFTopologyNode *pNodeConvSize = NULL;
+ IMFTopologyNode *pNodeConvColor = NULL;
+ IMFMediaType *pTransformInputType = NULL;
+ IMFMediaType *pSinkMainInputType = NULL;
+ const IMFTopologyNode *pcNodeBeforeSinkMain = NULL;
+
+ HRESULT hr = S_OK;
+ DWORD cStreams = 0;
+ BOOL bSourceFound = FALSE;
+ BOOL bSupportedSize = FALSE;
+ BOOL bSupportedFps = FALSE;
+ BOOL bSupportedFormat = FALSE;
+ BOOL bVideoProcessorSupported = FALSE;
+ GUID inputMajorType, inputSubType;
+
+ CHECK_HR(hr = IsVideoProcessorSupported(&bVideoProcessorSupported));
+ CHECK_HR(hr = pIputTypeMain->GetMajorType(&inputMajorType));
+
+ CHECK_HR(hr = MFCreateTopology(&pTopology));
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+ for (DWORD i = 0; i < cStreams; i++) {
+ BOOL fSelected = FALSE;
+ GUID majorType;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+ if (majorType == inputMajorType && fSelected) {
+ CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pNodeSource));
+ CHECK_HR(hr = pNodeSource->SetTopoNodeID(MFUtils::g_ullTopoIdSource));
+ CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivateMain, 0, &pNodeSinkMain));
+ CHECK_HR(hr = pNodeSinkMain->SetTopoNodeID(MFUtils::g_ullTopoIdSinkMain));
+ CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkMain)); // To avoid MF_E_TOPO_SINK_ACTIVATES_UNSUPPORTED
+
+ //
+ // Create preview
+ //
+
+ if(pSinkActivatePreview) {
+ CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivatePreview, 0, &pNodeSinkPreview));
+ CHECK_HR(hr = pNodeSinkPreview->SetTopoNodeID(MFUtils::g_ullTopoIdSinkPreview));
+ CHECK_HR(hr = MFUtils::BindOutputNode(pNodeSinkPreview));
+
+ CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_TEE_NODE, &pNodeTee));
+ CHECK_HR(hr = pTopology->AddNode(pNodeTee));
+ }
+
+ //
+ // Create converters
+ //
+
+ if(majorType == MFMediaType_Video) {
+                // Even when the size matches, the topology could add a resizer which doesn't keep the aspect ratio when resizing, while the video processor does.
+ if(!bVideoProcessorSupported) {
+ hr = IsSupported(
+ pPD,
+ i,
+ pIputTypeMain,
+ &bSupportedSize,
+ &bSupportedFps,
+ &bSupportedFormat);
+ }
+
+ CHECK_HR(hr = pIputTypeMain->GetGUID(MF_MT_SUBTYPE, &inputSubType));
+
+ if(!bSupportedSize || !bSupportedFps || !bSupportedFormat) {
+ // Use video processor single MFT or 3 different MFTs
+ if(!pVideoProcessor) {
+ hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pVideoProcessor));
+ }
+ if(!pVideoProcessor) {
+ // Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) supports I420 only
+ if(!bSupportedSize && !pConvSize && inputSubType == MFVideoFormat_I420) {
+ hr = CoCreateInstance(CLSID_CResizerDMO, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvSize));
+ }
+ // Frame Rate Converter DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) supports neither NV12 nor I420
+ /*if(!bSupportedFps && !pConvFrameRate)
+ {
+ hr = CoCreateInstance(CLSID_CFrameRateConvertDmo, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvFrameRate));
+ }*/
+ // Color Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819079(v=vs.85).aspx) supports both NV12 and I420
+ if(!bSupportedFormat && !pConvColor) {
+ hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pConvColor));
+ }
+ }
+ }
+ else {
+ // MediaType supported
+ CHECK_HR(hr = pHandler->SetCurrentMediaType(pIputTypeMain));
+ }
+
+ if(pVideoProcessor && !pNodeVideoProcessor) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pVideoProcessor, 0, &pNodeVideoProcessor));
+ CHECK_HR(hr = pNodeVideoProcessor->SetTopoNodeID(MFUtils::g_ullTopoIdVideoProcessor));
+ }
+ if(pConvColor && !pNodeConvColor) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pConvColor, 0, &pNodeConvColor));
+ }
+ if(pConvFrameRate && !pNodeConvFrameRate) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pConvFrameRate, 0, &pNodeConvFrameRate));
+ }
+ if(pConvSize && !pNodeConvSize) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pConvSize, 0, &pNodeConvSize));
+ }
+ } // if(majorType == MFMediaType_Video)
+
+
+ //
+ // Set media type
+ //
+
+ if(pTransform) {
+ CHECK_HR(hr = AddTransformNode(pTopology, pTransform, 0, &pNodeTransform));
+ hr = pTransform->GetInputCurrentType(0, &pTransformInputType);
+ if(FAILED(hr)) {
+ pTransformInputType = pIputTypeMain;
+ pTransformInputType->AddRef();
+ hr = S_OK;
+ }
+ if(pVideoProcessor) {
+ CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pTransformInputType, 0));
+ }
+ else {
+ if(pConvColor) {
+ /*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pTransformInputType, 0));
+ }
+ if(pConvFrameRate) {
+ /*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pTransformInputType, 0));
+ }
+ if(pConvSize) {
+ // Transform requires NV12
+ //Video Resizer DSP(http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx) doesn't support NV12
+ //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pTransformInputType, 0));
+ }
+ }
+ }
+ else {
+ hr = pNodeSinkMain->GetInputPrefType(0, &pSinkMainInputType);
+ if(FAILED(hr)) {
+ pSinkMainInputType = pIputTypeMain;
+ pSinkMainInputType->AddRef();
+ hr = S_OK;
+ }
+ if(SUCCEEDED(hr)) {
+ if(pVideoProcessor) {
+ CHECK_HR(hr = pVideoProcessor->SetOutputType(0, pSinkMainInputType, 0));
+ }
+ else {
+ //!\ MUST NOT SET OUTPUT TYPE
+ if(pConvColor) {
+ //*CHECK_HR*/(hr = pConvColor->SetOutputType(0, pSinkMainInputType, 0));
+ }
+ if(pConvFrameRate) {
+ //*CHECK_HR*/(hr = pConvFrameRate->SetOutputType(0, pSinkMainInputType, 0));
+ }
+ if(pConvSize) {
+ //*CHECK_HR*/(hr = pConvSize->SetOutputType(0, pSinkMainInputType, 0));
+ }
+ }
+ }
+ }
+
+ //
+ // Connect
+ //
+
+ if(pNodeTee) {
+ // Connect(Source -> Tee)
+ CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeTee, 0));
+
+ // Connect(Tee -> SinkPreview)
+ CHECK_HR(hr = pNodeTee->ConnectOutput(1, pNodeSinkPreview, 0));
+
+            // Connect(Tee -> (Processors))
+ if(pVideoProcessor) {
+ CHECK_HR(hr = pNodeTee->ConnectOutput(0, pNodeVideoProcessor, 0));
+ pcNodeBeforeSinkMain = pNodeVideoProcessor;
+ }
+ else if(pNodeConvFrameRate || pNodeConvSize || pNodeConvColor) {
+ CHECK_HR(hr = ConnectConverters(
+ pNodeTee,
+ 0,
+ pNodeConvFrameRate,
+ pNodeConvColor,
+ pNodeConvSize
+ ));
+ pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
+ }
+ else {
+ pcNodeBeforeSinkMain = pNodeTee;
+ }
+ }
+ else {
+ // Connect(Source -> (Processors))
+ if(pVideoProcessor) {
+ CHECK_HR(hr = pNodeSource->ConnectOutput(0, pNodeVideoProcessor, 0));
+ pcNodeBeforeSinkMain = pNodeVideoProcessor;
+ }
+            else if(pNodeConvFrameRate || pNodeConvSize || pNodeConvColor) {
+ CHECK_HR(hr = ConnectConverters(
+ pNodeSource,
+ 0,
+ pNodeConvFrameRate,
+ pNodeConvSize,
+ pNodeConvColor
+ ));
+ pcNodeBeforeSinkMain = pNodeConvSize ? pNodeConvSize : (pNodeConvColor ? pNodeConvColor : pNodeConvFrameRate);
+ }
+ else {
+ pcNodeBeforeSinkMain = pNodeSource;
+ }
+ }
+
+
+ if(pNodeTransform) {
+ // Connect(X->Transform)
+ CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeTransform, 0));
+ pcNodeBeforeSinkMain = pNodeTransform;
+ }
+
+ // Connect(X -> SinkMain)
+ CHECK_HR(hr = ((IMFTopologyNode *)pcNodeBeforeSinkMain)->ConnectOutput(0, pNodeSinkMain, 0));
+
+ bSourceFound = TRUE;
+ break;
+ }
+ else {
+ CHECK_HR(hr = pPD->DeselectStream(i));
+ }
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ }
+
+ *ppTopo = pTopology;
+ (*ppTopo)->AddRef();
bail:
- SafeRelease(&pTopology);
- SafeRelease(&pNodeSource);
- SafeRelease(&pNodeSinkMain);
- SafeRelease(&pNodeSinkPreview);
- SafeRelease(&pNodeTransform);
- SafeRelease(&pNodeTee);
- SafeRelease(&pPD);
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- SafeRelease(&pMediaType);
- SafeRelease(&pTransformInputType);
- SafeRelease(&pSinkMainInputType);
-
- SafeRelease(&pVideoProcessor);
+ SafeRelease(&pTopology);
+ SafeRelease(&pNodeSource);
+ SafeRelease(&pNodeSinkMain);
+ SafeRelease(&pNodeSinkPreview);
+ SafeRelease(&pNodeTransform);
+ SafeRelease(&pNodeTee);
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+ SafeRelease(&pTransformInputType);
+ SafeRelease(&pSinkMainInputType);
+
+ SafeRelease(&pVideoProcessor);
SafeRelease(&pNodeVideoProcessor);
- SafeRelease(&pConvFrameRate);
- SafeRelease(&pConvSize);
- SafeRelease(&pConvColor);
- SafeRelease(&pNodeConvFrameRate);
- SafeRelease(&pNodeConvSize);
- SafeRelease(&pNodeConvColor);
-
- if(!bSourceFound)
- {
- TSK_DEBUG_ERROR("No source node found");
- return E_NOT_SET;
- }
-
- return hr;
+ SafeRelease(&pConvFrameRate);
+ SafeRelease(&pConvSize);
+ SafeRelease(&pConvColor);
+ SafeRelease(&pNodeConvFrameRate);
+ SafeRelease(&pNodeConvSize);
+ SafeRelease(&pNodeConvColor);
+
+ if(!bSourceFound) {
+ TSK_DEBUG_ERROR("No source node found");
+ return E_NOT_SET;
+ }
+
+ return hr;
}
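
// Usage sketch (hypothetical wiring; every pointer passed in is assumed to
// have been created elsewhere): build the partial topology without a preview
// sink, let the topology loader resolve it, then start it on an existing
// session. Follows the CHECK_HR/bail convention used throughout this file.
static HRESULT Example_BuildAndRunTopology(
    IMFMediaSession *pSession,
    IMFMediaSource *pSource,
    IMFTransform *pEncoder,
    IMFActivate *pSinkActivate,
    IMFMediaType *pSinkInputType)
{
    HRESULT hr = S_OK;
    IMFTopology *pPartialTopo = NULL;
    IMFTopology *pFullTopo = NULL;

    CHECK_HR(hr = MFUtils::CreateTopology(pSource, pEncoder, pSinkActivate,
                                          NULL /* no preview */, pSinkInputType, &pPartialTopo));
    CHECK_HR(hr = MFUtils::ResolveTopology(pPartialTopo, &pFullTopo, NULL));
    CHECK_HR(hr = MFUtils::RunSession(pSession, pFullTopo));

bail:
    SafeRelease(&pPartialTopo);
    SafeRelease(&pFullTopo);
    return hr;
}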
// Creates a fully loaded topology from the input partial topology.
HRESULT MFUtils::ResolveTopology(
- IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
- IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
- IMFTopology *pCurrentTopo /*= NULL*/ // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
- )
+ IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
+ IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
+ IMFTopology *pCurrentTopo /*= NULL*/ // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
+)
{
- assert(ppOutputTopo && pInputTopo);
-
- HRESULT hr = S_OK;
- IMFTopoLoader* pTopoLoader = NULL;
-
- *ppOutputTopo = NULL;
-
- CHECK_HR(hr = MFCreateTopoLoader(&pTopoLoader));
- CHECK_HR(hr = pTopoLoader->Load(pInputTopo, ppOutputTopo, pCurrentTopo));
-
+ assert(ppOutputTopo && pInputTopo);
+
+ HRESULT hr = S_OK;
+ IMFTopoLoader* pTopoLoader = NULL;
+
+ *ppOutputTopo = NULL;
+
+ CHECK_HR(hr = MFCreateTopoLoader(&pTopoLoader));
+ CHECK_HR(hr = pTopoLoader->Load(pInputTopo, ppOutputTopo, pCurrentTopo));
+
bail:
- SafeRelease(&pTopoLoader);
- return hr;
+ SafeRelease(&pTopoLoader);
+ return hr;
}
HRESULT MFUtils::FindNodeObject(
- IMFTopology *pInputTopo, // The Topology containing the node to find
- TOPOID qwTopoNodeID, //The identifier for the node
- void** ppObject // Receives the Object
- )
+ IMFTopology *pInputTopo, // The Topology containing the node to find
+ TOPOID qwTopoNodeID, //The identifier for the node
+ void** ppObject // Receives the Object
+)
{
- assert(pInputTopo && ppObject);
+ assert(pInputTopo && ppObject);
- *ppObject = NULL;
+ *ppObject = NULL;
- IMFTopologyNode *pNode = NULL;
- HRESULT hr = S_OK;
+ IMFTopologyNode *pNode = NULL;
+ HRESULT hr = S_OK;
- CHECK_HR(hr = pInputTopo->GetNodeByID(qwTopoNodeID, &pNode));
- CHECK_HR(hr = pNode->GetObject((IUnknown**)ppObject));
+ CHECK_HR(hr = pInputTopo->GetNodeByID(qwTopoNodeID, &pNode));
+ CHECK_HR(hr = pNode->GetObject((IUnknown**)ppObject));
bail:
- SafeRelease(&pNode);
- return hr;
+ SafeRelease(&pNode);
+ return hr;
}
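
// Usage sketch (hypothetical helper): fetch the object attached to the main
// sink node of a resolved topology by the well-known TOPOID assigned in
// CreateTopology(). The concrete interface behind the node depends on how the
// node was bound, so it is queried for IMFStreamSink here only as an example.
static HRESULT Example_GetMainSinkStream(IMFTopology *pFullTopo, IMFStreamSink **ppStream)
{
    HRESULT hr = S_OK;
    IUnknown *pUnk = NULL;
    *ppStream = NULL;
    CHECK_HR(hr = MFUtils::FindNodeObject(pFullTopo, MFUtils::g_ullTopoIdSinkMain, (void**)&pUnk));
    CHECK_HR(hr = pUnk->QueryInterface(IID_PPV_ARGS(ppStream)));
bail:
    SafeRelease(&pUnk);
    return hr;
}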
// Create an activation object for a renderer, based on the stream media type.
@@ -1402,7 +1289,7 @@ HRESULT MFUtils::CreateMediaSinkActivate(
IMFActivate **ppActivate
)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
IMFMediaTypeHandler *pHandler = NULL;
IMFActivate *pActivate = NULL;
@@ -1411,29 +1298,25 @@ HRESULT MFUtils::CreateMediaSinkActivate(
// Get the major media type.
GUID guidMajorType;
CHECK_HR(hr = pHandler->GetMajorType(&guidMajorType));
-
+
// Create an IMFActivate object for the renderer, based on the media type.
- if (MFMediaType_Audio == guidMajorType)
- {
+ if (MFMediaType_Audio == guidMajorType) {
// Create the audio renderer.
CHECK_HR(hr = MFCreateAudioRendererActivate(&pActivate));
}
- else if (MFMediaType_Video == guidMajorType)
- {
+ else if (MFMediaType_Video == guidMajorType) {
// Create the video renderer.
CHECK_HR(hr = MFCreateVideoRendererActivate(hVideoWindow, &pActivate));
}
- else
- {
- // Unknown stream type.
+ else {
+ // Unknown stream type.
hr = E_FAIL;
// Optionally, you could deselect this stream instead of failing.
}
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
goto bail;
}
-
+
// Return IMFActivate pointer to caller.
*ppActivate = pActivate;
(*ppActivate)->AddRef();
@@ -1447,92 +1330,87 @@ bail:
// Set source output media type
HRESULT MFUtils::SetMediaType(
IMFMediaSource *pSource, // Media source.
- IMFMediaType* pMediaType // Media Type.
- )
+ IMFMediaType* pMediaType // Media Type.
+)
{
- assert(pSource && pMediaType);
-
- IMFPresentationDescriptor *pPD = NULL;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
-
- HRESULT hr = S_OK;
- DWORD cStreams = 0;
- GUID inputMajorType;
-
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
- CHECK_HR(hr = pMediaType->GetMajorType(&inputMajorType));
-
- for (DWORD i = 0; i < cStreams; i++)
- {
- BOOL fSelected = FALSE;
- GUID majorType;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetMajorType(&majorType));
-
- if (majorType == inputMajorType && fSelected)
- {
- CHECK_HR(hr = pHandler->SetCurrentMediaType(pMediaType));
- }
- else
- {
- CHECK_HR(hr = pPD->DeselectStream(i));
- }
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- }
+ assert(pSource && pMediaType);
+
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+
+ HRESULT hr = S_OK;
+ DWORD cStreams = 0;
+ GUID inputMajorType;
+
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+ CHECK_HR(hr = pMediaType->GetMajorType(&inputMajorType));
+
+ for (DWORD i = 0; i < cStreams; i++) {
+ BOOL fSelected = FALSE;
+ GUID majorType;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+ if (majorType == inputMajorType && fSelected) {
+ CHECK_HR(hr = pHandler->SetCurrentMediaType(pMediaType));
+ }
+ else {
+ CHECK_HR(hr = pPD->DeselectStream(i));
+ }
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ }
bail:
- SafeRelease(&pPD);
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
- return hr;
+ return hr;
}
HRESULT MFUtils::SetVideoWindow(
IMFTopology *pTopology, // Topology.
IMFMediaSource *pSource, // Media source.
HWND hVideoWnd // Window for video playback.
- )
+)
{
- HRESULT hr = S_OK;
- IMFStreamDescriptor *pSD = NULL;
- IMFPresentationDescriptor *pPD = NULL;
+ HRESULT hr = S_OK;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFPresentationDescriptor *pPD = NULL;
IMFActivate *pSinkActivate = NULL;
IMFTopologyNode *pSourceNode = NULL;
IMFTopologyNode *pOutputNode = NULL;
- DWORD cStreams = 0, iStream;
-
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
- for(iStream = 0; iStream < cStreams; ++iStream)
- {
- BOOL fSelected = FALSE;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(iStream, &fSelected, &pSD));
-
- if (fSelected)
- {
- // Create the media sink activation object.
- CHECK_HR(hr = CreateMediaSinkActivate(pSD, hVideoWnd, &pSinkActivate));
- // Add a source node for this stream.
- CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pSourceNode));
- // Create the output node for the renderer.
- CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, &pOutputNode));
- // Connect the source node to the output node.
- CHECK_HR(hr = pSourceNode->ConnectOutput(0, pOutputNode, 0));
- }
- // else: If not selected, don't add the branch.
- }
+ DWORD cStreams = 0, iStream;
+
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+ for(iStream = 0; iStream < cStreams; ++iStream) {
+ BOOL fSelected = FALSE;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(iStream, &fSelected, &pSD));
+
+ if (fSelected) {
+ // Create the media sink activation object.
+ CHECK_HR(hr = CreateMediaSinkActivate(pSD, hVideoWnd, &pSinkActivate));
+ // Add a source node for this stream.
+ CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pSourceNode));
+ // Create the output node for the renderer.
+ CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, &pOutputNode));
+ // Connect the source node to the output node.
+ CHECK_HR(hr = pSourceNode->ConnectOutput(0, pOutputNode, 0));
+ }
+ // else: If not selected, don't add the branch.
+ }
bail:
- SafeRelease(&pPD);
+ SafeRelease(&pPD);
SafeRelease(&pSD);
SafeRelease(&pSinkActivate);
SafeRelease(&pSourceNode);
@@ -1542,407 +1420,374 @@ bail:
// Run the session
HRESULT MFUtils::RunSession(
- IMFMediaSession *pSession, // Session to run
- IMFTopology *pTopology // The toppology
- )
+ IMFMediaSession *pSession, // Session to run
+    IMFTopology *pTopology // The topology
+)
{
- assert(pSession && pTopology);
-
- IMFMediaEvent *pEvent = NULL;
-
- PROPVARIANT var;
- PropVariantInit(&var);
-
- MediaEventType met;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- CHECK_HR(hr = pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pTopology)); // MFSESSION_SETTOPOLOGY_IMMEDIATE required to update (reload) topology when media type change
- CHECK_HR(hr = pSession->Start(&GUID_NULL, &var));
-
- // Check first event
- hr = pSession->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pEvent);
- if(hr == MF_E_NO_EVENTS_AVAILABLE || hr == MF_E_MULTIPLE_SUBSCRIBERS){ // MF_E_MULTIPLE_SUBSCRIBERS means already listening
- hr = S_OK;
- goto bail;
- }
- if(pEvent) {
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- }
- else {
- hrStatus = hr;
- }
- if (FAILED(hrStatus))
- {
- CHECK_HR(hr = pEvent->GetType(&met));
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
+ assert(pSession && pTopology);
+
+ IMFMediaEvent *pEvent = NULL;
+
+ PROPVARIANT var;
+ PropVariantInit(&var);
+
+ MediaEventType met;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+    CHECK_HR(hr = pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pTopology)); // MFSESSION_SETTOPOLOGY_IMMEDIATE required to update (reload) the topology when the media type changes
+ CHECK_HR(hr = pSession->Start(&GUID_NULL, &var));
+
+ // Check first event
+ hr = pSession->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pEvent);
+ if(hr == MF_E_NO_EVENTS_AVAILABLE || hr == MF_E_MULTIPLE_SUBSCRIBERS) { // MF_E_MULTIPLE_SUBSCRIBERS means already listening
+ hr = S_OK;
+ goto bail;
+ }
+ if(pEvent) {
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ }
+ else {
+ hrStatus = hr;
+ }
+ if (FAILED(hrStatus)) {
+ CHECK_HR(hr = pEvent->GetType(&met));
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
bail:
- SafeRelease(&pEvent);
- return hr;
+ SafeRelease(&pEvent);
+ return hr;
}
// Stop session
HRESULT MFUtils::ShutdownSession(
- IMFMediaSession *pSession, // The Session
- IMFMediaSource *pSource // Source to shutdown (optional)
- )
+ IMFMediaSession *pSession, // The Session
+ IMFMediaSource *pSource // Source to shutdown (optional)
+)
{
- // MUST be source then session
- if(pSource){
- pSource->Stop();
- pSource->Shutdown();
- }
- if(pSession){
- pSession->Shutdown();
- }
- return S_OK;
+ // MUST be source then session
+ if(pSource) {
+ pSource->Stop();
+ pSource->Shutdown();
+ }
+ if(pSession) {
+ pSession->Shutdown();
+ }
+ return S_OK;
}
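
// Usage sketch (hypothetical teardown): shut down in the order enforced above
// (source first, then session) and only then drop the references.
static void Example_Teardown(IMFMediaSession **ppSession, IMFMediaSource **ppSource)
{
    MFUtils::ShutdownSession(*ppSession, *ppSource);
    SafeRelease(ppSource);
    SafeRelease(ppSession);
}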
// Pause session
HRESULT MFUtils::PauseSession(
- IMFMediaSession *pSession, // The session
- IMFMediaSource *pSource // Source to pause (optional)
- )
+ IMFMediaSession *pSession, // The session
+ IMFMediaSource *pSource // Source to pause (optional)
+)
{
- if(!pSession){
- return E_INVALIDARG;
- }
- if(pSource){
- pSource->Pause();
- }
- return pSession->Pause();
+ if(!pSession) {
+ return E_INVALIDARG;
+ }
+ if(pSource) {
+ pSource->Pause();
+ }
+ return pSession->Pause();
}
// Returns -1 if none is supported
INT MFUtils::GetSupportedSubTypeIndex(
- IMFMediaSource *pSource, // The source
- const GUID& mediaType, // The MediaType
- const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
- )
+ IMFMediaSource *pSource, // The source
+ const GUID& mediaType, // The MediaType
+ const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
+)
{
- assert(pSource);
-
- IMFPresentationDescriptor *pPD = NULL;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
- IMFMediaType *pMediaType = NULL;
-
- INT nIndex = -1;
- HRESULT hr = S_OK;
- DWORD cStreams = 0, cMediaTypesCount;
- GUID majorType, subType;
- BOOL fSelected;
-
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
- for (UINT subTypesIndex = 0; subTypesIndex < subTypesCount && nIndex == -1; ++subTypesIndex)
- {
- for (DWORD cStreamIndex = 0; cStreamIndex < cStreams && nIndex == -1; ++cStreamIndex)
- {
- fSelected = FALSE;
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
- if(fSelected)
- {
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetMajorType(&majorType));
- if(majorType == mediaType)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
- for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount && nIndex == -1; ++cMediaTypesIndex)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
- if (subTypes[subTypesIndex].fourcc == subType)
- {
- nIndex = subTypesIndex;
- break;
- }
- SafeRelease(&pMediaType);
- }
- }
- }
-
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- }
- }
+ assert(pSource);
+
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFMediaType *pMediaType = NULL;
+
+ INT nIndex = -1;
+ HRESULT hr = S_OK;
+ DWORD cStreams = 0, cMediaTypesCount;
+ GUID majorType, subType;
+ BOOL fSelected;
+
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+ for (UINT subTypesIndex = 0; subTypesIndex < subTypesCount && nIndex == -1; ++subTypesIndex) {
+ for (DWORD cStreamIndex = 0; cStreamIndex < cStreams && nIndex == -1; ++cStreamIndex) {
+ fSelected = FALSE;
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
+ if(fSelected) {
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+ if(majorType == mediaType) {
+ CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+ for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount && nIndex == -1; ++cMediaTypesIndex) {
+ CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+ if (subTypes[subTypesIndex].fourcc == subType) {
+ nIndex = subTypesIndex;
+ break;
+ }
+ SafeRelease(&pMediaType);
+ }
+ }
+ }
+
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ }
+ }
bail:
- SafeRelease(&pMediaType);
- SafeRelease(&pPD);
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
- return nIndex;
+ return nIndex;
}
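
// Usage sketch (hypothetical helper): ask the source which entry of a caller
// supplied preference list it can deliver. 'pcPreferred' is assumed to be an
// existing array of VideoSubTypeGuidPair entries (declared in the header);
// only its GUID member is relied on, exactly as in the loop above.
static INT Example_PickSubType(IMFMediaSource *pSource,
                               const VideoSubTypeGuidPair *pcPreferred, UINT nPreferredCount)
{
    INT nIndex = MFUtils::GetSupportedSubTypeIndex(pSource, MFMediaType_Video, pcPreferred, nPreferredCount);
    if (nIndex < 0) {
        TSK_DEBUG_INFO("None of the preferred sub-types is supported by the source");
    }
    return nIndex;
}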
HRESULT MFUtils::IsSupported(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nFps,
- const GUID& guidFormat,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- )
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nFps,
+ const GUID& guidFormat,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+)
{
- HRESULT hr = S_OK;
-
- BOOL fSelected = FALSE;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
- IMFMediaType *pMediaType = NULL;
- UINT32 _nWidth = 0, _nHeight = 0, numeratorFps = 0, denominatorFps = 0;
- GUID subType;
- DWORD cMediaTypesCount;
-
- if(!pPD || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- *pbSupportedSize = FALSE;
- *pbSupportedFps = FALSE;
- *pbSupportedFormat = FALSE;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
- if(fSelected)
- {
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
- for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
- CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
- if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps)))
- {
- numeratorFps = 30;
- denominatorFps = 1;
- }
-
- // all must match for the same stream
- if(_nWidth == nWidth && _nHeight == nHeight && subType == guidFormat && (numeratorFps/denominatorFps) == nFps)
- {
- *pbSupportedSize = TRUE;
- *pbSupportedFormat = TRUE;
- *pbSupportedFps = TRUE;
- break;
- }
-
- SafeRelease(&pMediaType);
- }
- SafeRelease(&pHandler);
- }
-
+ HRESULT hr = S_OK;
+
+ BOOL fSelected = FALSE;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFMediaType *pMediaType = NULL;
+ UINT32 _nWidth = 0, _nHeight = 0, numeratorFps = 0, denominatorFps = 0;
+ GUID subType;
+ DWORD cMediaTypesCount;
+
+ if(!pPD || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ *pbSupportedSize = FALSE;
+ *pbSupportedFps = FALSE;
+ *pbSupportedFormat = FALSE;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(cStreamIndex, &fSelected, &pSD));
+ if(fSelected) {
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+ for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex) {
+ CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+ if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps))) {
+ numeratorFps = 30;
+ denominatorFps = 1;
+ }
+
+ // all must match for the same stream
+ if(_nWidth == nWidth && _nHeight == nHeight && subType == guidFormat && (numeratorFps/denominatorFps) == nFps) {
+ *pbSupportedSize = TRUE;
+ *pbSupportedFormat = TRUE;
+ *pbSupportedFps = TRUE;
+ break;
+ }
+
+ SafeRelease(&pMediaType);
+ }
+ SafeRelease(&pHandler);
+ }
+
bail:
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- SafeRelease(&pMediaType);
-
- return hr;
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+
+ return hr;
}
HRESULT MFUtils::IsSupported(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- IMFMediaType* pMediaType,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- )
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ IMFMediaType* pMediaType,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+)
{
- HRESULT hr = S_OK;
-
- UINT32 nWidth = 0, nHeight = 0, nFps = 0, numeratorFps = 30, denominatorFps = 1;
- GUID subType;
-
- if(!pPD || !pMediaType || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &nWidth, &nHeight));
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
- if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps)))
- {
- numeratorFps = 30;
- denominatorFps = 1;
- }
-
- CHECK_HR(hr = IsSupported(
- pPD,
- cStreamIndex,
- nWidth,
- nHeight,
- (numeratorFps / denominatorFps),
- subType,
- pbSupportedSize,
- pbSupportedFps,
- pbSupportedFormat
- ));
+ HRESULT hr = S_OK;
+
+ UINT32 nWidth = 0, nHeight = 0, nFps = 0, numeratorFps = 30, denominatorFps = 1;
+ GUID subType;
+
+ if(!pPD || !pMediaType || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &nWidth, &nHeight));
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+ if(FAILED(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps))) {
+ numeratorFps = 30;
+ denominatorFps = 1;
+ }
+
+ CHECK_HR(hr = IsSupported(
+ pPD,
+ cStreamIndex,
+ nWidth,
+ nHeight,
+ (numeratorFps / denominatorFps),
+ subType,
+ pbSupportedSize,
+ pbSupportedFps,
+ pbSupportedFormat
+ ));
bail:
- return hr;
+ return hr;
}
HRESULT MFUtils::IsSupportedByInput(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- IMFTopologyNode *pNode,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- )
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ IMFTopologyNode *pNode,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+)
{
- HRESULT hr = S_OK;
-
- IMFMediaType *pMediaType = NULL;
- IUnknown* pObject = NULL;
- IMFActivate *pActivate = NULL;
- IMFMediaSink *pMediaSink = NULL;
- IMFTransform *pTransform = NULL;
- IMFStreamSink *pStreamSink = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
-
- if(!pPD || !pNode || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- CHECK_HR(hr = pNode->GetObject(&pObject));
- hr = pObject->QueryInterface(IID_PPV_ARGS(&pActivate));
- if(SUCCEEDED(hr))
- {
- SafeRelease(&pObject);
- hr = pActivate->ActivateObject(IID_IMFMediaSink, (void**)&pObject);
- if(FAILED(hr))
- {
- hr = pActivate->ActivateObject(IID_IMFTransform, (void**)&pObject);
- }
- }
-
- if(!pObject)
- {
- CHECK_HR(hr = E_NOINTERFACE);
- }
-
- hr = pObject->QueryInterface(IID_PPV_ARGS(&pMediaSink));
- if(FAILED(hr))
- {
- hr = pObject->QueryInterface(IID_PPV_ARGS(&pTransform));
- }
-
-
-
- if(pMediaSink)
- {
- CHECK_HR(hr = pMediaSink->GetStreamSinkByIndex(0, &pStreamSink));
- CHECK_HR(hr = pStreamSink->GetMediaTypeHandler(&pHandler));
- CHECK_HR(hr = pHandler->GetCurrentMediaType(&pMediaType));
-
- }
- else if(pTransform)
- {
- CHECK_HR(hr = pTransform->GetInputCurrentType(0, &pMediaType));
- }
- else
- {
- CHECK_HR(hr = pNode->GetInputPrefType(0, &pMediaType));
- }
-
- CHECK_HR(hr = IsSupported(
- pPD,
- cStreamIndex,
- pMediaType,
- pbSupportedSize,
- pbSupportedFps,
- pbSupportedFormat
- ));
+ HRESULT hr = S_OK;
+
+ IMFMediaType *pMediaType = NULL;
+ IUnknown* pObject = NULL;
+ IMFActivate *pActivate = NULL;
+ IMFMediaSink *pMediaSink = NULL;
+ IMFTransform *pTransform = NULL;
+ IMFStreamSink *pStreamSink = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+
+ if(!pPD || !pNode || !pbSupportedSize || !pbSupportedFps || !pbSupportedFormat) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ CHECK_HR(hr = pNode->GetObject(&pObject));
+ hr = pObject->QueryInterface(IID_PPV_ARGS(&pActivate));
+ if(SUCCEEDED(hr)) {
+ SafeRelease(&pObject);
+ hr = pActivate->ActivateObject(IID_IMFMediaSink, (void**)&pObject);
+ if(FAILED(hr)) {
+ hr = pActivate->ActivateObject(IID_IMFTransform, (void**)&pObject);
+ }
+ }
+
+ if(!pObject) {
+ CHECK_HR(hr = E_NOINTERFACE);
+ }
+
+ hr = pObject->QueryInterface(IID_PPV_ARGS(&pMediaSink));
+ if(FAILED(hr)) {
+ hr = pObject->QueryInterface(IID_PPV_ARGS(&pTransform));
+ }
+
+
+
+ if(pMediaSink) {
+ CHECK_HR(hr = pMediaSink->GetStreamSinkByIndex(0, &pStreamSink));
+ CHECK_HR(hr = pStreamSink->GetMediaTypeHandler(&pHandler));
+ CHECK_HR(hr = pHandler->GetCurrentMediaType(&pMediaType));
+
+ }
+ else if(pTransform) {
+ CHECK_HR(hr = pTransform->GetInputCurrentType(0, &pMediaType));
+ }
+ else {
+ CHECK_HR(hr = pNode->GetInputPrefType(0, &pMediaType));
+ }
+
+ CHECK_HR(hr = IsSupported(
+ pPD,
+ cStreamIndex,
+ pMediaType,
+ pbSupportedSize,
+ pbSupportedFps,
+ pbSupportedFormat
+ ));
bail:
- SafeRelease(&pObject);
- SafeRelease(&pActivate);
- SafeRelease(&pMediaType);
- SafeRelease(&pStreamSink);
- SafeRelease(&pHandler);
- return hr;
+ SafeRelease(&pObject);
+ SafeRelease(&pActivate);
+ SafeRelease(&pMediaType);
+ SafeRelease(&pStreamSink);
+ SafeRelease(&pHandler);
+ return hr;
}
HRESULT MFUtils::ConnectConverters(
- IMFTopologyNode *pNode,
- DWORD dwOutputIndex,
- IMFTopologyNode *pNodeConvFrameRate,
- IMFTopologyNode *pNodeConvColor,
- IMFTopologyNode *pNodeConvSize
- )
+ IMFTopologyNode *pNode,
+ DWORD dwOutputIndex,
+ IMFTopologyNode *pNodeConvFrameRate,
+ IMFTopologyNode *pNodeConvColor,
+ IMFTopologyNode *pNodeConvSize
+)
{
- HRESULT hr = S_OK;
-
- if(!pNode)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(pNodeConvFrameRate)
- {
- CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvFrameRate, 0));
- if(pNodeConvSize)
- {
- CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvSize, 0));
- if(pNodeConvColor)
- {
- CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
- }
- }
- else
- {
- if(pNodeConvColor)
- {
- CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvColor, 0));
- }
- }
- }
- else
- {
- if(pNodeConvSize)
- {
- CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvSize, 0));
- if(pNodeConvColor)
- {
- CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
- }
- }
- else
- {
- if(pNodeConvColor)
- {
- CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvColor, 0));
- }
- }
- }
+ HRESULT hr = S_OK;
+
+ if(!pNode) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(pNodeConvFrameRate) {
+ CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvFrameRate, 0));
+ if(pNodeConvSize) {
+ CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvSize, 0));
+ if(pNodeConvColor) {
+ CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
+ }
+ }
+ else {
+ if(pNodeConvColor) {
+ CHECK_HR(hr = pNodeConvFrameRate->ConnectOutput(0, pNodeConvColor, 0));
+ }
+ }
+ }
+ else {
+ if(pNodeConvSize) {
+ CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvSize, 0));
+ if(pNodeConvColor) {
+ CHECK_HR(hr = pNodeConvSize->ConnectOutput(0, pNodeConvColor, 0));
+ }
+ }
+ else {
+ if(pNodeConvColor) {
+ CHECK_HR(hr = pNode->ConnectOutput(dwOutputIndex, pNodeConvColor, 0));
+ }
+ }
+ }
bail:
- return hr;
+ return hr;
}
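Note that the parameter order (frame-rate, color, size) differs from the chain actually built, which is source -> frame-rate -> size -> color, with NULL converters simply skipped. A hypothetical call wiring only a color converter (node names are assumptions):

    // Sketch only: pNodeSource and pNodeColor are topology nodes already added to the topology.
    HRESULT hr = MFUtils::ConnectConverters(
        pNodeSource,   // upstream node
        0,             // output index on the upstream node
        NULL,          // no frame-rate converter
        pNodeColor,    // color converter node (e.g. wrapping CLSID_CColorConvertDMO)
        NULL           // no resizer
    );
    // With all three converters present the chain would be: source -> frame rate -> size -> color.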
// This function should be called only if VideoProcessor is not supported
HRESULT MFUtils::GetBestFormat(
- IMFMediaSource *pSource,
- const GUID *pSubType,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nFps,
- UINT32 *pnWidth,
- UINT32 *pnHeight,
- UINT32 *pnFps,
- const VideoSubTypeGuidPair **ppSubTypeGuidPair
- )
+ IMFMediaSource *pSource,
+ const GUID *pSubType,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nFps,
+ UINT32 *pnWidth,
+ UINT32 *pnHeight,
+ UINT32 *pnFps,
+ const VideoSubTypeGuidPair **ppSubTypeGuidPair
+)
{
#define _FindPairByGuid(_guid, _index) { \
@@ -1954,151 +1799,146 @@ HRESULT MFUtils::GetBestFormat(
} \
}
#if 0
- *pnWidth = 640;
- *pnHeight = 480;
- *pnFps = 30;
- return S_OK;
-#else
- HRESULT hr = S_OK;
- IMFPresentationDescriptor *pPD = NULL;
- IMFStreamDescriptor *pSD = NULL;
- IMFMediaTypeHandler *pHandler = NULL;
- IMFMediaType *pMediaType = NULL;
- DWORD cStreams = 0, cMediaTypesCount;
- GUID majorType, subType, _BestSubType;
- BOOL bFound = FALSE, fSelected;
- UINT32 _nWidth, _nHeight, numeratorFps, denominatorFps, _nFps, _nScore, _nBestScore;
- int PreferredVideoSubTypeGuidPairIndex;
- static const UINT32 kSubTypeMismatchPad = _UI32_MAX >> 4;
- static const UINT32 kFpsMismatchPad = _UI32_MAX >> 2;
-
- if (!ppSubTypeGuidPair || !pSubType) {
- CHECK_HR(hr = E_INVALIDARG);
- }
- _FindPairByGuid(*pSubType, PreferredVideoSubTypeGuidPairIndex);
- if (PreferredVideoSubTypeGuidPairIndex == -1) {
- CHECK_HR(hr = E_INVALIDARG);
- }
- *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
-
- _nBestScore = _UI32_MAX;
- CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
- CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
-
- for (DWORD i = 0; i < cStreams; i++)
- {
- fSelected = FALSE;
-
- CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
-
- if (fSelected)
- {
- CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
-
- CHECK_HR(hr = pHandler->GetMajorType(&majorType));
-
- if(majorType == MFMediaType_Video)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
-
- for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex)
- {
- CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
-
- CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
- // if(subType == *pSubType)
- {
- CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
- CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps));
- _nFps = (numeratorFps / denominatorFps);
-
- if (subType == *pSubType) {
- _nScore = 0;
- }
- else {
- _FindPairByGuid(subType, PreferredVideoSubTypeGuidPairIndex);
- if (PreferredVideoSubTypeGuidPairIndex == -1) {
- _nScore = kSubTypeMismatchPad; // Not a must but important: If(!VideoProcess) then CLSID_CColorConvertDMO
- }
- else {
- _nScore = kSubTypeMismatchPad >> (PreferredVideoSubTypeGuidPairsCount - PreferredVideoSubTypeGuidPairIndex);
- }
- }
- _nScore += abs((int)(_nWidth - nWidth)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
- _nScore += abs((int)(_nHeight - nHeight)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
- _nScore += (_nFps == nFps) ? 0 : kFpsMismatchPad; // Fps is a must because without video processor no alternative exist (CLSID_CFrameRateConvertDmo doesn't support I420)
-
- if (_nScore <= _nBestScore || !bFound)
- {
- *pnWidth = _nWidth;
- *pnHeight = _nHeight;
- *pnFps = _nFps;
- bFound = TRUE;
- _BestSubType = subType;
- _nBestScore = _nScore;
- }
- }
-
- SafeRelease(&pMediaType);
- }
- }
- }
-
- SafeRelease(&pHandler);
- SafeRelease(&pSD);
- }
+ *pnWidth = 640;
+ *pnHeight = 480;
+ *pnFps = 30;
+ return S_OK;
+#else
+ HRESULT hr = S_OK;
+ IMFPresentationDescriptor *pPD = NULL;
+ IMFStreamDescriptor *pSD = NULL;
+ IMFMediaTypeHandler *pHandler = NULL;
+ IMFMediaType *pMediaType = NULL;
+ DWORD cStreams = 0, cMediaTypesCount;
+ GUID majorType, subType, _BestSubType;
+ BOOL bFound = FALSE, fSelected;
+ UINT32 _nWidth, _nHeight, numeratorFps, denominatorFps, _nFps, _nScore, _nBestScore;
+ int PreferredVideoSubTypeGuidPairIndex;
+ static const UINT32 kSubTypeMismatchPad = _UI32_MAX >> 4;
+ static const UINT32 kFpsMismatchPad = _UI32_MAX >> 2;
+
+ if (!ppSubTypeGuidPair || !pSubType) {
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+ _FindPairByGuid(*pSubType, PreferredVideoSubTypeGuidPairIndex);
+ if (PreferredVideoSubTypeGuidPairIndex == -1) {
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+ *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
+
+ _nBestScore = _UI32_MAX;
+ CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
+ CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
+
+
+ for (DWORD i = 0; i < cStreams; i++) {
+ fSelected = FALSE;
+
+ CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
+
+ if (fSelected) {
+ CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
+
+ CHECK_HR(hr = pHandler->GetMajorType(&majorType));
+
+ if(majorType == MFMediaType_Video) {
+ CHECK_HR(hr = pHandler->GetMediaTypeCount(&cMediaTypesCount));
+
+ for(DWORD cMediaTypesIndex = 0; cMediaTypesIndex < cMediaTypesCount; ++cMediaTypesIndex) {
+ CHECK_HR(hr = pHandler->GetMediaTypeByIndex(cMediaTypesIndex, &pMediaType));
+
+ CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &subType));
+ // if(subType == *pSubType)
+ {
+ CHECK_HR(hr = MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &_nWidth, &_nHeight));
+ CHECK_HR(hr = MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numeratorFps, &denominatorFps));
+ _nFps = (numeratorFps / denominatorFps);
+
+ if (subType == *pSubType) {
+ _nScore = 0;
+ }
+ else {
+ _FindPairByGuid(subType, PreferredVideoSubTypeGuidPairIndex);
+ if (PreferredVideoSubTypeGuidPairIndex == -1) {
+ _nScore = kSubTypeMismatchPad; // Not a must but important: If(!VideoProcess) then CLSID_CColorConvertDMO
+ }
+ else {
+ _nScore = kSubTypeMismatchPad >> (PreferredVideoSubTypeGuidPairsCount - PreferredVideoSubTypeGuidPairIndex);
+ }
+ }
+ _nScore += abs((int)(_nWidth - nWidth)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
+ _nScore += abs((int)(_nHeight - nHeight)); // Not a must: If(!VideoProcess) then CLSID_CResizerDMO
+ _nScore += (_nFps == nFps) ? 0 : kFpsMismatchPad; // Fps is a must because without video processor no alternative exist (CLSID_CFrameRateConvertDmo doesn't support I420)
+
+ if (_nScore <= _nBestScore || !bFound) {
+ *pnWidth = _nWidth;
+ *pnHeight = _nHeight;
+ *pnFps = _nFps;
+ bFound = TRUE;
+ _BestSubType = subType;
+ _nBestScore = _nScore;
+ }
+ }
+
+ SafeRelease(&pMediaType);
+ }
+ }
+ }
+
+ SafeRelease(&pHandler);
+ SafeRelease(&pSD);
+ }
bail:
- SafeRelease(&pPD);
- SafeRelease(&pSD);
- SafeRelease(&pHandler);
- SafeRelease(&pMediaType);
-
- _FindPairByGuid(_BestSubType, PreferredVideoSubTypeGuidPairIndex);
- if (PreferredVideoSubTypeGuidPairIndex != -1) {
- *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
- }
- else /*if (_nBestScore > kSubTypeMismatchPad)*/ {
- *pnWidth = 640;
- *pnHeight = 480;
- *pnFps = 30;
- TSK_DEBUG_WARN("Failed to math subtype...using VGA@30fps");
- }
-
- return SUCCEEDED(hr) ? (bFound ? S_OK : E_NOT_SET): hr;
+ SafeRelease(&pPD);
+ SafeRelease(&pSD);
+ SafeRelease(&pHandler);
+ SafeRelease(&pMediaType);
+
+ _FindPairByGuid(_BestSubType, PreferredVideoSubTypeGuidPairIndex);
+ if (PreferredVideoSubTypeGuidPairIndex != -1) {
+ *ppSubTypeGuidPair = &PreferredVideoSubTypeGuidPairs[PreferredVideoSubTypeGuidPairIndex];
+ }
+ else { /*if (_nBestScore > kSubTypeMismatchPad)*/
+ *pnWidth = 640;
+ *pnHeight = 480;
+ *pnFps = 30;
+        TSK_DEBUG_WARN("Failed to match subtype...using VGA@30fps");
+ }
+
+ return SUCCEEDED(hr) ? (bFound ? S_OK : E_NOT_SET): hr;
#endif
}
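The selection above is a weighted score where the lowest value wins: an exact subtype match costs 0, a preferred-but-different subtype costs a fraction of kSubTypeMismatchPad (_UI32_MAX >> 4), an unknown subtype costs the full pad, size differences add linearly, and a frame-rate mismatch adds kFpsMismatchPad (_UI32_MAX >> 2), which in practice vetoes the candidate because no I420-capable frame-rate converter exists without the video processor. A restatement as a sketch (candWidth, candHeight, candFps and subtypePenalty are hypothetical locals):

    // Illustrative restatement of the per-candidate score computed above:
    UINT32 score = subtypePenalty                               // 0 .. kSubTypeMismatchPad
                 + (UINT32)abs((int)(candWidth  - nWidth))      // size can be fixed later by a resizer
                 + (UINT32)abs((int)(candHeight - nHeight))
                 + ((candFps == nFps) ? 0 : kFpsMismatchPad);   // fps mismatch dominates everything else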
HWND MFUtils::GetConsoleHwnd(void)
{
- #define MY_BUFSIZE 1024 // Buffer size for console window titles.
- HWND hwndFound; // This is what is returned to the caller.
- TCHAR pszNewWindowTitle[MY_BUFSIZE]; // Contains fabricated
- // WindowTitle.
- TCHAR pszOldWindowTitle[MY_BUFSIZE]; // Contains original
- // WindowTitle.
+#define MY_BUFSIZE 1024 // Buffer size for console window titles.
+ HWND hwndFound; // This is what is returned to the caller.
+ TCHAR pszNewWindowTitle[MY_BUFSIZE]; // Contains fabricated
+ // WindowTitle.
+ TCHAR pszOldWindowTitle[MY_BUFSIZE]; // Contains original
+ // WindowTitle.
- // Fetch current window title.
- GetConsoleTitle(pszOldWindowTitle, MY_BUFSIZE);
+ // Fetch current window title.
+ GetConsoleTitle(pszOldWindowTitle, MY_BUFSIZE);
- // Format a "unique" NewWindowTitle.
- wsprintf(pszNewWindowTitle,TEXT("%d/%d"),
- GetTickCount(),
- GetCurrentProcessId());
+ // Format a "unique" NewWindowTitle.
+ wsprintf(pszNewWindowTitle,TEXT("%d/%d"),
+ GetTickCount(),
+ GetCurrentProcessId());
- // Change current window title.
- SetConsoleTitle(pszNewWindowTitle);
+ // Change current window title.
+ SetConsoleTitle(pszNewWindowTitle);
- // Ensure window title has been updated.
- Sleep(40);
+ // Ensure window title has been updated.
+ Sleep(40);
- // Look for NewWindowTitle.
- hwndFound=FindWindow(NULL, pszNewWindowTitle);
+ // Look for NewWindowTitle.
+ hwndFound=FindWindow(NULL, pszNewWindowTitle);
- // Restore original window title.
- SetConsoleTitle(pszOldWindowTitle);
+ // Restore original window title.
+ SetConsoleTitle(pszOldWindowTitle);
- return(hwndFound);
+ return(hwndFound);
}
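GetConsoleHwnd() follows the classic title-swap technique: set a unique console title, FindWindow() it, then restore the original title. A hypothetical fallback pattern for callers that need some window handle:

    // Sketch only: fall back to the desktop window when no console is attached.
    HWND hWnd = MFUtils::GetConsoleHwnd();
    if (!hWnd) {
        hWnd = GetDesktopWindow();
    }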
diff --git a/plugins/pluginWinMF/internals/mf_utils.h b/plugins/pluginWinMF/internals/mf_utils.h
index 0819597..1225b3b 100755
--- a/plugins/pluginWinMF/internals/mf_utils.h
+++ b/plugins/pluginWinMF/internals/mf_utils.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -35,14 +35,13 @@
(*ppT)->Release(); \
*ppT = NULL; \
} \
-}
+}
#undef CHECK_HR
// In CHECK_HR(x) When (x) is a function it will be executed twice when used in "TSK_DEBUG_ERROR(x)" and "If(x)"
#define CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
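The comment above is about double evaluation: a naive variant that expands (x) in both the failure test and the log message would run the wrapped call twice. A sketch of the difference (CHECK_HR_NAIVE is hypothetical, not part of the source tree):

    // Hypothetical naive form: (x) appears twice, so e.g. pSession->Start(...) would execute twice on failure.
    // #define CHECK_HR_NAIVE(x) { if (FAILED(x)) { TSK_DEBUG_ERROR("Operation Failed (%08x)", (x)); goto bail; } }
    // The macro defined above instead evaluates (x) once into __hr__ and then only tests and logs that local.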
-typedef struct VideoSubTypeGuidPair
-{
+typedef struct VideoSubTypeGuidPair {
enum tmedia_chroma_e chroma;
const GUID& fourcc;
}
@@ -52,209 +51,207 @@ class MFUtils
{
public:
-static HRESULT Startup();
-static HRESULT Shutdown();
+ static HRESULT Startup();
+ static HRESULT Shutdown();
-static BOOL IsD3D9Supported();
-static BOOL IsLowLatencyH264Supported();
-static BOOL IsLowLatencyH264SupportsMaxSliceSize();
+ static BOOL IsD3D9Supported();
+ static BOOL IsLowLatencyH264Supported();
+ static BOOL IsLowLatencyH264SupportsMaxSliceSize();
-static HRESULT IsAsyncMFT(
- IMFTransform *pMFT, // The MFT to check
- BOOL* pbIsAsync // Whether the MFT is Async
- );
-static HRESULT UnlockAsyncMFT(
- IMFTransform *pMFT // The MFT to unlock
- );
+ static HRESULT IsAsyncMFT(
+ IMFTransform *pMFT, // The MFT to check
+ BOOL* pbIsAsync // Whether the MFT is Async
+ );
+ static HRESULT UnlockAsyncMFT(
+ IMFTransform *pMFT // The MFT to unlock
+ );
-static HRESULT CreatePCMAudioType(
- UINT32 sampleRate, // Samples per second
- UINT32 bitsPerSample, // Bits per sample
- UINT32 cChannels, // Number of channels
- IMFMediaType **ppType // Receives a pointer to the media type.
+ static HRESULT CreatePCMAudioType(
+ UINT32 sampleRate, // Samples per second
+ UINT32 bitsPerSample, // Bits per sample
+ UINT32 cChannels, // Number of channels
+ IMFMediaType **ppType // Receives a pointer to the media type.
+ );
+ static HRESULT CreateVideoType(
+ const GUID* subType, // video subType
+ IMFMediaType **ppType, // Receives a pointer to the media type.
+ UINT32 unWidth = 0, // Video width (0 to ignore)
+ UINT32 unHeight = 0 // Video height (0 to ignore)
+ );
+ static HRESULT ConvertVideoTypeToUncompressedType(
+ IMFMediaType *pType, // Pointer to an encoded video type.
+ const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
+ IMFMediaType **ppType // Receives a matching uncompressed video type.
+ );
+ static HRESULT CreateMediaSample(
+ DWORD cbData, // Maximum buffer size
+ IMFSample **ppSample // Receives the sample
+ );
+ static HRESULT ValidateVideoFormat(
+ IMFMediaType *pmt
+ );
+ static HRESULT IsVideoProcessorSupported(BOOL *pbSupported);
+ static HRESULT GetBestVideoProcessor(
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
+ IMFTransform **ppProcessor // Receives the video processor
+ );
+ static HRESULT GetBestCodec(
+ BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
+ const GUID& mediaType, // The MediaType
+ const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
+ const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
+ IMFTransform **ppMFT // Receives the decoder/encoder transform
+ );
+ static HRESULT BindOutputNode(
+ IMFTopologyNode *pNode // The Node
+ );
+ static HRESULT AddOutputNode(
+ IMFTopology *pTopology, // Topology.
+ IMFActivate *pActivate, // Media sink activation object.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+ );
+ static HRESULT AddTransformNode(
+ IMFTopology *pTopology, // Topology.
+ IMFTransform *pMFT, // MFT.
+ DWORD dwId, // Identifier of the stream sink.
+ IMFTopologyNode **ppNode // Receives the node pointer.
+ );
+ static HRESULT AddSourceNode(
+ IMFTopology *pTopology, // Topology.
+ IMFMediaSource *pSource, // Media source.
+ IMFPresentationDescriptor *pPD, // Presentation descriptor.
+ IMFStreamDescriptor *pSD, // Stream descriptor.
+ IMFTopologyNode **ppNode // Receives the node pointer.
);
-static HRESULT CreateVideoType(
- const GUID* subType, // video subType
- IMFMediaType **ppType, // Receives a pointer to the media type.
- UINT32 unWidth = 0, // Video width (0 to ignore)
- UINT32 unHeight = 0 // Video height (0 to ignore)
- );
-static HRESULT ConvertVideoTypeToUncompressedType(
- IMFMediaType *pType, // Pointer to an encoded video type.
- const GUID& subtype, // Uncompressed subtype (eg, RGB-32, AYUV)
- IMFMediaType **ppType // Receives a matching uncompressed video type.
+ static HRESULT CreateTopology(
+ IMFMediaSource *pSource, // Media source
+ IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
+ IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
+ IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
+ IMFMediaType *pIputTypeMain, // Main sink input MediaType
+ IMFTopology **ppTopo // Receives the newly created topology
+ );
+ static HRESULT ResolveTopology(
+ IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
+ IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
+ IMFTopology *pCurrentTopo = NULL // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
+ );
+ static HRESULT FindNodeObject(
+ IMFTopology *pInputTopo, // The Topology containing the node to find
+ TOPOID qwTopoNodeID, //The identifier for the node
+ void** ppObject // Receives the Object
+ );
+ static HRESULT CreateMediaSinkActivate(
+ IMFStreamDescriptor *pSourceSD, // Pointer to the stream descriptor.
+ HWND hVideoWindow, // Handle to the video clipping window.
+ IMFActivate **ppActivate
+ );
+ static HRESULT SetMediaType(
+ IMFMediaSource *pSource, // Media source.
+ IMFMediaType* pMediaType // Media Type.
+ );
+ static HRESULT SetVideoWindow(
+ IMFTopology *pTopology, // Topology.
+ IMFMediaSource *pSource, // Media source.
+ HWND hVideoWnd // Window for video playback.
+ );
+ static HRESULT RunSession(
+ IMFMediaSession *pSession, // Session to run
+        IMFTopology *pTopology // The topology
+ );
+ static HRESULT ShutdownSession(
+ IMFMediaSession *pSession, // The Session
+ IMFMediaSource *pSource = NULL // Source to shutdown (optional)
+ );
+ static HRESULT PauseSession(
+ IMFMediaSession *pSession, // The session
+ IMFMediaSource *pSource = NULL// Source to pause (optional)
+ );
+ static INT GetSupportedSubTypeIndex(
+ IMFMediaSource *pSource, // The source
+ const GUID& mediaType, // The MediaType
+ const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
+ );
+ static HRESULT IsSupported(
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nFps,
+ const GUID& guidFormat,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+ );
+ static HRESULT IsSupported(
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ IMFMediaType* pMediaType,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+ );
+ static HRESULT IsSupportedByInput(
+ IMFPresentationDescriptor *pPD,
+ DWORD cStreamIndex,
+ IMFTopologyNode *pNode,
+ BOOL* pbSupportedSize,
+ BOOL* pbSupportedFps,
+ BOOL* pbSupportedFormat
+ );
+ static HRESULT ConnectConverters(
+ IMFTopologyNode *pNode,
+ DWORD dwOutputIndex,
+ IMFTopologyNode *pNodeConvFrameRate,
+ IMFTopologyNode *pNodeConvColor,
+ IMFTopologyNode *pNodeConvSize
+ );
+ static HRESULT GetBestFormat(
+ IMFMediaSource *pSource,
+ const GUID *pSubType,
+ UINT32 nWidth,
+ UINT32 nHeight,
+ UINT32 nFps,
+ UINT32 *pnWidth,
+ UINT32 *pnHeight,
+ UINT32 *pnFps,
+ const VideoSubTypeGuidPair **pSubTypeGuidPair
);
-static HRESULT CreateMediaSample(
- DWORD cbData, // Maximum buffer size
- IMFSample **ppSample // Receives the sample
- );
-static HRESULT ValidateVideoFormat(
- IMFMediaType *pmt
- );
-static HRESULT IsVideoProcessorSupported(BOOL *pbSupported);
-static HRESULT GetBestVideoProcessor(
- const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_I420)
- const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_NV12)
- IMFTransform **ppProcessor // Receives the video processor
- );
-static HRESULT GetBestCodec(
- BOOL bEncoder, // Whether we request an encoder or not (TRUE=encoder, FALSE=decoder)
- const GUID& mediaType, // The MediaType
- const GUID& inputFormat, // The input MediaFormat (e.g. MFVideoFormat_NV12)
- const GUID& outputFormat, // The output MediaFormat (e.g. MFVideoFormat_H264)
- IMFTransform **ppMFT // Receives the decoder/encoder transform
- );
-static HRESULT BindOutputNode(
- IMFTopologyNode *pNode // The Node
- );
-static HRESULT AddOutputNode(
- IMFTopology *pTopology, // Topology.
- IMFActivate *pActivate, // Media sink activation object.
- DWORD dwId, // Identifier of the stream sink.
- IMFTopologyNode **ppNode // Receives the node pointer.
- );
-static HRESULT AddTransformNode(
- IMFTopology *pTopology, // Topology.
- IMFTransform *pMFT, // MFT.
- DWORD dwId, // Identifier of the stream sink.
- IMFTopologyNode **ppNode // Receives the node pointer.
- );
-static HRESULT AddSourceNode(
- IMFTopology *pTopology, // Topology.
- IMFMediaSource *pSource, // Media source.
- IMFPresentationDescriptor *pPD, // Presentation descriptor.
- IMFStreamDescriptor *pSD, // Stream descriptor.
- IMFTopologyNode **ppNode // Receives the node pointer.
- );
-static HRESULT CreateTopology(
- IMFMediaSource *pSource, // Media source
- IMFTransform *pTransform, // Transform filter (e.g. encoder or decoder) to insert between the source and Sink. NULL is valid.
- IMFActivate *pSinkActivateMain, // Main sink (e.g. sample grabber or EVR).
- IMFActivate *pSinkActivatePreview, // Preview sink. Optional. Could be NULL.
- IMFMediaType *pIputTypeMain, // Main sink input MediaType
- IMFTopology **ppTopo // Receives the newly created topology
- );
-static HRESULT ResolveTopology(
- IMFTopology *pInputTopo, // A pointer to the IMFTopology interface of the partial topology to be resolved.
- IMFTopology **ppOutputTopo, // Receives a pointer to the IMFTopology interface of the completed topology. The caller must release the interface.
- IMFTopology *pCurrentTopo = NULL // A pointer to the IMFTopology interface of the previous full topology. The topology loader can re-use objects from this topology in the new topology. This parameter can be NULL.
- );
-static HRESULT FindNodeObject(
- IMFTopology *pInputTopo, // The Topology containing the node to find
- TOPOID qwTopoNodeID, //The identifier for the node
- void** ppObject // Receives the Object
- );
-static HRESULT CreateMediaSinkActivate(
- IMFStreamDescriptor *pSourceSD, // Pointer to the stream descriptor.
- HWND hVideoWindow, // Handle to the video clipping window.
- IMFActivate **ppActivate
-);
-static HRESULT SetMediaType(
- IMFMediaSource *pSource, // Media source.
- IMFMediaType* pMediaType // Media Type.
- );
-static HRESULT SetVideoWindow(
- IMFTopology *pTopology, // Topology.
- IMFMediaSource *pSource, // Media source.
- HWND hVideoWnd // Window for video playback.
- );
-static HRESULT RunSession(
- IMFMediaSession *pSession, // Session to run
- IMFTopology *pTopology // The toppology
- );
-static HRESULT ShutdownSession(
- IMFMediaSession *pSession, // The Session
- IMFMediaSource *pSource = NULL // Source to shutdown (optional)
- );
-static HRESULT PauseSession(
- IMFMediaSession *pSession, // The session
- IMFMediaSource *pSource = NULL// Source to pause (optional)
- );
-static INT GetSupportedSubTypeIndex(
- IMFMediaSource *pSource, // The source
- const GUID& mediaType, // The MediaType
- const VideoSubTypeGuidPair* subTypes, UINT subTypesCount // List of preferred subtypes (in ascending order)
- );
-static HRESULT IsSupported(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nFps,
- const GUID& guidFormat,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- );
-static HRESULT IsSupported(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- IMFMediaType* pMediaType,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- );
-static HRESULT IsSupportedByInput(
- IMFPresentationDescriptor *pPD,
- DWORD cStreamIndex,
- IMFTopologyNode *pNode,
- BOOL* pbSupportedSize,
- BOOL* pbSupportedFps,
- BOOL* pbSupportedFormat
- );
-static HRESULT ConnectConverters(
- IMFTopologyNode *pNode,
- DWORD dwOutputIndex,
- IMFTopologyNode *pNodeConvFrameRate,
- IMFTopologyNode *pNodeConvColor,
- IMFTopologyNode *pNodeConvSize
- );
-static HRESULT GetBestFormat(
- IMFMediaSource *pSource,
- const GUID *pSubType,
- UINT32 nWidth,
- UINT32 nHeight,
- UINT32 nFps,
- UINT32 *pnWidth,
- UINT32 *pnHeight,
- UINT32 *pnFps,
- const VideoSubTypeGuidPair **pSubTypeGuidPair
- );
-static HWND GetConsoleHwnd(void);
+ static HWND GetConsoleHwnd(void);
-template <class Q>
-static HRESULT GetTopoNodeObject(IMFTopologyNode *pNode, Q **ppObject)
-{
- IUnknown *pUnk = NULL; // zero output
+ template <class Q>
+ static HRESULT GetTopoNodeObject(IMFTopologyNode *pNode, Q **ppObject) {
+ IUnknown *pUnk = NULL; // zero output
- HRESULT hr = pNode->GetObject(&pUnk);
- if (SUCCEEDED(hr))
- {
- pUnk->QueryInterface(IID_PPV_ARGS(ppObject));
- pUnk->Release();
+ HRESULT hr = pNode->GetObject(&pUnk);
+ if (SUCCEEDED(hr)) {
+ pUnk->QueryInterface(IID_PPV_ARGS(ppObject));
+ pUnk->Release();
+ }
+ return hr;
}
- return hr;
-}
private:
- static BOOL g_bStarted;
+ static BOOL g_bStarted;
- static DWORD g_dwMajorVersion;
- static DWORD g_dwMinorVersion;
+ static DWORD g_dwMajorVersion;
+ static DWORD g_dwMinorVersion;
- static BOOL g_bLowLatencyH264Checked;
- static BOOL g_bLowLatencyH264Supported;
- static BOOL g_bLowLatencyH264SupportsMaxSliceSize;
+ static BOOL g_bLowLatencyH264Checked;
+ static BOOL g_bLowLatencyH264Supported;
+ static BOOL g_bLowLatencyH264SupportsMaxSliceSize;
- static BOOL g_bD3D9Checked;
- static BOOL g_bD3D9Supported;
+ static BOOL g_bD3D9Checked;
+ static BOOL g_bD3D9Supported;
public:
- static const TOPOID g_ullTopoIdSinkMain;
- static const TOPOID g_ullTopoIdSinkPreview;
- static const TOPOID g_ullTopoIdSource;
- static const TOPOID g_ullTopoIdVideoProcessor;
+ static const TOPOID g_ullTopoIdSinkMain;
+ static const TOPOID g_ullTopoIdSinkPreview;
+ static const TOPOID g_ullTopoIdSource;
+ static const TOPOID g_ullTopoIdVideoProcessor;
};
#endif /* PLUGIN_WIN_MF_UTILS_H */
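A small usage sketch for the GetTopoNodeObject template declared above (pNode is assumed to be a transform node taken from an already-resolved topology):

    // Sketch only: query the node's underlying object for IMFTransform.
    IMFTransform *pMFT = NULL;
    HRESULT hr = MFUtils::GetTopoNodeObject(pNode, &pMFT);
    if (SUCCEEDED(hr) && pMFT) {
        // ... use the transform ...
        SafeRelease(&pMFT);
    }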
diff --git a/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx b/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
index bee00f0..c815deb 100755
--- a/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_codec_h264.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -31,35 +31,34 @@
#include "tsk_memory.h"
#include "tsk_debug.h"
-typedef struct mf_codec_h264_s
-{
- TDAV_DECLARE_CODEC_H264_COMMON;
-
- // Encoder
- struct{
- MFCodecVideoH264* pInst;
- void* buffer;
- int64_t frame_count;
- tsk_bool_t force_idr;
- int32_t quality; // [1-31]
- int rotation;
- int neg_width;
- int neg_height;
- int neg_fps;
- int max_bitrate_bps;
- int32_t max_bw_kpbs;
- tsk_bool_t passthrough; // whether to bypass encoding
- } encoder;
-
- // decoder
- struct{
- MFCodecVideoH264* pInst;
- void* accumulator;
- tsk_size_t accumulator_pos;
- tsk_size_t accumulator_size;
- uint16_t last_seq;
- tsk_bool_t passthrough; // whether to bypass decoding
- } decoder;
+typedef struct mf_codec_h264_s {
+ TDAV_DECLARE_CODEC_H264_COMMON;
+
+ // Encoder
+ struct {
+ MFCodecVideoH264* pInst;
+ void* buffer;
+ int64_t frame_count;
+ tsk_bool_t force_idr;
+ int32_t quality; // [1-31]
+ int rotation;
+ int neg_width;
+ int neg_height;
+ int neg_fps;
+ int max_bitrate_bps;
+ int32_t max_bw_kpbs;
+ tsk_bool_t passthrough; // whether to bypass encoding
+ } encoder;
+
+ // decoder
+ struct {
+ MFCodecVideoH264* pInst;
+ void* accumulator;
+ tsk_size_t accumulator_pos;
+ tsk_size_t accumulator_size;
+ uint16_t last_seq;
+ tsk_bool_t passthrough; // whether to bypass decoding
+ } decoder;
}
mf_codec_h264_t;
@@ -78,372 +77,368 @@ static int mf_codec_h264_close_decoder(mf_codec_h264_t* self);
static int mf_codec_h264_set(tmedia_codec_t* self, const tmedia_param_t* param)
{
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
- if(!self->opened){
- TSK_DEBUG_ERROR("Codec not opened");
- return -1;
- }
- if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "action")){
- tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
- switch(action){
- case tmedia_codec_action_encode_idr:
- {
- h264->encoder.force_idr = tsk_true;
- break;
- }
- case tmedia_codec_action_bw_down:
- {
- h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
- break;
- }
- case tmedia_codec_action_bw_up:
- {
- h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
- break;
- }
- }
- return 0;
- }
- else if(tsk_striequals(param->key, "bypass-encoding")){
- h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
- h264->encoder.pInst->setBundled(h264->encoder.passthrough);
- TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
- return 0;
- }
- else if(tsk_striequals(param->key, "bypass-decoding")){
- h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
- h264->decoder.pInst->setBundled(h264->decoder.passthrough);
- TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
- return 0;
- }
- else if(tsk_striequals(param->key, "rotation")){
- int rotation = *((int32_t*)param->value);
- if(h264->encoder.rotation != rotation){
- if(self->opened){
- int ret;
- h264->encoder.rotation = rotation;
- if((ret = mf_codec_h264_close_encoder(h264))){
- return ret;
- }
- if((ret = mf_codec_h264_open_encoder(h264))){
- return ret;
- }
- }
- }
- return 0;
- }
- }
- return -1;
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ if(!self->opened) {
+ TSK_DEBUG_ERROR("Codec not opened");
+ return -1;
+ }
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "action")) {
+ tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+ switch(action) {
+ case tmedia_codec_action_encode_idr: {
+ h264->encoder.force_idr = tsk_true;
+ break;
+ }
+ case tmedia_codec_action_bw_down: {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality + 1), 31);
+ break;
+ }
+ case tmedia_codec_action_bw_up: {
+ h264->encoder.quality = TSK_CLAMP(1, (h264->encoder.quality - 1), 31);
+ break;
+ }
+ }
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-encoding")) {
+ h264->encoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ h264->encoder.pInst->setBundled(h264->encoder.passthrough);
+ TSK_DEBUG_INFO("[H.264] bypass-encoding = %d", h264->encoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "bypass-decoding")) {
+ h264->decoder.passthrough = *((int32_t*)param->value) ? tsk_true : tsk_false;
+ h264->decoder.pInst->setBundled(h264->decoder.passthrough);
+ TSK_DEBUG_INFO("[H.264] bypass-decoding = %d", h264->decoder.passthrough);
+ return 0;
+ }
+ else if(tsk_striequals(param->key, "rotation")) {
+ int rotation = *((int32_t*)param->value);
+ if(h264->encoder.rotation != rotation) {
+ if(self->opened) {
+ int ret;
+ h264->encoder.rotation = rotation;
+ if((ret = mf_codec_h264_close_encoder(h264))) {
+ return ret;
+ }
+ if((ret = mf_codec_h264_open_encoder(h264))) {
+ return ret;
+ }
+ }
+ }
+ return 0;
+ }
+ }
+ return -1;
}
static int mf_codec_h264_open(tmedia_codec_t* self)
{
- int ret;
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
-
- if(!h264){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- /* the caller (base class) already checked that the codec is not opened */
-
- // Encoder
- if((ret = mf_codec_h264_open_encoder(h264))){
- return ret;
- }
-
- // Decoder
- if((ret = mf_codec_h264_open_decoder(h264))){
- return ret;
- }
-
- return 0;
+ int ret;
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+
+ if(!h264) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ /* the caller (base class) already checked that the codec is not opened */
+
+ // Encoder
+ if((ret = mf_codec_h264_open_encoder(h264))) {
+ return ret;
+ }
+
+ // Decoder
+ if((ret = mf_codec_h264_open_decoder(h264))) {
+ return ret;
+ }
+
+ return 0;
}
static int mf_codec_h264_close(tmedia_codec_t* self)
{
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
- if(!h264){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!h264) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- /* the caller (base class) alreasy checked that the codec is opened */
+    /* the caller (base class) already checked that the codec is opened */
- // Encoder
- mf_codec_h264_close_encoder(h264);
+ // Encoder
+ mf_codec_h264_close_encoder(h264);
- // Decoder
- mf_codec_h264_close_decoder(h264);
+ // Decoder
+ mf_codec_h264_close_decoder(h264);
- return 0;
+ return 0;
}
static tsk_size_t mf_codec_h264_encode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size)
{
- int ret = 0;
- tsk_bool_t send_idr, send_hdr;
-
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
-
- if(!self || !in_data || !in_size){
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
-
- if(!self->opened || !h264->encoder.pInst || !h264->encoder.pInst->IsReady()){
- TSK_DEBUG_ERROR("Encoder not opened or not ready");
- return 0;
- }
-
-
- HRESULT hr = S_OK;
- IMFSample *pSampleOut = NULL;
- IMFMediaBuffer* pBufferOut = NULL;
-
- // send IDR for:
- // - the first frame
- // - remote peer requested an IDR
- // - every second within the first 4seconds
- send_idr = (
- h264->encoder.frame_count++ == 0
- || h264 ->encoder.force_idr
- || ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
- );
-
- if(send_idr) {
- CHECK_HR(hr = h264->encoder.pInst->RequestKeyFrame());
- }
-
- // send SPS and PPS headers for:
- // - IDR frames (not required but it's the easiest way to deal with pkt loss)
- // - every 5 seconds after the first 4seconds
- send_hdr = (
- send_idr
- || ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
- );
- if(send_hdr){
- //FIXME: MF_MT_MPEG_SEQUENCE_HEADER
- // tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), h264->encoder.context->extradata, (tsk_size_t)h264->encoder.context->extradata_size);
- }
-
- if (h264->encoder.passthrough) {
- tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
- return 0;
- }
-
- // Encode data
- CHECK_HR(hr = h264->encoder.pInst->Process(in_data, (UINT32)in_size, &pSampleOut));
- if(pSampleOut) {
- CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
-
- BYTE* pBufferPtr = NULL;
- DWORD dwDataLength = 0;
- CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
- if(dwDataLength > 0) {
- CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
- tdav_codec_h264_rtp_encap(common, (const uint8_t*)pBufferPtr, (tsk_size_t)dwDataLength);
- CHECK_HR(hr = pBufferOut->Unlock());
- }
- }
-
- // reset
- h264->encoder.force_idr = tsk_false;
+ int ret = 0;
+ tsk_bool_t send_idr, send_hdr;
+
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self || !in_data || !in_size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst || !h264->encoder.pInst->IsReady()) {
+ TSK_DEBUG_ERROR("Encoder not opened or not ready");
+ return 0;
+ }
+
+
+ HRESULT hr = S_OK;
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+
+ // send IDR for:
+ // - the first frame
+ // - remote peer requested an IDR
+ // - every second within the first 4seconds
+ send_idr = (
+ h264->encoder.frame_count++ == 0
+                   || h264->encoder.force_idr
+ || ( (h264->encoder.frame_count < h264->encoder.neg_fps * 4) && ((h264->encoder.frame_count % h264->encoder.neg_fps)==0) )
+ );
+
+ if(send_idr) {
+ CHECK_HR(hr = h264->encoder.pInst->RequestKeyFrame());
+ }
+
+ // send SPS and PPS headers for:
+ // - IDR frames (not required but it's the easiest way to deal with pkt loss)
+ // - every 5 seconds after the first 4seconds
+ send_hdr = (
+ send_idr
+ || ( (h264->encoder.frame_count % (h264->encoder.neg_fps * 5))==0 )
+ );
+ if(send_hdr) {
+ //FIXME: MF_MT_MPEG_SEQUENCE_HEADER
+ // tdav_codec_h264_rtp_encap(TDAV_CODEC_H264_COMMON(h264), h264->encoder.context->extradata, (tsk_size_t)h264->encoder.context->extradata_size);
+ }
+
+ if (h264->encoder.passthrough) {
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)in_data, in_size);
+ return 0;
+ }
+
+ // Encode data
+ CHECK_HR(hr = h264->encoder.pInst->Process(in_data, (UINT32)in_size, &pSampleOut));
+ if(pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if(dwDataLength > 0) {
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+ tdav_codec_h264_rtp_encap(common, (const uint8_t*)pBufferPtr, (tsk_size_t)dwDataLength);
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+
+ // reset
+ h264->encoder.force_idr = tsk_false;
bail:
- SafeRelease(&pSampleOut);
- SafeRelease(&pBufferOut);
- return 0;
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
+ return 0;
}
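The IDR and parameter-set cadence above reduces to two booleans; a simplified restatement (the real code post-increments frame_count inline, and neg_fps = 25 is only an assumed value for the arithmetic):

    // With neg_fps = 25: frames 0, 25, 50, 75 (the first 4 seconds) get an IDR, afterwards only forced ones do.
    bool send_idr = (frame_count == 0)
                 || force_idr
                 || ((frame_count < neg_fps * 4) && ((frame_count % neg_fps) == 0));
    // SPS/PPS accompany every IDR and are also repeated every 5 seconds (every neg_fps * 5 frames).
    bool send_hdr = send_idr || ((frame_count % (neg_fps * 5)) == 0);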
static tsk_size_t mf_codec_h264_decode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size, const tsk_object_t* proto_hdr)
{
- mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
- const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
-
- const uint8_t* pay_ptr = tsk_null;
- tsk_size_t pay_size = 0;
- int ret;
- tsk_bool_t append_scp, end_of_unit;
- tsk_bool_t sps_or_pps;
- tsk_size_t retsize = 0, size_to_copy = 0;
- static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
- static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
-
- if(!h264 || !in_data || !in_size || !out_data)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
-
- if(!self->opened || !h264->encoder.pInst || !h264->decoder.pInst->IsReady()){
- TSK_DEBUG_ERROR("Decoder not opened or not ready");
- return 0;
- }
-
- HRESULT hr = S_OK;
- IMFSample *pSampleOut = NULL;
- IMFMediaBuffer* pBufferOut = NULL;
-
- /* Packet lost? */
- if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq){
- TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
- }
- h264->decoder.last_seq = rtp_hdr->seq_num;
-
-
- /* 5.3. NAL Unit Octet Usage
- +---------------+
+ mf_codec_h264_t* h264 = (mf_codec_h264_t*)self;
+ const trtp_rtp_header_t* rtp_hdr = (const trtp_rtp_header_t*)proto_hdr;
+
+ const uint8_t* pay_ptr = tsk_null;
+ tsk_size_t pay_size = 0;
+ int ret;
+ tsk_bool_t append_scp, end_of_unit;
+ tsk_bool_t sps_or_pps;
+ tsk_size_t retsize = 0, size_to_copy = 0;
+ static const tsk_size_t xmax_size = (3840 * 2160 * 3) >> 3; // >>3 instead of >>1 (not an error)
+ static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
+
+ if(!h264 || !in_data || !in_size || !out_data) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ if(!self->opened || !h264->encoder.pInst || !h264->decoder.pInst->IsReady()) {
+ TSK_DEBUG_ERROR("Decoder not opened or not ready");
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
+
+ /* Packet lost? */
+ if((h264->decoder.last_seq + 1) != rtp_hdr->seq_num && h264->decoder.last_seq) {
+ TSK_DEBUG_INFO("[H.264] Packet loss, seq_num=%d", (h264->decoder.last_seq + 1));
+ }
+ h264->decoder.last_seq = rtp_hdr->seq_num;
+
+
+ /* 5.3. NAL Unit Octet Usage
+ +---------------+
|0|1|2|3|4|5|6|7|
+-+-+-+-+-+-+-+-+
|F|NRI| Type |
+---------------+
- */
- if (*((uint8_t*)in_data) & 0x80) {
- TSK_DEBUG_WARN("F=1");
- /* reset accumulator */
- h264->decoder.accumulator_pos = 0;
- return 0;
- }
-
- /* get payload */
- if ((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp, &end_of_unit)) || !pay_ptr || !pay_size){
- TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
- return 0;
- }
- //append_scp = tsk_true;
- size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
- // whether it's SPS or PPS (append_scp is false for subsequent FUA chuncks)
- sps_or_pps = append_scp && pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
-
- // start-accumulator
- if (!h264->decoder.accumulator) {
- if (size_to_copy > xmax_size) {
- TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", size_to_copy, xmax_size);
- return 0;
- }
- if (!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t)))) {
- TSK_DEBUG_ERROR("Failed to allocated new buffer");
- return 0;
- }
- h264->decoder.accumulator_size = size_to_copy;
- }
- if ((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size) {
- TSK_DEBUG_ERROR("BufferOverflow");
- h264->decoder.accumulator_pos = 0;
- return 0;
- }
- if ((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size) {
- if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy)))){
- TSK_DEBUG_ERROR("Failed to reallocated new buffer");
- h264->decoder.accumulator_pos = 0;
- h264->decoder.accumulator_size = 0;
- return 0;
- }
- h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
- }
-
- if (append_scp) {
- memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
- h264->decoder.accumulator_pos += start_code_prefix_size;
- }
- memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
- h264->decoder.accumulator_pos += pay_size;
- // end-accumulator
-
- /*if(sps_or_pps){
- // http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
- // SPS and PPS should be bundled with IDR
- TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
- }
- else */if (rtp_hdr->marker) {
- if (h264->decoder.passthrough) {
- if (*out_max_size < h264->decoder.accumulator_pos) {
- if ((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos))) {
- *out_max_size = h264->decoder.accumulator_pos;
- }
- else {
- *out_max_size = 0;
- return 0;
- }
- }
- memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
- retsize = h264->decoder.accumulator_pos;
- }
- else { // !h264->decoder.passthrough
- /* decode the picture */
- CHECK_HR(hr = h264->decoder.pInst->Process(h264->decoder.accumulator, (UINT32)h264->decoder.accumulator_pos, &pSampleOut));
- if (pSampleOut) {
- CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
-
- BYTE* pBufferPtr = NULL;
- DWORD dwDataLength = 0;
- CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
- if (dwDataLength > 0) {
- CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
- {
- /* IDR ? */
- if(((pay_ptr[0] & 0x1F) == 0x05) && TMEDIA_CODEC_VIDEO(self)->in.callback){
- TSK_DEBUG_INFO("Decoded H.264 IDR");
- TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
- TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
- TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
- }
- /* fill out */
- if(*out_max_size < dwDataLength){
- if((*out_data = tsk_realloc(*out_data, dwDataLength))){
- *out_max_size = dwDataLength;
- }
- else{
- *out_max_size = 0;
- return 0;
- }
- }
- retsize = (tsk_size_t)dwDataLength;
- TMEDIA_CODEC_VIDEO(h264)->in.width = h264->decoder.pInst->GetWidth();
- TMEDIA_CODEC_VIDEO(h264)->in.height = h264->decoder.pInst->GetHeight();
- memcpy(*out_data, pBufferPtr, retsize);
- }
- CHECK_HR(hr = pBufferOut->Unlock());
- }
- }
- }// else(!h264->decoder.passthrough)
- } // else if(rtp_hdr->marker)
+ */
+ if (*((uint8_t*)in_data) & 0x80) {
+ TSK_DEBUG_WARN("F=1");
+ /* reset accumulator */
+ h264->decoder.accumulator_pos = 0;
+ return 0;
+ }
+
+ /* get payload */
+ if ((ret = tdav_codec_h264_get_pay(in_data, in_size, (const void**)&pay_ptr, &pay_size, &append_scp, &end_of_unit)) || !pay_ptr || !pay_size) {
+ TSK_DEBUG_ERROR("Depayloader failed to get H.264 content");
+ return 0;
+ }
+ //append_scp = tsk_true;
+ size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
+ // whether it's SPS or PPS (append_scp is false for subsequent FUA chuncks)
+    // whether it's SPS or PPS (append_scp is false for subsequent FU-A chunks)
+
+ // start-accumulator
+ if (!h264->decoder.accumulator) {
+ if (size_to_copy > xmax_size) {
+ TSK_DEBUG_ERROR("%u too big to contain valid encoded data. xmax_size=%u", size_to_copy, xmax_size);
+ return 0;
+ }
+ if (!(h264->decoder.accumulator = tsk_calloc(size_to_copy, sizeof(uint8_t)))) {
+            TSK_DEBUG_ERROR("Failed to allocate new buffer");
+ return 0;
+ }
+ h264->decoder.accumulator_size = size_to_copy;
+ }
+ if ((h264->decoder.accumulator_pos + size_to_copy) >= xmax_size) {
+ TSK_DEBUG_ERROR("BufferOverflow");
+ h264->decoder.accumulator_pos = 0;
+ return 0;
+ }
+ if ((h264->decoder.accumulator_pos + size_to_copy) > h264->decoder.accumulator_size) {
+ if(!(h264->decoder.accumulator = tsk_realloc(h264->decoder.accumulator, (h264->decoder.accumulator_pos + size_to_copy)))) {
+            TSK_DEBUG_ERROR("Failed to reallocate new buffer");
+ h264->decoder.accumulator_pos = 0;
+ h264->decoder.accumulator_size = 0;
+ return 0;
+ }
+ h264->decoder.accumulator_size = (h264->decoder.accumulator_pos + size_to_copy);
+ }
+
+ if (append_scp) {
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], H264_START_CODE_PREFIX, start_code_prefix_size);
+ h264->decoder.accumulator_pos += start_code_prefix_size;
+ }
+ memcpy(&((uint8_t*)h264->decoder.accumulator)[h264->decoder.accumulator_pos], pay_ptr, pay_size);
+ h264->decoder.accumulator_pos += pay_size;
+ // end-accumulator
+
+ /*if(sps_or_pps){
+ // http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
+ // SPS and PPS should be bundled with IDR
+ TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
+ }
+ else */if (rtp_hdr->marker) {
+ if (h264->decoder.passthrough) {
+ if (*out_max_size < h264->decoder.accumulator_pos) {
+ if ((*out_data = tsk_realloc(*out_data, h264->decoder.accumulator_pos))) {
+ *out_max_size = h264->decoder.accumulator_pos;
+ }
+ else {
+ *out_max_size = 0;
+ return 0;
+ }
+ }
+ memcpy(*out_data, h264->decoder.accumulator, h264->decoder.accumulator_pos);
+ retsize = h264->decoder.accumulator_pos;
+ }
+ else { // !h264->decoder.passthrough
+ /* decode the picture */
+ CHECK_HR(hr = h264->decoder.pInst->Process(h264->decoder.accumulator, (UINT32)h264->decoder.accumulator_pos, &pSampleOut));
+ if (pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if (dwDataLength > 0) {
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+ {
+ /* IDR ? */
+ if(((pay_ptr[0] & 0x1F) == 0x05) && TMEDIA_CODEC_VIDEO(self)->in.callback) {
+ TSK_DEBUG_INFO("Decoded H.264 IDR");
+ TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
+ TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+ TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+ }
+ /* fill out */
+ if(*out_max_size < dwDataLength) {
+ if((*out_data = tsk_realloc(*out_data, dwDataLength))) {
+ *out_max_size = dwDataLength;
+ }
+ else {
+ *out_max_size = 0;
+ return 0;
+ }
+ }
+ retsize = (tsk_size_t)dwDataLength;
+ TMEDIA_CODEC_VIDEO(h264)->in.width = h264->decoder.pInst->GetWidth();
+ TMEDIA_CODEC_VIDEO(h264)->in.height = h264->decoder.pInst->GetHeight();
+ memcpy(*out_data, pBufferPtr, retsize);
+ }
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+ }// else(!h264->decoder.passthrough)
+ } // else if(rtp_hdr->marker)
bail:
- if (rtp_hdr->marker) {
- h264->decoder.accumulator_pos = 0;
- }
- if (FAILED(hr) /*|| (!pSampleOut && rtp_hdr->marker)*/){
- TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
- if(TMEDIA_CODEC_VIDEO(self)->in.callback){
- TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
- TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
- TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
- }
- }
- SafeRelease(&pSampleOut);
- SafeRelease(&pBufferOut);
- return retsize;
+ if (rtp_hdr->marker) {
+ h264->decoder.accumulator_pos = 0;
+ }
+ if (FAILED(hr) /*|| (!pSampleOut && rtp_hdr->marker)*/) {
+ TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
+ if(TMEDIA_CODEC_VIDEO(self)->in.callback) {
+ TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
+ TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
+ TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
+ }
+ }
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
+ return retsize;
}
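For reference, the NAL unit octet tested at the top of the decoder (the F-bit check) and the 0x1F mask used for the SPS/PPS/IDR tests can be sketched as follows (nalHdr is a hypothetical local; the field layout is the one shown in the 5.3 diagram above):

    uint8_t nalHdr  = ((const uint8_t*)in_data)[0];
    bool    fBit    = (nalHdr & 0x80) != 0;  // forbidden_zero_bit: unit treated as corrupt, accumulator reset
    uint8_t nri     = (nalHdr >> 5) & 0x03;  // nal_ref_idc
    uint8_t nalType = (nalHdr & 0x1F);       // 7 = SPS, 8 = PPS, 5 = IDR slice, 28 = FU-A fragment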
static tsk_bool_t mf_codec_h264_sdp_att_match(const tmedia_codec_t* self, const char* att_name, const char* att_value)
{
- return tdav_codec_h264_common_sdp_att_match((tdav_codec_h264_common_t*)self, att_name, att_value);
+ return tdav_codec_h264_common_sdp_att_match((tdav_codec_h264_common_t*)self, att_name, att_value);
}
static char* mf_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char* att_name)
{
- char* att = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
- if(att && tsk_striequals(att_name, "fmtp")) {
- tsk_strcat(&att, "; impl=MF");
- }
- return att;
+ char* att = tdav_codec_h264_common_sdp_att_get((const tdav_codec_h264_common_t*)self, att_name);
+ if(att && tsk_striequals(att_name, "fmtp")) {
+ tsk_strcat(&att, "; impl=MF");
+ }
+ return att;
}
@@ -454,63 +449,61 @@ static char* mf_codec_h264_sdp_att_get(const tmedia_codec_t* self, const char* a
/* constructor */
static tsk_object_t* mf_codec_h264_base_ctor(tsk_object_t * self, va_list * app)
{
- mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
- if(h264){
- /* init base: called by tmedia_codec_create() */
- /* init self */
- if(mf_codec_h264_init(h264, profile_idc_baseline) != 0){
- return tsk_null;
- }
- }
- return self;
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264) {
+ /* init base: called by tmedia_codec_create() */
+ /* init self */
+ if(mf_codec_h264_init(h264, profile_idc_baseline) != 0) {
+ return tsk_null;
+ }
+ }
+ return self;
}
/* destructor */
static tsk_object_t* mf_codec_h264_base_dtor(tsk_object_t * self)
-{
- mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
- if(h264){
- /* deinit base */
- tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
- /* deinit self */
- mf_codec_h264_deinit(h264);
- }
-
- return self;
+{
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264) {
+ /* deinit base */
+ tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+ /* deinit self */
+ mf_codec_h264_deinit(h264);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t mf_codec_h264_base_def_s =
-{
- sizeof(mf_codec_h264_t),
- mf_codec_h264_base_ctor,
- mf_codec_h264_base_dtor,
- tmedia_codec_cmp,
+static const tsk_object_def_t mf_codec_h264_base_def_s = {
+ sizeof(mf_codec_h264_t),
+ mf_codec_h264_base_ctor,
+ mf_codec_h264_base_dtor,
+ tmedia_codec_cmp,
};
/* plugin definition*/
-static const tmedia_codec_plugin_def_t mf_codec_h264_base_plugin_def_s =
-{
- &mf_codec_h264_base_def_s,
-
- tmedia_video,
- tmedia_codec_id_h264_bp,
- "H264",
- "H264 Base Profile (Media Foundation)",
- TMEDIA_CODEC_FORMAT_H264_BP,
- tsk_true,
- 90000, // rate
-
- /* audio */
- { 0 },
-
- /* video (width, height, fps) */
- {176, 144, 0}, // fps is @deprecated
-
- mf_codec_h264_set,
- mf_codec_h264_open,
- mf_codec_h264_close,
- mf_codec_h264_encode,
- mf_codec_h264_decode,
- mf_codec_h264_sdp_att_match,
- mf_codec_h264_sdp_att_get
+static const tmedia_codec_plugin_def_t mf_codec_h264_base_plugin_def_s = {
+ &mf_codec_h264_base_def_s,
+
+ tmedia_video,
+ tmedia_codec_id_h264_bp,
+ "H264",
+ "H264 Base Profile (Media Foundation)",
+ TMEDIA_CODEC_FORMAT_H264_BP,
+ tsk_true,
+ 90000, // rate
+
+ /* audio */
+ { 0 },
+
+ /* video (width, height, fps) */
+ {176, 144, 0}, // fps is @deprecated
+
+ mf_codec_h264_set,
+ mf_codec_h264_open,
+ mf_codec_h264_close,
+ mf_codec_h264_encode,
+ mf_codec_h264_decode,
+ mf_codec_h264_sdp_att_match,
+ mf_codec_h264_sdp_att_get
};
const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t = &mf_codec_h264_base_plugin_def_s;
@@ -519,64 +512,62 @@ const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t = &mf_codec_h26
/* constructor */
static tsk_object_t* mf_codec_h264_main_ctor(tsk_object_t * self, va_list * app)
{
- mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
- if(h264){
- /* init base: called by tmedia_codec_create() */
- /* init self */
- if(mf_codec_h264_init(h264, profile_idc_main) != 0){
- return tsk_null;
- }
- }
- return self;
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264) {
+ /* init base: called by tmedia_codec_create() */
+ /* init self */
+ if(mf_codec_h264_init(h264, profile_idc_main) != 0) {
+ return tsk_null;
+ }
+ }
+ return self;
}
/* destructor */
static tsk_object_t* mf_codec_h264_main_dtor(tsk_object_t * self)
-{
- mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
- if(h264){
- /* deinit base */
- tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
- /* deinit self */
- mf_codec_h264_deinit(h264);
-
- }
-
- return self;
+{
+ mf_codec_h264_t *h264 = (mf_codec_h264_t*)self;
+ if(h264) {
+ /* deinit base */
+ tdav_codec_h264_common_deinit(TDAV_CODEC_H264_COMMON(self));
+ /* deinit self */
+ mf_codec_h264_deinit(h264);
+
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t mf_codec_h264_main_def_s =
-{
- sizeof(mf_codec_h264_t),
- mf_codec_h264_main_ctor,
- mf_codec_h264_main_dtor,
- tmedia_codec_cmp,
+static const tsk_object_def_t mf_codec_h264_main_def_s = {
+ sizeof(mf_codec_h264_t),
+ mf_codec_h264_main_ctor,
+ mf_codec_h264_main_dtor,
+ tmedia_codec_cmp,
};
/* plugin definition*/
-static const tmedia_codec_plugin_def_t mf_codec_h264_main_plugin_def_s =
-{
- &mf_codec_h264_main_def_s,
-
- tmedia_video,
- tmedia_codec_id_h264_mp,
- "H264",
- "H264 Main Profile (Media Foundation)",
- TMEDIA_CODEC_FORMAT_H264_MP,
- tsk_true,
- 90000, // rate
-
- /* audio */
- { 0 },
-
- /* video (width, height, fps)*/
- {176, 144, 0},// fps is @deprecated
-
- mf_codec_h264_set,
- mf_codec_h264_open,
- mf_codec_h264_close,
- mf_codec_h264_encode,
- mf_codec_h264_decode,
- mf_codec_h264_sdp_att_match,
- mf_codec_h264_sdp_att_get
+static const tmedia_codec_plugin_def_t mf_codec_h264_main_plugin_def_s = {
+ &mf_codec_h264_main_def_s,
+
+ tmedia_video,
+ tmedia_codec_id_h264_mp,
+ "H264",
+ "H264 Main Profile (Media Foundation)",
+ TMEDIA_CODEC_FORMAT_H264_MP,
+ tsk_true,
+ 90000, // rate
+
+ /* audio */
+ { 0 },
+
+ /* video (width, height, fps)*/
+ {176, 144, 0},// fps is @deprecated
+
+ mf_codec_h264_set,
+ mf_codec_h264_open,
+ mf_codec_h264_close,
+ mf_codec_h264_encode,
+ mf_codec_h264_decode,
+ mf_codec_h264_sdp_att_match,
+ mf_codec_h264_sdp_att_get
};
const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t = &mf_codec_h264_main_plugin_def_s;
@@ -586,165 +577,165 @@ const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t = &mf_codec_h26
int mf_codec_h264_open_encoder(mf_codec_h264_t* self)
{
- HRESULT hr = S_OK;
- int32_t max_bw_kpbs;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+ HRESULT hr = S_OK;
+ int32_t max_bw_kpbs;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
- if(self->encoder.pInst) {
- TSK_DEBUG_ERROR("Encoder already initialized");
+ if(self->encoder.pInst) {
+ TSK_DEBUG_ERROR("Encoder already initialized");
#if defined(E_ILLEGAL_METHOD_CALL)
- CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+ CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
#else
- CHECK_HR(hr = 0x8000000EL);
-#endif
- }
-
- // create encoder
- if(!(self->encoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder))){
- TSK_DEBUG_ERROR("Failed to find H.264 encoder");
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
-
- //self->encoder.context->pix_fmt = PIX_FMT_YUV420P;
- //self->encoder.context->time_base.num = 1;
- //self->encoder.context->time_base.den = TMEDIA_CODEC_VIDEO(self)->out.fps;
- self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
- self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
- self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
- max_bw_kpbs = TSK_CLAMP(
- 0,
- tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
- self->encoder.max_bw_kpbs
- );
- self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
-
- TSK_DEBUG_INFO("[H.264 MF Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
- self->encoder.neg_width,
- self->encoder.neg_height,
- self->encoder.neg_fps,
- self->encoder.max_bitrate_bps
- );
-
- CHECK_HR(hr = self->encoder.pInst->Initialize(
- self->encoder.neg_fps,
- self->encoder.neg_width,
- self->encoder.neg_height,
- self->encoder.max_bitrate_bps));
-
- CHECK_HR(hr = self->encoder.pInst->SetGOPSize(self->encoder.neg_fps * PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS));
- CHECK_HR(hr = self->encoder.pInst->SetSliceMaxSizeInBytes((H264_RTP_PAYLOAD_SIZE - 100)));
+ CHECK_HR(hr = 0x8000000EL);
+#endif
+ }
+
+ // create encoder
+ if(!(self->encoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder))) {
+ TSK_DEBUG_ERROR("Failed to find H.264 encoder");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+
+ //self->encoder.context->pix_fmt = PIX_FMT_YUV420P;
+ //self->encoder.context->time_base.num = 1;
+ //self->encoder.context->time_base.den = TMEDIA_CODEC_VIDEO(self)->out.fps;
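+    // When the picture is rotated by 90 or 270 degrees, swap the negotiated width and height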
+ self->encoder.neg_width = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
+ self->encoder.neg_height = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
+ self->encoder.neg_fps = TMEDIA_CODEC_VIDEO(self)->out.fps;
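+    // Clamp the bandwidth estimated from the negotiated size/fps to the configured upload maximum, then convert kbps to bps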
+ max_bw_kpbs = TSK_CLAMP(
+ 0,
+ tmedia_get_video_bandwidth_kbps_2(self->encoder.neg_width, self->encoder.neg_height, self->encoder.neg_fps),
+ self->encoder.max_bw_kpbs
+ );
+ self->encoder.max_bitrate_bps = (max_bw_kpbs * 1024);
+
+ TSK_DEBUG_INFO("[H.264 MF Encoder] neg_width=%d, neg_height=%d, neg_fps=%d, max_bitrate_bps=%d",
+ self->encoder.neg_width,
+ self->encoder.neg_height,
+ self->encoder.neg_fps,
+ self->encoder.max_bitrate_bps
+ );
+
+ CHECK_HR(hr = self->encoder.pInst->Initialize(
+ self->encoder.neg_fps,
+ self->encoder.neg_width,
+ self->encoder.neg_height,
+ self->encoder.max_bitrate_bps));
+
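+    // GOP size is expressed in frames: negotiated fps multiplied by the configured GOP duration in seconds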
+ CHECK_HR(hr = self->encoder.pInst->SetGOPSize(self->encoder.neg_fps * PLUGIN_MF_H264_GOP_SIZE_IN_SECONDS));
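+    // Cap each slice below the RTP payload size; the 100-byte margin presumably leaves room for packetization overhead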
+ CHECK_HR(hr = self->encoder.pInst->SetSliceMaxSizeInBytes((H264_RTP_PAYLOAD_SIZE - 100)));
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
int mf_codec_h264_close_encoder(mf_codec_h264_t* self)
{
- if(self){
- SafeRelease(&self->encoder.pInst);
- if(self->encoder.buffer){
- TSK_FREE(self->encoder.buffer);
- }
- self->encoder.frame_count = 0;
- }
-
- return 0;
+ if(self) {
+ SafeRelease(&self->encoder.pInst);
+ if(self->encoder.buffer) {
+ TSK_FREE(self->encoder.buffer);
+ }
+ self->encoder.frame_count = 0;
+ }
+
+ return 0;
}
int mf_codec_h264_open_decoder(mf_codec_h264_t* self)
{
- HRESULT hr = S_OK;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+ HRESULT hr = S_OK;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
- if(self->decoder.pInst) {
- TSK_DEBUG_ERROR("Decoder already initialized");
+ if(self->decoder.pInst) {
+ TSK_DEBUG_ERROR("Decoder already initialized");
#if defined(E_ILLEGAL_METHOD_CALL)
- CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+ CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
#else
- CHECK_HR(hr = 0x8000000EL);
+ CHECK_HR(hr = 0x8000000EL);
#endif
- }
+ }
- // create decoder
- if(!(self->decoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder))){
- TSK_DEBUG_ERROR("Failed to find H.264 encoder");
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
+ // create decoder
+ if(!(self->decoder.pInst = (common->profile == profile_idc_baseline) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder))) {
+        TSK_DEBUG_ERROR("Failed to find H.264 decoder");
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
- TSK_DEBUG_INFO("[H.264 MF Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
- TMEDIA_CODEC_VIDEO(self)->in.width,
- TMEDIA_CODEC_VIDEO(self)->in.height,
- TMEDIA_CODEC_VIDEO(self)->in.fps
- );
+ TSK_DEBUG_INFO("[H.264 MF Decoder] neg_width=%d, neg_height=%d, neg_fps=%d",
+ TMEDIA_CODEC_VIDEO(self)->in.width,
+ TMEDIA_CODEC_VIDEO(self)->in.height,
+ TMEDIA_CODEC_VIDEO(self)->in.fps
+ );
- CHECK_HR(hr = self->decoder.pInst->Initialize(
- TMEDIA_CODEC_VIDEO(self)->in.fps,
- TMEDIA_CODEC_VIDEO(self)->in.width,
- TMEDIA_CODEC_VIDEO(self)->in.height));
+ CHECK_HR(hr = self->decoder.pInst->Initialize(
+ TMEDIA_CODEC_VIDEO(self)->in.fps,
+ TMEDIA_CODEC_VIDEO(self)->in.width,
+ TMEDIA_CODEC_VIDEO(self)->in.height));
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
int mf_codec_h264_close_decoder(mf_codec_h264_t* self)
{
- if(self){
- SafeRelease(&self->decoder.pInst);
- TSK_FREE(self->decoder.accumulator);
- self->decoder.accumulator_pos = 0;
- }
+ if(self) {
+ SafeRelease(&self->decoder.pInst);
+ TSK_FREE(self->decoder.accumulator);
+ self->decoder.accumulator_pos = 0;
+ }
- return 0;
+ return 0;
}
int mf_codec_h264_init(mf_codec_h264_t* self, profile_idc_t profile)
{
- int ret = 0;
- level_idc_t level;
- tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
-
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if((ret = tdav_codec_h264_common_init(common))){
- TSK_DEBUG_ERROR("mf_codec_h264_common_init() faile with error code=%d", ret);
- return ret;
- }
-
- if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level))){
- TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
- return ret;
- }
-
- (self)->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
- if (MFUtils::IsLowLatencyH264SupportsMaxSliceSize()) {
- common->pack_mode_local = H264_PACKETIZATION_MODE;
- }
- else {
- common->pack_mode_local = Non_Interleaved_Mode;
- }
- common->profile = profile;
- common->level = level;
- TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
- TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
-
- TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12;
- TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_nv12;
-
- self->encoder.quality = 1;
-
- return ret;
+ int ret = 0;
+ level_idc_t level;
+ tdav_codec_h264_common_t* common = (tdav_codec_h264_common_t*)self;
+
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if((ret = tdav_codec_h264_common_init(common))) {
+        TSK_DEBUG_ERROR("tdav_codec_h264_common_init() failed with error code=%d", ret);
+ return ret;
+ }
+
+ if((ret = tdav_codec_h264_common_level_from_size(TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height, &level))) {
+ TSK_DEBUG_ERROR("Failed to find level for size=[%u, %u]", TMEDIA_CODEC_VIDEO(self)->out.width, TMEDIA_CODEC_VIDEO(self)->out.height);
+ return ret;
+ }
+
+ (self)->encoder.max_bw_kpbs = tmedia_defaults_get_bandwidth_video_upload_max();
+ if (MFUtils::IsLowLatencyH264SupportsMaxSliceSize()) {
+ common->pack_mode_local = H264_PACKETIZATION_MODE;
+ }
+ else {
+ common->pack_mode_local = Non_Interleaved_Mode;
+ }
+ common->profile = profile;
+ common->level = level;
+ TMEDIA_CODEC_VIDEO(self)->in.max_mbps = TMEDIA_CODEC_VIDEO(self)->out.max_mbps = H264_MAX_MBPS*1000;
+ TMEDIA_CODEC_VIDEO(self)->in.max_br = TMEDIA_CODEC_VIDEO(self)->out.max_br = H264_MAX_BR*1000;
+
+ TMEDIA_CODEC_VIDEO(self)->in.chroma = tmedia_chroma_nv12;
+ TMEDIA_CODEC_VIDEO(self)->out.chroma = tmedia_chroma_nv12;
+
+ self->encoder.quality = 1;
+
+ return ret;
}
int mf_codec_h264_deinit(mf_codec_h264_t* self)
{
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- mf_codec_h264_close((tmedia_codec_t*)self);
+ mf_codec_h264_close((tmedia_codec_t*)self);
- return 0;
+ return 0;
} \ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_config.h b/plugins/pluginWinMF/plugin_win_mf_config.h
index f4f692a..e518e37 100755
--- a/plugins/pluginWinMF/plugin_win_mf_config.h
+++ b/plugins/pluginWinMF/plugin_win_mf_config.h
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -48,12 +48,12 @@
# define PLUGIN_WIN_MF_UNDER_X86 1
#endif
-// Guards against C++ name mangling
+// Guards against C++ name mangling
#ifdef __cplusplus
# define PLUGIN_WIN_MF_BEGIN_DECLS extern "C" {
# define PLUGIN_WIN_MF_END_DECLS }
#else
-# define PLUGIN_WIN_MF_BEGIN_DECLS
+# define PLUGIN_WIN_MF_BEGIN_DECLS
# define PLUGIN_WIN_MF_END_DECLS
#endif
@@ -69,7 +69,7 @@
#endif
#if HAVE_CONFIG_H
- #include <config.h>
+#include <config.h>
#endif
#endif // PLUGIN_WIN_MF_CONFIG_H
diff --git a/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx b/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
index 026f510..f68f428 100755
--- a/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_consumer_audio.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -23,11 +23,10 @@
#include "tsk_debug.h"
-typedef struct plugin_win_mf_consumer_audio_s
-{
- TDAV_DECLARE_CONSUMER_AUDIO;
+typedef struct plugin_win_mf_consumer_audio_s {
+ TDAV_DECLARE_CONSUMER_AUDIO;
- bool bStarted;
+ bool bStarted;
}
plugin_win_mf_consumer_audio_t;
@@ -35,70 +34,70 @@ plugin_win_mf_consumer_audio_t;
/* ============ Consumer Interface ================= */
static int plugin_win_mf_consumer_audio_set(tmedia_consumer_t* self, const tmedia_param_t* param)
{
- plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
- int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+ int ret = tdav_consumer_audio_set(TDAV_CONSUMER_AUDIO(self), param);
+
+ if(ret == 0) {
- if(ret == 0){
-
- }
+ }
- return ret;
+ return ret;
}
static int plugin_win_mf_consumer_audio_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return 0;
+ return 0;
}
static int plugin_win_mf_consumer_audio_start(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
- pSelf->bStarted = true;
+ pSelf->bStarted = true;
- return 0;
+ return 0;
}
static int plugin_win_mf_consumer_audio_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- if(!self || !buffer || !size){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- /* buffer is already decoded */
- return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
+ if(!self || !buffer || !size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ /* buffer is already decoded */
+ return tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
}
static int plugin_win_mf_consumer_audio_pause(tmedia_consumer_t* self)
{
- return 0;
+ return 0;
}
static int plugin_win_mf_consumer_audio_stop(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(!pSelf->bStarted){
- TSK_DEBUG_INFO("WinMF audio consumer not started");
- return 0;
- }
-
- /* should be done here */
- pSelf->bStarted = false;
-
- return 0;
+ plugin_win_mf_consumer_audio_t* pSelf = (plugin_win_mf_consumer_audio_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("WinMF audio consumer not started");
+ return 0;
+ }
+
+ /* should be done here */
+ pSelf->bStarted = false;
+
+ return 0;
}
@@ -108,56 +107,54 @@ static int plugin_win_mf_consumer_audio_stop(tmedia_consumer_t* self)
/* constructor */
static tsk_object_t* plugin_win_mf_consumer_audio_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
- if(pSelf){
- /* init base */
- tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(pSelf));
- /* init self */
-
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
+ if(pSelf) {
+ /* init base */
+ tdav_consumer_audio_init(TDAV_CONSUMER_AUDIO(pSelf));
+ /* init self */
+
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_consumer_audio_dtor(tsk_object_t * self)
-{
- plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
- if(pSelf){
- /* stop */
- if(pSelf->bStarted){
- plugin_win_mf_consumer_audio_stop(TMEDIA_CONSUMER(pSelf));
- }
-
- /* deinit base */
- tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(pSelf));
- /* deinit self */
-
- }
-
- return self;
+{
+ plugin_win_mf_consumer_audio_t *pSelf = (plugin_win_mf_consumer_audio_t *)self;
+ if(pSelf) {
+ /* stop */
+ if(pSelf->bStarted) {
+ plugin_win_mf_consumer_audio_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tdav_consumer_audio_deinit(TDAV_CONSUMER_AUDIO(pSelf));
+ /* deinit self */
+
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_consumer_audio_def_s =
-{
- sizeof(plugin_win_mf_consumer_audio_t),
- plugin_win_mf_consumer_audio_ctor,
- plugin_win_mf_consumer_audio_dtor,
- tdav_consumer_audio_cmp,
+static const tsk_object_def_t plugin_win_mf_consumer_audio_def_s = {
+ sizeof(plugin_win_mf_consumer_audio_t),
+ plugin_win_mf_consumer_audio_ctor,
+ plugin_win_mf_consumer_audio_dtor,
+ tdav_consumer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_audio_plugin_def_s =
-{
- &plugin_win_mf_consumer_audio_def_s,
-
- tmedia_audio,
- "Windows Media Foundation audio consumer",
-
- plugin_win_mf_consumer_audio_set,
- plugin_win_mf_consumer_audio_prepare,
- plugin_win_mf_consumer_audio_start,
- plugin_win_mf_consumer_audio_consume,
- plugin_win_mf_consumer_audio_pause,
- plugin_win_mf_consumer_audio_stop
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_audio_plugin_def_s = {
+ &plugin_win_mf_consumer_audio_def_s,
+
+ tmedia_audio,
+ "Windows Media Foundation audio consumer",
+
+ plugin_win_mf_consumer_audio_set,
+ plugin_win_mf_consumer_audio_prepare,
+ plugin_win_mf_consumer_audio_start,
+ plugin_win_mf_consumer_audio_consume,
+ plugin_win_mf_consumer_audio_pause,
+ plugin_win_mf_consumer_audio_stop
};
const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_audio_plugin_def_t = &plugin_win_mf_consumer_audio_plugin_def_s;
diff --git a/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx b/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
index f6bef59..ee6eaaa 100755
--- a/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_consumer_video.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -50,20 +50,20 @@
const DWORD NUM_BACK_BUFFERS = 2;
static HRESULT CreateDeviceD3D9(
- HWND hWnd,
- IDirect3DDevice9** ppDevice,
- IDirect3D9 **ppD3D,
- D3DPRESENT_PARAMETERS &d3dpp
- );
+ HWND hWnd,
+ IDirect3DDevice9** ppDevice,
+ IDirect3D9 **ppD3D,
+ D3DPRESENT_PARAMETERS &d3dpp
+);
static HRESULT TestCooperativeLevel(
- struct plugin_win_mf_consumer_video_s *pSelf
- );
+ struct plugin_win_mf_consumer_video_s *pSelf
+);
static HRESULT CreateSwapChain(
- HWND hWnd,
- UINT32 nFrameWidth,
- UINT32 nFrameHeight,
- IDirect3DDevice9* pDevice,
- IDirect3DSwapChain9 **ppSwapChain);
+ HWND hWnd,
+ UINT32 nFrameWidth,
+ UINT32 nFrameHeight,
+ IDirect3DDevice9* pDevice,
+ IDirect3DSwapChain9 **ppSwapChain);
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
@@ -81,30 +81,29 @@ static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf);
-typedef struct plugin_win_mf_consumer_video_s
-{
- TMEDIA_DECLARE_CONSUMER;
-
- BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked;
- BOOL bPluginFireFox, bPluginWebRTC4All;
- HWND hWindow;
- WNDPROC wndProc;
- HWND hWindowFullScreen;
- RECT rcWindow;
- RECT rcDest;
- MFRatio pixelAR;
-
- UINT32 nNegWidth;
- UINT32 nNegHeight;
- UINT32 nNegFps;
-
- D3DLOCKED_RECT rcLock;
- IDirect3DDevice9* pDevice;
- IDirect3D9 *pD3D;
- IDirect3DSwapChain9 *pSwapChain;
- D3DPRESENT_PARAMETERS d3dpp;
-
- TSK_DECLARE_SAFEOBJ;
+typedef struct plugin_win_mf_consumer_video_s {
+ TMEDIA_DECLARE_CONSUMER;
+
+ BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked;
+ BOOL bPluginFireFox, bPluginWebRTC4All;
+ HWND hWindow;
+ WNDPROC wndProc;
+ HWND hWindowFullScreen;
+ RECT rcWindow;
+ RECT rcDest;
+ MFRatio pixelAR;
+
+ UINT32 nNegWidth;
+ UINT32 nNegHeight;
+ UINT32 nNegFps;
+
+ D3DLOCKED_RECT rcLock;
+ IDirect3DDevice9* pDevice;
+ IDirect3D9 *pD3D;
+ IDirect3DSwapChain9 *pSwapChain;
+ D3DPRESENT_PARAMETERS d3dpp;
+
+ TSK_DECLARE_SAFEOBJ;
}
plugin_win_mf_consumer_video_t;
@@ -113,360 +112,331 @@ static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_
/* ============ Media Consumer Interface ================= */
static int plugin_win_mf_consumer_video_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- HRESULT hr = S_OK;
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!self || !param)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_POINTER);
- }
-
- if(param->value_type == tmedia_pvt_int64)
- {
- if(tsk_striequals(param->key, "remote-hwnd"))
- {
- HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
- if(hWnd != pSelf->hWindow)
- {
- tsk_safeobj_lock(pSelf); // block consumer thread
- pSelf->hWindow = hWnd;
- if(pSelf->bPrepared)
- {
- hr = ResetDevice(pSelf);
- }
- tsk_safeobj_unlock(pSelf); // unblock consumer thread
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32)
- {
- if(tsk_striequals(param->key, "fullscreen"))
- {
- BOOL bFullScreen = !!*((int32_t*)param->value);
- TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
- CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
- }
- else if(tsk_striequals(param->key, "create-on-current-thead"))
- {
- // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if(tsk_striequals(param->key, "plugin-firefox"))
- {
- pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
- }
- else if(tsk_striequals(param->key, "plugin-webrtc4all"))
- {
- pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
- }
- }
-
- CHECK_HR(hr);
+ int ret = 0;
+ HRESULT hr = S_OK;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(param->value_type == tmedia_pvt_int64) {
+ if(tsk_striequals(param->key, "remote-hwnd")) {
+ HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+ if(hWnd != pSelf->hWindow) {
+ tsk_safeobj_lock(pSelf); // block consumer thread
+ pSelf->hWindow = hWnd;
+ if(pSelf->bPrepared) {
+ hr = ResetDevice(pSelf);
+ }
+ tsk_safeobj_unlock(pSelf); // unblock consumer thread
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "fullscreen")) {
+ BOOL bFullScreen = !!*((int32_t*)param->value);
+ TSK_DEBUG_INFO("[MF video consumer] Full Screen = %d", bFullScreen);
+ CHECK_HR(hr = SetFullscreen(pSelf, bFullScreen));
+ }
+ else if(tsk_striequals(param->key, "create-on-current-thead")) {
+ // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if(tsk_striequals(param->key, "plugin-firefox")) {
+ pSelf->bPluginFireFox = (*((int32_t*)param->value) != 0);
+ }
+ else if(tsk_striequals(param->key, "plugin-webrtc4all")) {
+ pSelf->bPluginWebRTC4All = (*((int32_t*)param->value) != 0);
+ }
+ }
+
+ CHECK_HR(hr);
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!pSelf || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(pSelf->bPrepared){
- TSK_DEBUG_WARN("D3D9 video consumer already prepared");
- return -1;
- }
-
- // FIXME: DirectShow requires flipping but not D3D9
- // The Core library always tries to flip when OSType==Win32. Must be changed
- TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
-
- HRESULT hr = S_OK;
- HWND hWnd = Window(pSelf);
-
- TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
-
- if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
- TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
- }
- if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
- TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
- }
-
- pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
-
- TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
- pSelf->nNegFps,
- pSelf->nNegWidth,
- pSelf->nNegHeight);
-
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- // The window handle is not created until the call is connect (incoming only) - At least on Internet Explorer 10
- if(hWnd && !pSelf->bPluginWebRTC4All)
- {
- CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
- else
- {
- if(hWnd && pSelf->bPluginWebRTC4All)
- {
- TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creating until session get connected");
- }
- else
- {
- TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
- }
- }
-
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+    if(!pSelf || !codec || !codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(pSelf->bPrepared) {
+ TSK_DEBUG_WARN("D3D9 video consumer already prepared");
+ return -1;
+ }
+
+ // FIXME: DirectShow requires flipping but not D3D9
+ // The Core library always tries to flip when OSType==Win32. Must be changed
+ TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
+
+ HRESULT hr = S_OK;
+ HWND hWnd = Window(pSelf);
+
+ TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.width) {
+ TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.height) {
+ TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+ }
+
+ pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+ TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+    TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+    // The window handle is not created until the call is connected (incoming only) - At least on Internet Explorer 10
+ if(hWnd && !pSelf->bPluginWebRTC4All) {
+ CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
+ else {
+ if(hWnd && pSelf->bPluginWebRTC4All) {
+            TSK_DEBUG_INFO("[MF consumer] HWND is defined but we detected webrtc4all...delaying D3D9 device creation until the session gets connected");
+ }
+ else {
+ TSK_DEBUG_WARN("Delaying D3D9 device creation because HWND is not defined yet");
+ }
+ }
+
bail:
- pSelf->bPrepared = SUCCEEDED(hr);
- return pSelf->bPrepared ? 0 : -1;
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return pSelf->bPrepared ? 0 : -1;
}
static int plugin_win_mf_consumer_video_start(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(pSelf->bStarted){
- TSK_DEBUG_INFO("D3D9 video consumer already started");
- return 0;
- }
- if(!pSelf->bPrepared){
- TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
- return -1;
- }
-
- HRESULT hr = S_OK;
-
- pSelf->bPaused = false;
- pSelf->bStarted = true;
-
- return SUCCEEDED(hr) ? 0 : -1;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ TSK_DEBUG_INFO("D3D9 video consumer already started");
+ return 0;
+ }
+ if(!pSelf->bPrepared) {
+ TSK_DEBUG_ERROR("D3D9 video consumer not prepared");
+ return -1;
+ }
+
+ HRESULT hr = S_OK;
+
+ pSelf->bPaused = false;
+ pSelf->bStarted = true;
+
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- HRESULT hr = S_OK;
- HWND hWnd = Window(pSelf);
+ HRESULT hr = S_OK;
+ HWND hWnd = Window(pSelf);
- IDirect3DSurface9 *pSurf = NULL;
+ IDirect3DSurface9 *pSurf = NULL;
IDirect3DSurface9 *pBB = NULL;
- if(!pSelf)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1; // because of the mutex lock do it here
- }
-
- tsk_safeobj_lock(pSelf);
-
- if(!buffer || !size)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_INVALIDARG);
- }
-
- if(!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("D3D9 video consumer not started");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(!hWnd)
- {
- TSK_DEBUG_INFO("Do not draw frame because HWND not set");
- goto bail; // not an error as the application can decide to set the HWND at any time
- }
-
- if (!pSelf->bWindowHooked)
- {
- // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
- CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
- }
-
- if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain)
- {
- if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain)
- {
- CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
- }
-
- if(hWnd)
- {
- // means HWND was not set but defined now
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
-
- CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
- }
-
- if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
- TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
- pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
- pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
- // Update media type
-
- SafeRelease(&pSelf->pSwapChain);
- CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
-
- pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
- pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
-
- // Update Destination will do noting if the window size haven't changed.
- // Force updating the destination rect if negotiated size change
- CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
- }
-
- CHECK_HR(hr = TestCooperativeLevel(pSelf));
-
- CHECK_HR(hr = UpdateDestinationRect(pSelf));
-
- CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
- CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
-
- // Fast copy() using MMX, SSE, or SSE2
- hr = MFCopyImage(
- (BYTE*)pSelf->rcLock.pBits,
- pSelf->rcLock.Pitch,
- (BYTE*)buffer,
- (pSelf->nNegWidth << 2),
- (pSelf->nNegWidth << 2),
- pSelf->nNegHeight
- );
- if(FAILED(hr))
- {
- // unlock() before leaving
- pSurf->UnlockRect();
- CHECK_HR(hr);
- }
-
- CHECK_HR(hr = pSurf->UnlockRect());
-
- // Color fill the back buffer
- CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1; // because of the mutex lock do it here
+ }
+
+ tsk_safeobj_lock(pSelf);
+
+ if(!buffer || !size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("D3D9 video consumer not started");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(!hWnd) {
+ TSK_DEBUG_INFO("Do not draw frame because HWND not set");
+ goto bail; // not an error as the application can decide to set the HWND at any time
+ }
+
+ if (!pSelf->bWindowHooked) {
+ // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
+ CHECK_HR(hr = HookWindow(pSelf, pSelf->hWindow));
+ }
+
+ if(!pSelf->pDevice || !pSelf->pD3D || !pSelf->pSwapChain) {
+ if(pSelf->pDevice || pSelf->pD3D || pSelf->pSwapChain) {
+ CHECK_HR(hr = E_POINTER); // They must be "all null" or "all valid"
+ }
+
+ if(hWnd) {
+            // means the HWND was not set when the consumer was prepared but is defined now
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+ CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
+ }
+
+ if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height) {
+ TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+ pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+ pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+ // Update media type
+
+ SafeRelease(&pSelf->pSwapChain);
+ CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
+
+ pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
+ pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
+
+        // UpdateDestinationRect will do nothing if the window size hasn't changed.
+        // Force updating the destination rect when the negotiated size changes
+ CHECK_HR(hr = UpdateDestinationRect(pSelf, TRUE/* Force */));
+ }
+
+ CHECK_HR(hr = TestCooperativeLevel(pSelf));
+
+ CHECK_HR(hr = UpdateDestinationRect(pSelf));
+
+ CHECK_HR(hr = pSelf->pSwapChain->GetBackBuffer(0, D3DBACKBUFFER_TYPE_MONO, &pSurf));
+ CHECK_HR(hr = pSurf->LockRect(&pSelf->rcLock, NULL, D3DLOCK_NOSYSLOCK ));
+
+ // Fast copy() using MMX, SSE, or SSE2
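+    // Frames are RGB32, so the source stride is nNegWidth * 4 bytes (nNegWidth << 2); the destination uses the locked surface pitch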
+ hr = MFCopyImage(
+ (BYTE*)pSelf->rcLock.pBits,
+ pSelf->rcLock.Pitch,
+ (BYTE*)buffer,
+ (pSelf->nNegWidth << 2),
+ (pSelf->nNegWidth << 2),
+ pSelf->nNegHeight
+ );
+ if(FAILED(hr)) {
+ // unlock() before leaving
+ pSurf->UnlockRect();
+ CHECK_HR(hr);
+ }
+
+ CHECK_HR(hr = pSurf->UnlockRect());
+
+ // Color fill the back buffer
+ CHECK_HR(hr = pSelf->pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pBB));
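+    // Clear the back buffer: black for METROPOLIS builds, white otherwise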
#if METROPOLIS
- CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0x00, 0x00, 0x00)));
+ CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0x00, 0x00, 0x00)));
#else
- CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
+ CHECK_HR(hr = pSelf->pDevice->ColorFill(pBB, NULL, D3DCOLOR_XRGB(0xFF, 0xFF, 0xFF)));
#endif
-
- // Resize keeping aspect ratio and Blit the frame (required)
- hr = pSelf->pDevice->StretchRect(
- pSurf,
- NULL,
- pBB,
- &pSelf->rcDest/*NULL*/,
- D3DTEXF_LINEAR
- ); // could fail when display is being resized
- if(SUCCEEDED(hr))
- {
- // Present the frame
- CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
- }
- else
- {
- TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
- }
+
+ // Resize keeping aspect ratio and Blit the frame (required)
+ hr = pSelf->pDevice->StretchRect(
+ pSurf,
+ NULL,
+ pBB,
+ &pSelf->rcDest/*NULL*/,
+ D3DTEXF_LINEAR
+ ); // could fail when display is being resized
+ if(SUCCEEDED(hr)) {
+ // Present the frame
+ CHECK_HR(hr = pSelf->pDevice->Present(NULL, NULL, NULL, NULL));
+ }
+ else {
+ TSK_DEBUG_INFO("StretchRect returned ...%x", hr);
+ }
bail:
- SafeRelease(&pSurf);
- SafeRelease(&pBB);
+ SafeRelease(&pSurf);
+ SafeRelease(&pBB);
- tsk_safeobj_unlock(pSelf);
+ tsk_safeobj_unlock(pSelf);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_pause(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("MF video producer not started");
- return 0;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(!pSelf->bStarted) {
+        TSK_DEBUG_INFO("D3D9 video consumer not started");
+ return 0;
+ }
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- pSelf->bPaused = true;
+ pSelf->bPaused = true;
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_stop(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- if(!pSelf){
+ if(!pSelf) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
pSelf->bStarted = false;
- pSelf->bPaused = false;
+ pSelf->bPaused = false;
- if(pSelf->hWindowFullScreen)
- {
- ::InvalidateRect(pSelf->hWindowFullScreen, NULL, FALSE);
- ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
- }
+ if(pSelf->hWindowFullScreen) {
+ ::InvalidateRect(pSelf->hWindowFullScreen, NULL, FALSE);
+ ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+ }
- // next start() will be called after prepare()
- return _plugin_win_mf_consumer_video_unprepare(pSelf);
+ // next start() will be called after prepare()
+ return _plugin_win_mf_consumer_video_unprepare(pSelf);
}
static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf)
{
- if(!pSelf)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- UnhookWindow(pSelf);
+ UnhookWindow(pSelf);
- if(pSelf->bStarted)
- {
- // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
- return -1;
- }
+ if(pSelf->bStarted) {
+ // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+ return -1;
+ }
- SafeRelease(&pSelf->pDevice);
- SafeRelease(&pSelf->pD3D);
- SafeRelease(&pSelf->pSwapChain);
+ SafeRelease(&pSelf->pDevice);
+ SafeRelease(&pSelf->pD3D);
+ SafeRelease(&pSelf->pSwapChain);
- pSelf->bPrepared = false;
+ pSelf->bPrepared = false;
- return 0;
+ return 0;
}
@@ -476,192 +446,179 @@ static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_
/* constructor */
static tsk_object_t* plugin_win_mf_consumer_video_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
- if(pSelf){
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- /* init self */
- tsk_safeobj_init(pSelf);
- TMEDIA_CONSUMER(pSelf)->video.fps = 15;
- TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
-
- pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1;
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+        TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+ /* init self */
+ tsk_safeobj_init(pSelf);
+ TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+ TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+ pSelf->pixelAR.Denominator = pSelf->pixelAR.Numerator = 1;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_consumer_video_dtor(tsk_object_t * self)
-{
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
- if(pSelf){
- /* stop */
- if(pSelf->bStarted)
- {
- plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
- }
-
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
- /* deinit self */
- _plugin_win_mf_consumer_video_unprepare(pSelf);
- tsk_safeobj_deinit(pSelf);
- }
-
- return self;
+{
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf) {
+ /* stop */
+ if(pSelf->bStarted) {
+ plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+ /* deinit self */
+ _plugin_win_mf_consumer_video_unprepare(pSelf);
+ tsk_safeobj_deinit(pSelf);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_consumer_video_def_s =
-{
- sizeof(plugin_win_mf_consumer_video_t),
- plugin_win_mf_consumer_video_ctor,
- plugin_win_mf_consumer_video_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_mf_consumer_video_def_s = {
+ sizeof(plugin_win_mf_consumer_video_t),
+ plugin_win_mf_consumer_video_ctor,
+ plugin_win_mf_consumer_video_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s =
-{
- &plugin_win_mf_consumer_video_def_s,
-
- tmedia_video,
- "D3D9 video consumer",
-
- plugin_win_mf_consumer_video_set,
- plugin_win_mf_consumer_video_prepare,
- plugin_win_mf_consumer_video_start,
- plugin_win_mf_consumer_video_consume,
- plugin_win_mf_consumer_video_pause,
- plugin_win_mf_consumer_video_stop
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s = {
+ &plugin_win_mf_consumer_video_def_s,
+
+ tmedia_video,
+ "D3D9 video consumer",
+
+ plugin_win_mf_consumer_video_set,
+ plugin_win_mf_consumer_video_prepare,
+ plugin_win_mf_consumer_video_start,
+ plugin_win_mf_consumer_video_consume,
+ plugin_win_mf_consumer_video_pause,
+ plugin_win_mf_consumer_video_stop
};
const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t = &plugin_win_mf_consumer_video_plugin_def_s;
// Helper functions
static HRESULT CreateDeviceD3D9(
- HWND hWnd,
- IDirect3DDevice9** ppDevice,
- IDirect3D9 **ppD3D,
- D3DPRESENT_PARAMETERS &d3dpp
- )
+ HWND hWnd,
+ IDirect3DDevice9** ppDevice,
+ IDirect3D9 **ppD3D,
+ D3DPRESENT_PARAMETERS &d3dpp
+)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
D3DDISPLAYMODE mode = { 0 };
- D3DPRESENT_PARAMETERS pp = {0};
-
- if(!ppDevice || *ppDevice || !ppD3D || *ppD3D)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION)))
- {
+ D3DPRESENT_PARAMETERS pp = {0};
+
+ if(!ppDevice || *ppDevice || !ppD3D || *ppD3D) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!(*ppD3D = Direct3DCreate9(D3D_SDK_VERSION))) {
CHECK_HR(hr = E_OUTOFMEMORY);
}
CHECK_HR(hr = (*ppD3D)->GetAdapterDisplayMode(
- D3DADAPTER_DEFAULT,
- &mode
- ));
+ D3DADAPTER_DEFAULT,
+ &mode
+ ));
CHECK_HR(hr = (*ppD3D)->CheckDeviceType(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- mode.Format,
- D3DFMT_X8R8G8B8,
- TRUE // windowed
- ));
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ mode.Format,
+ D3DFMT_X8R8G8B8,
+ TRUE // windowed
+ ));
pp.BackBufferFormat = D3DFMT_X8R8G8B8;
pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
- pp.Windowed = TRUE;
+ pp.Windowed = TRUE;
pp.hDeviceWindow = hWnd;
CHECK_HR(hr = (*ppD3D)->CreateDevice(
- D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- hWnd,
- D3DCREATE_HARDWARE_VERTEXPROCESSING,
- &pp,
- ppDevice
- ));
+ D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ hWnd,
+ D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &pp,
+ ppDevice
+ ));
- d3dpp = pp;
+ d3dpp = pp;
bail:
- if(FAILED(hr))
- {
- SafeRelease(ppD3D);
- SafeRelease(ppDevice);
- }
+ if(FAILED(hr)) {
+ SafeRelease(ppD3D);
+ SafeRelease(ppDevice);
+ }
return hr;
}
static HRESULT TestCooperativeLevel(
- struct plugin_win_mf_consumer_video_s *pSelf
- )
+ struct plugin_win_mf_consumer_video_s *pSelf
+)
{
- HRESULT hr = S_OK;
-
- if (!pSelf || !pSelf->pDevice)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- switch((hr = pSelf->pDevice->TestCooperativeLevel()))
- {
- case D3D_OK:
- {
- break;
- }
-
- case D3DERR_DEVICELOST:
- {
- hr = S_OK;
- break;
- }
-
- case D3DERR_DEVICENOTRESET:
- {
- hr = ResetDevice(pSelf, TRUE);
- break;
- }
-
- default:
- {
- break;
- }
- }
-
- CHECK_HR(hr);
+ HRESULT hr = S_OK;
+
+ if (!pSelf || !pSelf->pDevice) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ switch((hr = pSelf->pDevice->TestCooperativeLevel())) {
+ case D3D_OK: {
+ break;
+ }
+
+ case D3DERR_DEVICELOST: {
+ hr = S_OK;
+ break;
+ }
+
+ case D3DERR_DEVICENOTRESET: {
+ hr = ResetDevice(pSelf, TRUE);
+ break;
+ }
+
+ default: {
+ break;
+ }
+ }
+
+ CHECK_HR(hr);
bail:
return hr;
}
static HRESULT CreateSwapChain(
- HWND hWnd,
- UINT32 nFrameWidth,
- UINT32 nFrameHeight,
- IDirect3DDevice9* pDevice,
- IDirect3DSwapChain9 **ppSwapChain
- )
+ HWND hWnd,
+ UINT32 nFrameWidth,
+ UINT32 nFrameHeight,
+ IDirect3DDevice9* pDevice,
+ IDirect3DSwapChain9 **ppSwapChain
+)
{
HRESULT hr = S_OK;
D3DPRESENT_PARAMETERS pp = { 0 };
- if(!pDevice || !ppSwapChain || *ppSwapChain)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- pp.BackBufferWidth = nFrameWidth;
+ if(!pDevice || !ppSwapChain || *ppSwapChain) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ pp.BackBufferWidth = nFrameWidth;
pp.BackBufferHeight = nFrameHeight;
pp.Windowed = TRUE;
pp.SwapEffect = D3DSWAPEFFECT_FLIP;
@@ -673,7 +630,7 @@ static HRESULT CreateSwapChain(
pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
pp.BackBufferCount = NUM_BACK_BUFFERS;
- CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
+ CHECK_HR(hr = pDevice->CreateAdditionalSwapChain(&pp, ppSwapChain));
bail:
return hr;
@@ -681,7 +638,7 @@ bail:
static inline HWND Window(struct plugin_win_mf_consumer_video_s *pSelf)
{
- return pSelf ? (pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow) : NULL;
+ return pSelf ? (pSelf->bFullScreen ? pSelf->hWindowFullScreen : pSelf->hWindow) : NULL;
}
static inline LONG Width(const RECT& r)
@@ -710,17 +667,14 @@ static inline RECT CorrectAspectRatio(const RECT& src, const MFRatio& srcPAR)
// Start with a rectangle the same size as src, but offset to the origin (0,0).
RECT rc = {0, 0, src.right - src.left, src.bottom - src.top};
- if ((srcPAR.Numerator != 1) || (srcPAR.Denominator != 1))
- {
+ if ((srcPAR.Numerator != 1) || (srcPAR.Denominator != 1)) {
// Correct for the source's PAR.
- if (srcPAR.Numerator > srcPAR.Denominator)
- {
+ if (srcPAR.Numerator > srcPAR.Denominator) {
// The source has "wide" pixels, so stretch the width.
rc.right = MulDiv(rc.right, srcPAR.Numerator, srcPAR.Denominator);
}
- else if (srcPAR.Numerator < srcPAR.Denominator)
- {
+ else if (srcPAR.Numerator < srcPAR.Denominator) {
// The source has "tall" pixels, so stretch the height.
rc.bottom = MulDiv(rc.bottom, srcPAR.Denominator, srcPAR.Numerator);
}
@@ -783,236 +737,214 @@ static inline RECT LetterBoxRect(const RECT& rcSrc, const RECT& rcDst)
static inline HRESULT UpdateDestinationRect(plugin_win_mf_consumer_video_t *pSelf, BOOL bForce /*= FALSE*/)
{
- HRESULT hr = S_OK;
- HWND hwnd = Window(pSelf);
-
- if(!pSelf)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(!hwnd)
- {
- CHECK_HR(hr = E_HANDLE);
- }
+ HRESULT hr = S_OK;
+ HWND hwnd = Window(pSelf);
+
+ if(!pSelf) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(!hwnd) {
+ CHECK_HR(hr = E_HANDLE);
+ }
RECT rcClient;
- GetClientRect(hwnd, &rcClient);
+ GetClientRect(hwnd, &rcClient);
- // only update destination if window size changed
- if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top))
- {
- CHECK_HR(hr = ResetDevice(pSelf));
+ // only update destination if window size changed
+ if(bForce || (rcClient.bottom != pSelf->rcWindow.bottom || rcClient.left != pSelf->rcWindow.left || rcClient.right != pSelf->rcWindow.right || rcClient.top != pSelf->rcWindow.top)) {
+ CHECK_HR(hr = ResetDevice(pSelf));
- pSelf->rcWindow = rcClient;
+ pSelf->rcWindow = rcClient;
#if 1
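+        // Correct the source rect for the pixel aspect ratio, then letterbox it into the client area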
- RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
- rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
- pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
+ RECT rcSrc = { 0, 0, pSelf->nNegWidth, pSelf->nNegHeight };
+ rcSrc = CorrectAspectRatio(rcSrc, pSelf->pixelAR);
+ pSelf->rcDest = LetterBoxRect(rcSrc, rcClient);
#else
- long w = rcClient.right - rcClient.left;
- long h = rcClient.bottom - rcClient.top;
- float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
- // (w/h)=ratio =>
- // 1) h=w/ratio
- // and
- // 2) w=h*ratio
- pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
- pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
- pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
- pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
+ long w = rcClient.right - rcClient.left;
+ long h = rcClient.bottom - rcClient.top;
+ float ratio = ((float)pSelf->nNegWidth/(float)pSelf->nNegHeight);
+ // (w/h)=ratio =>
+ // 1) h=w/ratio
+ // and
+ // 2) w=h*ratio
+ pSelf->rcDest.right = (int)(w/ratio) > h ? (int)(h * ratio) : w;
+ pSelf->rcDest.bottom = (int)(pSelf->rcDest.right/ratio) > h ? h : (int)(pSelf->rcDest.right/ratio);
+ pSelf->rcDest.left = ((w - pSelf->rcDest.right) >> 1);
+ pSelf->rcDest.top = ((h - pSelf->rcDest.bottom) >> 1);
#endif
- //::InvalidateRect(hwnd, NULL, FALSE);
- }
+ //::InvalidateRect(hwnd, NULL, FALSE);
+ }
bail:
- return hr;
+ return hr;
}
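The disabled #else branch above derives the same letterbox placement by hand from (w/h) = ratio; a worked example with assumed numbers makes the result concrete (illustrative only, not part of this patch):

// Assumed: client window 800x600, negotiated video 1280x720 => ratio = 1280/720 ~= 1.778
//   w/ratio = 800/1.778 ~= 450, which is <= 600, so rcDest.right = 800
//   rcDest.bottom = 800/1.778 ~= 450
//   rcDest.left   = (800 - 800) / 2 = 0
//   rcDest.top    = (600 - 450) / 2 = 75
// i.e. the 16:9 picture is centered in the 4:3 window with 75-pixel bars above and below.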
static HRESULT ResetDevice(plugin_win_mf_consumer_video_t *pSelf, BOOL bUpdateDestinationRect /*= FALSE*/)
{
HRESULT hr = S_OK;
- tsk_safeobj_lock(pSelf);
+ tsk_safeobj_lock(pSelf);
- HWND hWnd = Window(pSelf);
+ HWND hWnd = Window(pSelf);
- if (pSelf->pDevice)
- {
+ if (pSelf->pDevice) {
D3DPRESENT_PARAMETERS d3dpp = pSelf->d3dpp;
hr = pSelf->pDevice->Reset(&d3dpp);
- if (FAILED(hr))
- {
+ if (FAILED(hr)) {
SafeRelease(&pSelf->pDevice);
- SafeRelease(&pSelf->pD3D);
- SafeRelease(&pSelf->pSwapChain);
+ SafeRelease(&pSelf->pD3D);
+ SafeRelease(&pSelf->pSwapChain);
}
}
- if (pSelf->pDevice == NULL && hWnd)
- {
+ if (pSelf->pDevice == NULL && hWnd) {
CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
- CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
- }
+ CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
+ }
- if(bUpdateDestinationRect) // endless loop guard
- {
- CHECK_HR(hr = UpdateDestinationRect(pSelf));
- }
+ if(bUpdateDestinationRect) { // endless loop guard
+ CHECK_HR(hr = UpdateDestinationRect(pSelf));
+ }
bail:
- tsk_safeobj_unlock(pSelf);
+ tsk_safeobj_unlock(pSelf);
- return hr;
+ return hr;
}
static HRESULT SetFullscreen(struct plugin_win_mf_consumer_video_s *pSelf, BOOL bFullScreen)
{
- HRESULT hr = S_OK;
- if(!pSelf)
- {
- CHECK_HR(hr = E_POINTER);
- }
-
- if(pSelf->bFullScreen != bFullScreen)
- {
- tsk_safeobj_lock(pSelf);
- if(bFullScreen)
- {
- HWND hWnd = CreateFullScreenWindow(pSelf);
- if(hWnd)
- {
- ::ShowWindow(hWnd, SW_SHOWDEFAULT);
- ::UpdateWindow(hWnd);
- }
- }
- else if(pSelf->hWindowFullScreen)
- {
- ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
- }
- pSelf->bFullScreen = bFullScreen;
- if(pSelf->bPrepared)
- {
- hr = ResetDevice(pSelf);
- }
- tsk_safeobj_unlock(pSelf);
-
- CHECK_HR(hr);
- }
+ HRESULT hr = S_OK;
+ if(!pSelf) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if(pSelf->bFullScreen != bFullScreen) {
+ tsk_safeobj_lock(pSelf);
+ if(bFullScreen) {
+ HWND hWnd = CreateFullScreenWindow(pSelf);
+ if(hWnd) {
+ ::ShowWindow(hWnd, SW_SHOWDEFAULT);
+ ::UpdateWindow(hWnd);
+ }
+ }
+ else if(pSelf->hWindowFullScreen) {
+ ::ShowWindow(pSelf->hWindowFullScreen, SW_HIDE);
+ }
+ pSelf->bFullScreen = bFullScreen;
+ if(pSelf->bPrepared) {
+ hr = ResetDevice(pSelf);
+ }
+ tsk_safeobj_unlock(pSelf);
+
+ CHECK_HR(hr);
+ }
bail:
- return hr;
+ return hr;
}
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- switch(uMsg)
- {
- case WM_CREATE:
- case WM_SIZE:
- case WM_MOVE:
- {
- struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
- if (pSelf)
- {
-
- }
- break;
- }
-
- case WM_ERASEBKGND:
- {
- return TRUE; // avoid background erasing.
- }
-
- case WM_CHAR:
- case WM_KEYUP:
- {
- struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
- if (pSelf)
- {
- SetFullscreen(pSelf, FALSE);
- }
-
- break;
- }
- }
-
- return DefWindowProc(hWnd, uMsg, wParam, lParam);
+ switch(uMsg) {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE: {
+ struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
+ if (pSelf) {
+
+ }
+ break;
+ }
+
+ case WM_ERASEBKGND: {
+ return TRUE; // avoid background erasing.
+ }
+
+ case WM_CHAR:
+ case WM_KEYUP: {
+ struct plugin_win_mf_consumer_video_s* pSelf = dynamic_cast<struct plugin_win_mf_consumer_video_s*>((struct plugin_win_mf_consumer_video_s*)GetPropA(hWnd, "Self"));
+ if (pSelf) {
+ SetFullscreen(pSelf, FALSE);
+ }
+
+ break;
+ }
+ }
+
+ return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
static HWND CreateFullScreenWindow(struct plugin_win_mf_consumer_video_s *pSelf)
{
- HRESULT hr = S_OK;
-
- if(!pSelf)
- {
- return NULL;
- }
-
- if(!pSelf->hWindowFullScreen)
- {
- WNDCLASS wc = {0};
-
- wc.lpfnWndProc = WndProc;
- wc.hInstance = GetModuleHandle(NULL);
- wc.hCursor = LoadCursor(NULL, IDC_ARROW);
- wc.lpszClassName = L"WindowClass";
- RegisterClass(&wc);
- pSelf->hWindowFullScreen = ::CreateWindowEx(
- NULL,
- wc.lpszClassName,
- L"Doubango's Video Consumer Fullscreen",
- WS_EX_TOPMOST | WS_POPUP,
- 0, 0,
- GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
- NULL,
- NULL,
- GetModuleHandle(NULL),
- NULL);
-
- SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
- }
- return pSelf->hWindowFullScreen;
+ HRESULT hr = S_OK;
+
+ if(!pSelf) {
+ return NULL;
+ }
+
+ if(!pSelf->hWindowFullScreen) {
+ WNDCLASS wc = {0};
+
+ wc.lpfnWndProc = WndProc;
+ wc.hInstance = GetModuleHandle(NULL);
+ wc.hCursor = LoadCursor(NULL, IDC_ARROW);
+ wc.lpszClassName = L"WindowClass";
+ RegisterClass(&wc);
+ pSelf->hWindowFullScreen = ::CreateWindowEx(
+ NULL,
+ wc.lpszClassName,
+ L"Doubango's Video Consumer Fullscreen",
+ WS_EX_TOPMOST | WS_POPUP,
+ 0, 0,
+ GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
+ NULL,
+ NULL,
+ GetModuleHandle(NULL),
+ NULL);
+
+ SetPropA(pSelf->hWindowFullScreen, "Self", pSelf);
+ }
+ return pSelf->hWindowFullScreen;
}
static HRESULT HookWindow(plugin_win_mf_consumer_video_s *pSelf, HWND hWnd)
{
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- tsk_safeobj_lock(pSelf);
+ tsk_safeobj_lock(pSelf);
- CHECK_HR(hr = UnhookWindow(pSelf));
+ CHECK_HR(hr = UnhookWindow(pSelf));
- if ((pSelf->hWindow = hWnd)) {
- pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
- if (!pSelf->wndProc) {
- TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
- CHECK_HR(hr = E_FAIL);
- }
- pSelf->bWindowHooked = TRUE;
- }
+ if ((pSelf->hWindow = hWnd)) {
+ pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
+ if (!pSelf->wndProc) {
+ TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
+ CHECK_HR(hr = E_FAIL);
+ }
+ pSelf->bWindowHooked = TRUE;
+ }
bail:
- tsk_safeobj_unlock(pSelf);
- return S_OK;
+ tsk_safeobj_unlock(pSelf);
+ return hr;
}
static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf)
{
- tsk_safeobj_lock(pSelf);
- if (pSelf->hWindow && pSelf->wndProc) {
- SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
- pSelf->wndProc = NULL;
- }
- if(pSelf->hWindow)
- {
- ::InvalidateRect(pSelf->hWindow, NULL, FALSE);
- }
- pSelf->bWindowHooked = FALSE;
- tsk_safeobj_unlock(pSelf);
- return S_OK;
+ tsk_safeobj_lock(pSelf);
+ if (pSelf->hWindow && pSelf->wndProc) {
+ SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
+ pSelf->wndProc = NULL;
+ }
+ if(pSelf->hWindow) {
+ ::InvalidateRect(pSelf->hWindow, NULL, FALSE);
+ }
+ pSelf->bWindowHooked = FALSE;
+ tsk_safeobj_unlock(pSelf);
+ return S_OK;
}
@@ -1038,39 +970,38 @@ static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf)
// To avoid chroma conversion (a performance issue) we use NV12 when the codec is bundled, as Media Foundation codecs most likely only support this format.
// NV12 is the native format for Media Foundation codecs (e.g. Intel Quick Sync) and the GPU.
// I420 is the native format for FFmpeg, libvpx and libtheora.
-const GUID kDefaultUncompressedType
+const GUID kDefaultUncompressedType
#if PLUGIN_MF_CV_BUNDLE_CODEC
-= MFVideoFormat_NV12;
+ = MFVideoFormat_NV12;
#else
-= MFVideoFormat_I420;
+ = MFVideoFormat_I420;
#endif
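As a quick reference for the format note above, a minimal sketch (assumed helper, not part of this patch) of how the two 4:2:0 layouts arrange their planes:

// Both NV12 and I420 carry 12 bits per pixel; only the chroma arrangement differs.
static inline size_t Yuv420FrameSize(size_t w, size_t h) { return (w * h * 3) / 2; } // same for NV12 and I420
// I420: Y plane (w*h bytes), then U plane (w/2 * h/2), then V plane (w/2 * h/2) -- three separate planes.
// NV12: Y plane (w*h bytes), then one interleaved UV plane (w/2 * h/2 * 2)      -- two planes, UVUV...
// Converting between them is a pure reshuffle of chroma bytes, which is the copy the comment above wants to avoid.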
DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
-0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
static void* TSK_STDCALL RunSessionThread(void *pArg);
static int _plugin_win_mf_consumer_video_unprepare(struct plugin_win_mf_consumer_video_s* pSelf);
-typedef struct plugin_win_mf_consumer_video_s
-{
- TMEDIA_DECLARE_CONSUMER;
-
- bool bStarted, bPrepared;
- HWND hWindow;
- tsk_thread_handle_t* ppTread[1];
+typedef struct plugin_win_mf_consumer_video_s {
+ TMEDIA_DECLARE_CONSUMER;
+
+ bool bStarted, bPrepared;
+ HWND hWindow;
+ tsk_thread_handle_t* ppTread[1];
- UINT32 nNegWidth;
- UINT32 nNegHeight;
- UINT32 nNegFps;
+ UINT32 nNegWidth;
+ UINT32 nNegHeight;
+ UINT32 nNegFps;
- MFCodecVideo *pDecoder;
+ MFCodecVideo *pDecoder;
IMFMediaSession *pSession;
CMFSource *pSource;
IMFActivate *pSinkActivate;
- DisplayWatcher* pDisplayWatcher;
+ DisplayWatcher* pDisplayWatcher;
IMFTopology *pTopologyFull;
- IMFTopology *pTopologyPartial;
- IMFMediaType *pOutType;
+ IMFTopology *pTopologyPartial;
+ IMFMediaType *pOutType;
}
plugin_win_mf_consumer_video_t;
@@ -1079,437 +1010,437 @@ plugin_win_mf_consumer_video_t;
/* ============ Media Consumer Interface ================= */
static int plugin_win_mf_consumer_video_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- HRESULT hr = S_OK;
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!self || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(param->value_type == tmedia_pvt_int64){
- if(tsk_striequals(param->key, "remote-hwnd")){
- HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
- if(hWnd != pSelf->hWindow)
- {
- pSelf->hWindow = hWnd;
- if(pSelf->pDisplayWatcher)
- {
- CHECK_HR(hr = pSelf->pDisplayWatcher->SetHwnd(hWnd));
- }
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "fullscreen")){
- if(pSelf->pDisplayWatcher)
- {
- CHECK_HR(hr = pSelf->pDisplayWatcher->SetFullscreen(!!*((int32_t*)param->value)));
- }
- }
- else if(tsk_striequals(param->key, "create-on-current-thead")){
- // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if(tsk_striequals(param->key, "plugin-firefox")){
- /*DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
- if(DSCONSUMER(self)->display){
- DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
- }*/
- }
- }
+ int ret = 0;
+ HRESULT hr = S_OK;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(param->value_type == tmedia_pvt_int64) {
+ if(tsk_striequals(param->key, "remote-hwnd")) {
+ HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+ if(hWnd != pSelf->hWindow) {
+ pSelf->hWindow = hWnd;
+ if(pSelf->pDisplayWatcher) {
+ CHECK_HR(hr = pSelf->pDisplayWatcher->SetHwnd(hWnd));
+ }
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "fullscreen")) {
+ if(pSelf->pDisplayWatcher) {
+ CHECK_HR(hr = pSelf->pDisplayWatcher->SetFullscreen(!!*((int32_t*)param->value)));
+ }
+ }
+ else if(tsk_striequals(param->key, "create-on-current-thead")) {
+ // DSCONSUMER(self)->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if(tsk_striequals(param->key, "plugin-firefox")) {
+ /*DSCONSUMER(self)->plugin_firefox = (*((int32_t*)param->value) != 0);
+ if(DSCONSUMER(self)->display){
+ DSCONSUMER(self)->display->setPluginFirefox((DSCONSUMER(self)->plugin_firefox == tsk_true));
+ }*/
+ }
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!pSelf || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(pSelf->bPrepared){
- TSK_DEBUG_WARN("MF video consumer already prepared");
- return -1;
- }
-
- // FIXME: DirectShow requires flipping but not MF
- // The Core library always tries to flip when OSType==Win32. Must be changed
- TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
-
- HRESULT hr = S_OK;
-
- TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
-
- if(!TMEDIA_CONSUMER(pSelf)->video.display.width){
- TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
- }
- if(!TMEDIA_CONSUMER(pSelf)->video.display.height){
- TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
- }
-
- pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
- pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.display.width;
- pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.display.height;
-
- TSK_DEBUG_INFO("MF video consumer: fps=%d, width=%d, height=%d",
- pSelf->nNegFps,
- pSelf->nNegWidth,
- pSelf->nNegHeight);
-
- if(kDefaultUncompressedType == MFVideoFormat_NV12) {
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_nv12;
- }
- else if(kDefaultUncompressedType == MFVideoFormat_I420) {
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
- }
- else if(kDefaultUncompressedType == MFVideoFormat_RGB32) {
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
- }
- else if(kDefaultUncompressedType == MFVideoFormat_RGB24) {
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb24;
- }
- else {
- CHECK_HR(hr = E_NOTIMPL);
- }
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- IMFMediaSink* pMediaSink = NULL;
- IMFAttributes* pSessionAttributes = NULL;
-
- // Set session attributes
- CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
- CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
-
- CHECK_HR(hr = MFCreateMediaType(&pSelf->pOutType));
- CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!pSelf || !codec || !codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(pSelf->bPrepared) {
+ TSK_DEBUG_WARN("MF video consumer already prepared");
+ return -1;
+ }
+
+ // FIXME: DirectShow requires flipping but not MF
+ // The Core library always tries to flip when OSType==Win32. Must be changed
+ TMEDIA_CODEC_VIDEO(codec)->in.flip = tsk_false;
+
+ HRESULT hr = S_OK;
+
+ TMEDIA_CONSUMER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(pSelf)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(pSelf)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.width) {
+ TMEDIA_CONSUMER(pSelf)->video.display.width = TMEDIA_CONSUMER(pSelf)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(pSelf)->video.display.height) {
+ TMEDIA_CONSUMER(pSelf)->video.display.height = TMEDIA_CONSUMER(pSelf)->video.in.height;
+ }
+
+ pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
+ pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.display.width;
+ pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.display.height;
+
+ TSK_DEBUG_INFO("MF video consumer: fps=%d, width=%d, height=%d",
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ if(kDefaultUncompressedType == MFVideoFormat_NV12) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_nv12;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_I420) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_RGB32) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb32;
+ }
+ else if(kDefaultUncompressedType == MFVideoFormat_RGB24) {
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_rgb24;
+ }
+ else {
+ CHECK_HR(hr = E_NOTIMPL);
+ }
+ TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+ IMFMediaSink* pMediaSink = NULL;
+ IMFAttributes* pSessionAttributes = NULL;
+
+ // Set session attributes
+ CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+ CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pOutType));
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
#if PLUGIN_MF_CV_BUNDLE_CODEC
- if((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
- // both Microsoft and Intel encoders support NV12 only as input
- // static const BOOL kIsEncoder = FALSE;
- // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pSelf->pDecoder);
- pSelf->pDecoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder);
- if(pSelf->pDecoder)
- {
- hr = pSelf->pDecoder->Initialize(
- pSelf->nNegFps,
- pSelf->nNegWidth,
- pSelf->nNegHeight);
-
- if(FAILED(hr))
- {
- SafeRelease(&pSelf->pDecoder);
- hr = S_OK;
- }
- }
- if(SUCCEEDED(hr) && pSelf->pDecoder) {
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = codec->id; // means accept ENCODED fames
- CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
- }
- else {
- SafeRelease(&pSelf->pDecoder);
- TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
- }
- }
+ if((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
+ // both Microsoft and Intel encoders support NV12 only as input
+ // static const BOOL kIsEncoder = FALSE;
+ // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_H264, MFVideoFormat_NV12, &pSelf->pDecoder);
+ pSelf->pDecoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Decoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Decoder);
+ if(pSelf->pDecoder) {
+ hr = pSelf->pDecoder->Initialize(
+ pSelf->nNegFps,
+ pSelf->nNegWidth,
+ pSelf->nNegHeight);
+
+ if(FAILED(hr)) {
+ SafeRelease(&pSelf->pDecoder);
+ hr = S_OK;
+ }
+ }
+ if(SUCCEEDED(hr) && pSelf->pDecoder) {
+ TMEDIA_CONSUMER(pSelf)->decoder.codec_id = codec->id; // means accept ENCODED frames
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
+ }
+ else {
+ SafeRelease(&pSelf->pDecoder);
+ TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
+ }
+ }
#endif
- if(!pSelf->pDecoder){
- CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, kDefaultUncompressedType));
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = kDefaultUncompressedType == MFVideoFormat_NV12 ? tmedia_chroma_nv12 : tmedia_chroma_yuv420p;
- }
+ if(!pSelf->pDecoder) {
+ CHECK_HR(hr = pSelf->pOutType->SetGUID(MF_MT_SUBTYPE, kDefaultUncompressedType));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = kDefaultUncompressedType == MFVideoFormat_NV12 ? tmedia_chroma_nv12 : tmedia_chroma_yuv420p;
+ }
CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
- CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
+ CHECK_HR(hr = pSelf->pOutType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, pSelf->nNegWidth, pSelf->nNegHeight));
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, pSelf->nNegFps, 1));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, pSelf->nNegFps, 1));
CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
- CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&pSelf->pSource, pSelf->pOutType));
+ CHECK_HR(hr = CMFSource::CreateInstanceEx(IID_IMFMediaSource, (void**)&pSelf->pSource, pSelf->pOutType));
- // Apply Encoder output type (must be called before SetInputType)
- //if(pSelf->pDecoder) {
- // CHECK_HR(hr = pSelf->pDecoder->SetOutputType(0, pSelf->pOutType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
- //}
+ // Apply decoder output type (must be called before SetInputType)
+ //if(pSelf->pDecoder) {
+ // CHECK_HR(hr = pSelf->pDecoder->SetOutputType(0, pSelf->pOutType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+ //}
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
- // Create the EVR activation object.
- CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSelf->pSinkActivate));
+ // Create the EVR activation object.
+ CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSelf->pSinkActivate));
- // Create the topology.
- CHECK_HR(hr = MFUtils::CreateTopology(
- pSelf->pSource,
- pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
- pSelf->pSinkActivate,
- NULL/*Preview*/,
- pSelf->pOutType,
- &pSelf->pTopologyPartial));
- // Resolve topology (adds video processors if needed).
- CHECK_HR(hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pSelf->pTopologyFull));
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(
+ pSelf->pSource,
+ pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
+ pSelf->pSinkActivate,
+ NULL/*Preview*/,
+ pSelf->pOutType,
+ &pSelf->pTopologyPartial));
+ // Resolve topology (adds video processors if needed).
+ CHECK_HR(hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pSelf->pTopologyFull));
- // Find EVR
- CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink));
+ // Find EVR
+ CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink));
- // Create EVR watcher
- pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
- CHECK_HR(hr);
+ // Create EVR watcher
+ pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
+ CHECK_HR(hr);
bail:
- SafeRelease(&pMediaSink);
- SafeRelease(&pSessionAttributes);
-
- pSelf->bPrepared = SUCCEEDED(hr);
- return pSelf->bPrepared ? 0 : -1;
+ SafeRelease(&pMediaSink);
+ SafeRelease(&pSessionAttributes);
+
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return pSelf->bPrepared ? 0 : -1;
}
static int plugin_win_mf_consumer_video_start(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(pSelf->bStarted){
- TSK_DEBUG_INFO("MF video consumer already started");
- return 0;
- }
- if(!pSelf->bPrepared){
- TSK_DEBUG_ERROR("MF video consumer not prepared");
- return -1;
- }
-
- HRESULT hr = S_OK;
-
- // Run EVR watcher
- if(pSelf->pDisplayWatcher) {
- CHECK_HR(hr = pSelf->pDisplayWatcher->Start());
- }
-
- // Run the media session.
- CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopologyFull));
-
- // Start asynchronous watcher thread
- pSelf->bStarted = true;
- int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
- if(ret != 0) {
- TSK_DEBUG_ERROR("Failed to create thread");
- hr = E_FAIL;
- pSelf->bStarted = false;
- if(pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
- CHECK_HR(hr = E_FAIL);
- }
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video consumer already started");
+ return 0;
+ }
+ if(!pSelf->bPrepared) {
+ TSK_DEBUG_ERROR("MF video consumer not prepared");
+ return -1;
+ }
+
+ HRESULT hr = S_OK;
+
+ // Run EVR watcher
+ if(pSelf->pDisplayWatcher) {
+ CHECK_HR(hr = pSelf->pDisplayWatcher->Start());
+ }
+
+ // Run the media session.
+ CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopologyFull));
+
+ // Start asynchronous watcher thread
+ pSelf->bStarted = true;
+ int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+ if(ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ hr = E_FAIL;
+ pSelf->bStarted = false;
+ if(pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
+ CHECK_HR(hr = E_FAIL);
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
-
- HRESULT hr = S_OK;
-
- if(!pSelf || !buffer || !size) {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_INVALIDARG);
- }
-
- if(!pSelf->bStarted) {
- TSK_DEBUG_INFO("MF video consumer not started");
- CHECK_HR(hr = E_FAIL);
- }
- if(!pSelf->pSource) {
- TSK_DEBUG_ERROR("No video custom source");
- CHECK_HR(hr = E_FAIL);
- }
-
- if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height){
- TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
- pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
- pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
- // Update media type
- CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, TMEDIA_CONSUMER(pSelf)->video.in.width, TMEDIA_CONSUMER(pSelf)->video.in.height));
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, TMEDIA_CONSUMER(pSelf)->video.fps, 1));
-
- CHECK_HR(hr = pSelf->pSession->ClearTopologies());
-
- //
- // FIXME: Using same EVR when the size is just swapped (e.g. [640, 480] -> [480, 640]) doesn't work while other changes does (e.g. [352, 288] -> [640, 480])
- // /!\This look like a bug in Media Foundation
- //
- if(pSelf->nNegWidth == TMEDIA_CONSUMER(pSelf)->video.in.height && pSelf->nNegHeight == TMEDIA_CONSUMER(pSelf)->video.in.width) // swapped?
- {
- TSK_DEBUG_INFO("/!\\ Size swapped");
-
- IMFActivate* pSinkActivate = NULL;
- IMFTopology* pTopologyPartial = NULL;
- hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSinkActivate);
- if(FAILED(hr)) goto end_of_swapping;
- hr = MFUtils::CreateTopology(
- pSelf->pSource,
- pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
- pSinkActivate,
- NULL/*Preview*/,
- pSelf->pOutType,
- &pTopologyPartial);
- if(FAILED(hr)) goto end_of_swapping;
-
- if(SUCCEEDED(hr)) {
- SafeRelease(&pSelf->pSinkActivate);
- SafeRelease(&pSelf->pTopologyPartial);
- pSelf->pSinkActivate = pSinkActivate; pSinkActivate = NULL;
- pSelf->pTopologyPartial = pTopologyPartial; pTopologyPartial = NULL;
-
- }
-
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+
+ HRESULT hr = S_OK;
+
+ if(!pSelf || !buffer || !size) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_INVALIDARG);
+ }
+
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video consumer not started");
+ CHECK_HR(hr = E_FAIL);
+ }
+ if(!pSelf->pSource) {
+ TSK_DEBUG_ERROR("No video custom source");
+ CHECK_HR(hr = E_FAIL);
+ }
+
+ if(pSelf->nNegWidth != TMEDIA_CONSUMER(pSelf)->video.in.width || pSelf->nNegHeight != TMEDIA_CONSUMER(pSelf)->video.in.height) {
+ TSK_DEBUG_INFO("Negotiated and input video sizes are different:%d#%d or %d#%d",
+ pSelf->nNegWidth, TMEDIA_CONSUMER(pSelf)->video.in.width,
+ pSelf->nNegHeight, TMEDIA_CONSUMER(pSelf)->video.in.height);
+ // Update media type
+ CHECK_HR(hr = MFSetAttributeSize(pSelf->pOutType, MF_MT_FRAME_SIZE, TMEDIA_CONSUMER(pSelf)->video.in.width, TMEDIA_CONSUMER(pSelf)->video.in.height));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pOutType, MF_MT_FRAME_RATE, TMEDIA_CONSUMER(pSelf)->video.fps, 1));
+
+ CHECK_HR(hr = pSelf->pSession->ClearTopologies());
+
+ //
+ // FIXME: Using the same EVR when the size is just swapped (e.g. [640, 480] -> [480, 640]) doesn't work, while other changes do (e.g. [352, 288] -> [640, 480])
+ // /!\ This looks like a bug in Media Foundation
+ //
+ if(pSelf->nNegWidth == TMEDIA_CONSUMER(pSelf)->video.in.height && pSelf->nNegHeight == TMEDIA_CONSUMER(pSelf)->video.in.width) { // swapped?
+ TSK_DEBUG_INFO("/!\\ Size swapped");
+
+ IMFActivate* pSinkActivate = NULL;
+ IMFTopology* pTopologyPartial = NULL;
+ hr = MFCreateVideoRendererActivate(pSelf->hWindow, &pSinkActivate);
+ if(FAILED(hr)) {
+ goto end_of_swapping;
+ }
+ hr = MFUtils::CreateTopology(
+ pSelf->pSource,
+ pSelf->pDecoder ? pSelf->pDecoder->GetMFT() : NULL,
+ pSinkActivate,
+ NULL/*Preview*/,
+ pSelf->pOutType,
+ &pTopologyPartial);
+ if(FAILED(hr)) {
+ goto end_of_swapping;
+ }
+
+ if(SUCCEEDED(hr)) {
+ SafeRelease(&pSelf->pSinkActivate);
+ SafeRelease(&pSelf->pTopologyPartial);
+ pSelf->pSinkActivate = pSinkActivate;
+ pSinkActivate = NULL;
+ pSelf->pTopologyPartial = pTopologyPartial;
+ pTopologyPartial = NULL;
+
+ }
+
end_of_swapping:
- SafeRelease(&pSinkActivate);
- SafeRelease(&pTopologyPartial);
- CHECK_HR(hr);
- }
-
- // Set media type again (not required but who know)
- CHECK_HR(hr = MFUtils::SetMediaType(pSelf->pSource, pSelf->pOutType));
-
- // Rebuild topology using the partial one
- IMFTopology* pTopologyFull = NULL;
- hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pTopologyFull);
- if(SUCCEEDED(hr)){
- SafeRelease(&pSelf->pTopologyFull);
- pSelf->pTopologyFull = pTopologyFull; pTopologyFull = NULL;
- }
- SafeRelease(&pTopologyFull);
- CHECK_HR(hr);
-
- // Find Main Sink
- IMFMediaSink* pMediaSink = NULL;
- hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink);
- if(SUCCEEDED(hr)) {
- if(pSelf->pDisplayWatcher){
- delete pSelf->pDisplayWatcher, pSelf->pDisplayWatcher = NULL;
- }
- pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
- if(SUCCEEDED(hr) && pSelf->bStarted) {
- hr = pSelf->pDisplayWatcher->Start();
- }
- }
- SafeRelease(&pMediaSink);
- CHECK_HR(hr);
-
- // Update the topology associated to the media session
- CHECK_HR(hr = pSelf->pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pSelf->pTopologyFull));
-
- // Update negotiated width and height
- pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.in.width;
- pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.in.height;
- }
-
- // Deliver buffer
- CHECK_HR(hr = pSelf->pSource->CopyVideoBuffer(pSelf->nNegWidth, pSelf->nNegHeight, buffer, size));
+ SafeRelease(&pSinkActivate);
+ SafeRelease(&pTopologyPartial);
+ CHECK_HR(hr);
+ }
+
+ // Set media type again (not required but who knows)
+ CHECK_HR(hr = MFUtils::SetMediaType(pSelf->pSource, pSelf->pOutType));
+
+ // Rebuild topology using the partial one
+ IMFTopology* pTopologyFull = NULL;
+ hr = MFUtils::ResolveTopology(pSelf->pTopologyPartial, &pTopologyFull);
+ if(SUCCEEDED(hr)) {
+ SafeRelease(&pSelf->pTopologyFull);
+ pSelf->pTopologyFull = pTopologyFull;
+ pTopologyFull = NULL;
+ }
+ SafeRelease(&pTopologyFull);
+ CHECK_HR(hr);
+
+ // Find Main Sink
+ IMFMediaSink* pMediaSink = NULL;
+ hr = MFUtils::FindNodeObject(pSelf->pTopologyFull, MFUtils::g_ullTopoIdSinkMain, (void**)&pMediaSink);
+ if(SUCCEEDED(hr)) {
+ if(pSelf->pDisplayWatcher) {
+ delete pSelf->pDisplayWatcher, pSelf->pDisplayWatcher = NULL;
+ }
+ pSelf->pDisplayWatcher = new DisplayWatcher(pSelf->hWindow, pMediaSink, hr);
+ if(SUCCEEDED(hr) && pSelf->bStarted) {
+ hr = pSelf->pDisplayWatcher->Start();
+ }
+ }
+ SafeRelease(&pMediaSink);
+ CHECK_HR(hr);
+
+ // Update the topology associated to the media session
+ CHECK_HR(hr = pSelf->pSession->SetTopology(MFSESSION_SETTOPOLOGY_IMMEDIATE, pSelf->pTopologyFull));
+
+ // Update negotiated width and height
+ pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.in.width;
+ pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.in.height;
+ }
+
+ // Deliver buffer
+ CHECK_HR(hr = pSelf->pSource->CopyVideoBuffer(pSelf->nNegWidth, pSelf->nNegHeight, buffer, size));
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_pause(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("MF video producer not started");
- return 0;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(!pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video producer not started");
+ return 0;
+ }
- HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+ HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_consumer_video_stop(tmedia_consumer_t* self)
{
- plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
+ plugin_win_mf_consumer_video_t* pSelf = (plugin_win_mf_consumer_video_t*)self;
- if(!pSelf){
+ if(!pSelf) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
HRESULT hr = S_OK;
- // stop EVR watcher
- if(pSelf->pDisplayWatcher) {
- hr = pSelf->pDisplayWatcher->Stop();
- }
+ // stop EVR watcher
+ if(pSelf->pDisplayWatcher) {
+ hr = pSelf->pDisplayWatcher->Stop();
+ }
// for the thread
pSelf->bStarted = false;
hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
- if(pSelf->ppTread[0]){
+ if(pSelf->ppTread[0]) {
tsk_thread_join(&pSelf->ppTread[0]);
}
hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
- // next start() will be called after prepare()
- return _plugin_win_mf_consumer_video_unprepare(pSelf);
+ // next start() will be called after prepare()
+ return _plugin_win_mf_consumer_video_unprepare(pSelf);
}
static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_t* pSelf)
{
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(pSelf->bStarted) {
- // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
- }
-
- if(pSelf->pDisplayWatcher) {
- pSelf->pDisplayWatcher->Stop();
- }
- if(pSelf->pSource){
- pSelf->pSource->Shutdown();
- pSelf->pSource = NULL;
- }
- if(pSelf->pSession){
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ TSK_DEBUG_ERROR("Consumer must be stopped before calling unprepare");
+ }
+
+ if(pSelf->pDisplayWatcher) {
+ pSelf->pDisplayWatcher->Stop();
+ }
+ if(pSelf->pSource) {
+ pSelf->pSource->Shutdown();
+ pSelf->pSource = NULL;
+ }
+ if(pSelf->pSession) {
pSelf->pSession->Shutdown();
- pSelf->pSession = NULL;
+ pSelf->pSession = NULL;
}
- SafeRelease(&pSelf->pDecoder);
+ SafeRelease(&pSelf->pDecoder);
SafeRelease(&pSelf->pSession);
SafeRelease(&pSelf->pSource);
SafeRelease(&pSelf->pSinkActivate);
SafeRelease(&pSelf->pTopologyFull);
- SafeRelease(&pSelf->pTopologyPartial);
- SafeRelease(&pSelf->pOutType);
+ SafeRelease(&pSelf->pTopologyPartial);
+ SafeRelease(&pSelf->pOutType);
- if(pSelf->pDisplayWatcher) {
- delete pSelf->pDisplayWatcher;
- pSelf->pDisplayWatcher = NULL;
- }
+ if(pSelf->pDisplayWatcher) {
+ delete pSelf->pDisplayWatcher;
+ pSelf->pDisplayWatcher = NULL;
+ }
- pSelf->bPrepared = false;
+ pSelf->bPrepared = false;
- return 0;
+ return 0;
}
@@ -1519,102 +1450,98 @@ static int _plugin_win_mf_consumer_video_unprepare(plugin_win_mf_consumer_video_
/* constructor */
static tsk_object_t* plugin_win_mf_consumer_video_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
- if(pSelf){
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
- TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
- TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW fames
-
- /* init self */
- // consumer->create_on_ui_thread = tsk_true;
- TMEDIA_CONSUMER(pSelf)->video.fps = 15;
- TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
- TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
-
- TSK_DEBUG_INFO("Create WinMF video consumer");
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(pSelf));
+ TMEDIA_CONSUMER(pSelf)->video.display.chroma = tmedia_chroma_yuv420p;
+ TMEDIA_CONSUMER(pSelf)->decoder.codec_id = tmedia_codec_id_none; // means accept RAW frames
+
+ /* init self */
+ // consumer->create_on_ui_thread = tsk_true;
+ TMEDIA_CONSUMER(pSelf)->video.fps = 15;
+ TMEDIA_CONSUMER(pSelf)->video.display.width = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.height = 0; // use codec value
+ TMEDIA_CONSUMER(pSelf)->video.display.auto_resize = tsk_true;
+
+ TSK_DEBUG_INFO("Create WinMF video consumer");
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_consumer_video_dtor(tsk_object_t * self)
-{
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
- if(pSelf){
- /* stop */
- if(pSelf->bStarted){
- plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
- }
-
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
- /* deinit self */
- _plugin_win_mf_consumer_video_unprepare(pSelf);
- }
-
- return self;
+{
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)self;
+ if(pSelf) {
+ /* stop */
+ if(pSelf->bStarted) {
+ plugin_win_mf_consumer_video_stop(TMEDIA_CONSUMER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(pSelf));
+ /* deinit self */
+ _plugin_win_mf_consumer_video_unprepare(pSelf);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_consumer_video_def_s =
-{
- sizeof(plugin_win_mf_consumer_video_t),
- plugin_win_mf_consumer_video_ctor,
- plugin_win_mf_consumer_video_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_mf_consumer_video_def_s = {
+ sizeof(plugin_win_mf_consumer_video_t),
+ plugin_win_mf_consumer_video_ctor,
+ plugin_win_mf_consumer_video_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s =
-{
- &plugin_win_mf_consumer_video_def_s,
-
- tmedia_video,
- "Media Foundation video consumer",
-
- plugin_win_mf_consumer_video_set,
- plugin_win_mf_consumer_video_prepare,
- plugin_win_mf_consumer_video_start,
- plugin_win_mf_consumer_video_consume,
- plugin_win_mf_consumer_video_pause,
- plugin_win_mf_consumer_video_stop
+static const tmedia_consumer_plugin_def_t plugin_win_mf_consumer_video_plugin_def_s = {
+ &plugin_win_mf_consumer_video_def_s,
+
+ tmedia_video,
+ "Media Foundation video consumer",
+
+ plugin_win_mf_consumer_video_set,
+ plugin_win_mf_consumer_video_prepare,
+ plugin_win_mf_consumer_video_start,
+ plugin_win_mf_consumer_video_consume,
+ plugin_win_mf_consumer_video_pause,
+ plugin_win_mf_consumer_video_stop
};
const tmedia_consumer_plugin_def_t *plugin_win_mf_consumer_video_plugin_def_t = &plugin_win_mf_consumer_video_plugin_def_s;
// Run session async thread
static void* TSK_STDCALL RunSessionThread(void *pArg)
{
- plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
- MediaEventType met;
-
- TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - ENTER");
-
- while(pSelf->bStarted){
- CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- CHECK_HR(hr = pEvent->GetType(&met));
-
- if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
- if (met == MESessionEnded)
- {
- break;
- }
- SafeRelease(&pEvent);
- }
+ plugin_win_mf_consumer_video_t *pSelf = (plugin_win_mf_consumer_video_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - ENTER");
+
+ while(pSelf->bStarted) {
+ CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded) {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread (MF video consumer) - EXIT");
- return NULL;
+ return NULL;
}
#endif /* PLUGIN_MF_CV_USE_D3D9 */
\ No newline at end of file
diff --git a/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx b/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
index 0e6abcb..70a1b4c 100755
--- a/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_converter_video.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013-2015 Mamadou DIOP
* Copyright (C) 2013-2015 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -53,9 +53,9 @@ EXTERN_C const GUID CLSID_VideoProcessorMFT; // defined in mf_utils.cxx
#define _WIN32_WINNT_WIN8 0x0602
#endif /* _WIN32_WINNT_WIN8 */
-#if (WINVER < _WIN32_WINNT_WIN8)
+#if (WINVER < _WIN32_WINNT_WIN8)
DEFINE_GUID(MF_SA_D3D11_AWARE,
- 0x206b4fc8, 0xfcf9, 0x4c51, 0xaf, 0xe3, 0x97, 0x64, 0x36, 0x9e, 0x33, 0xa0);
+ 0x206b4fc8, 0xfcf9, 0x4c51, 0xaf, 0xe3, 0x97, 0x64, 0x36, 0x9e, 0x33, 0xa0);
#endif /* MF_SA_D3D11_AWARE */
#if !defined(HAVE_IMFVideoProcessorControl)
@@ -72,34 +72,33 @@ DEFINE_GUID(MF_SA_D3D11_AWARE,
#define PLUGIN_MF_VC_FPS 120 // Samples requires timestamp
#endif /* PLUGIN_MF_VC_FPS */
-typedef struct plugin_win_mf_converter_video_ms_s
-{
- TMEDIA_DECLARE_CONVERTER_VIDEO;
-
- GUID fmtSrc;
- tsk_size_t widthSrc;
- tsk_size_t heightSrc;
-
- GUID fmtDst;
- tsk_size_t widthDst;
- tsk_size_t heightDst;
-
- UINT32 rotation;
- UINT32 xOutputSize;
- UINT32 xInputSize;
- BOOL flip;
-
- IMFSample* pSampleOut;
- IMFSample* pSampleIn;
-
- LONGLONG rtStart;
+typedef struct plugin_win_mf_converter_video_ms_s {
+ TMEDIA_DECLARE_CONVERTER_VIDEO;
+
+ GUID fmtSrc;
+ tsk_size_t widthSrc;
+ tsk_size_t heightSrc;
+
+ GUID fmtDst;
+ tsk_size_t widthDst;
+ tsk_size_t heightDst;
+
+ UINT32 rotation;
+ UINT32 xOutputSize;
+ UINT32 xInputSize;
+ BOOL flip;
+
+ IMFSample* pSampleOut;
+ IMFSample* pSampleIn;
+
+ LONGLONG rtStart;
UINT64 rtDuration;
- IMFTransform* pMFT; // "CLSID_VideoProcessorMFT" or "CLSID_CColorConvertDMO"
+ IMFTransform* pMFT; // "CLSID_VideoProcessorMFT" or "CLSID_CColorConvertDMO"
#if HAVE_IMFVideoProcessorControl
- IMFVideoProcessorControl* pVPC;
+ IMFVideoProcessorControl* pVPC;
#endif
- BOOL isVideoProcessor;
+ BOOL isVideoProcessor;
}
plugin_win_mf_converter_video_ms_t;
@@ -110,324 +109,299 @@ static inline HRESULT _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
const BYTE* pSrc,
INT dwWidthInPixels,
INT dwHeightInPixels
- );
+);
static HRESULT _plugin_win_mf_converter_video_ms_process_input(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample* pSample);
static HRESULT _plugin_win_mf_converter_video_ms_process_output(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample **ppSample);
static HRESULT _plugin_win_mf_converter_video_ms_process(plugin_win_mf_converter_video_ms_t* pSelf, const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut);
static int plugin_win_mf_converter_video_ms_init(tmedia_converter_video_t* self, tsk_size_t srcWidth, tsk_size_t srcHeight, tmedia_chroma_t srcChroma, tsk_size_t dstWidth, tsk_size_t dstHeight, tmedia_chroma_t dstChroma)
{
- plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)self;
- TSK_DEBUG_INFO("Initializing new MF Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", srcWidth, srcHeight, srcChroma, dstWidth, dstHeight, dstChroma);
-
- if((pSelf->fmtSrc = _plugin_win_mf_converter_video_ms_get_pixfmt(srcChroma)) == GUID_NULL)
- {
- TSK_DEBUG_ERROR("Invalid source chroma");
- return -2;
- }
- if((pSelf->fmtDst = _plugin_win_mf_converter_video_ms_get_pixfmt(dstChroma)) == GUID_NULL)
- {
- TSK_DEBUG_ERROR("Invalid destination chroma");
- return -3;
- }
-
- pSelf->rtStart = 0;
-
- pSelf->widthSrc = srcWidth;
- pSelf->heightSrc = srcHeight;
- pSelf->widthDst = dstWidth;
- pSelf->heightDst = dstHeight;
- pSelf->rotation = 0;
- pSelf->xOutputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(dstChroma, dstWidth, dstHeight);
- pSelf->xInputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(srcChroma, srcWidth, srcHeight);
-
- SafeRelease(&pSelf->pSampleOut);
- SafeRelease(&pSelf->pSampleIn);
- SafeRelease(&pSelf->pMFT);
+ plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)self;
+ TSK_DEBUG_INFO("Initializing new MF Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", srcWidth, srcHeight, srcChroma, dstWidth, dstHeight, dstChroma);
+
+ if((pSelf->fmtSrc = _plugin_win_mf_converter_video_ms_get_pixfmt(srcChroma)) == GUID_NULL) {
+ TSK_DEBUG_ERROR("Invalid source chroma");
+ return -2;
+ }
+ if((pSelf->fmtDst = _plugin_win_mf_converter_video_ms_get_pixfmt(dstChroma)) == GUID_NULL) {
+ TSK_DEBUG_ERROR("Invalid destination chroma");
+ return -3;
+ }
+
+ pSelf->rtStart = 0;
+
+ pSelf->widthSrc = srcWidth;
+ pSelf->heightSrc = srcHeight;
+ pSelf->widthDst = dstWidth;
+ pSelf->heightDst = dstHeight;
+ pSelf->rotation = 0;
+ pSelf->xOutputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(dstChroma, dstWidth, dstHeight);
+ pSelf->xInputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(srcChroma, srcWidth, srcHeight);
+
+ SafeRelease(&pSelf->pSampleOut);
+ SafeRelease(&pSelf->pSampleIn);
+ SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
- SafeRelease(&pSelf->pVPC);
+ SafeRelease(&pSelf->pVPC);
#endif
- HRESULT hr = S_OK;
-
- IMFMediaType* pTypeSrc = NULL;
- IMFMediaType* pTypeDst = NULL;
-
- // Get video processor or Color convertor
- hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT));
- pSelf->isVideoProcessor = SUCCEEDED(hr);
- if(FAILED(hr))
- {
- TSK_DEBUG_INFO("CoCreateInstance(CLSID_VideoProcessorMFT) failed");
- if(pSelf->widthSrc == pSelf->widthDst && pSelf->heightSrc == pSelf->heightDst)
- {
- TSK_DEBUG_INFO("No video scaling is required...perform CoCreateInstance(CLSID_CColorConvertDMO)");
- CHECK_HR(hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT)));
- }
- else
- {
- CHECK_HR(hr);
- }
- }
-
-
-
- if(pSelf->isVideoProcessor)
- {
- IMFAttributes* pAttributes = NULL;
- UINT32 GPU = 0;
- hr = pSelf->pMFT->GetAttributes(&pAttributes);
- if (SUCCEEDED(hr)) {
- hr = pAttributes->GetUINT32(MF_SA_D3D11_AWARE, &GPU);
- }
- SafeRelease(&pAttributes);
- TSK_DEBUG_INFO("MF_SA_D3D11_AWARE = %d", GPU);
+ HRESULT hr = S_OK;
+
+ IMFMediaType* pTypeSrc = NULL;
+ IMFMediaType* pTypeDst = NULL;
+
+ // Get video processor or color converter
+ hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT));
+ pSelf->isVideoProcessor = SUCCEEDED(hr);
+ if(FAILED(hr)) {
+ TSK_DEBUG_INFO("CoCreateInstance(CLSID_VideoProcessorMFT) failed");
+ if(pSelf->widthSrc == pSelf->widthDst && pSelf->heightSrc == pSelf->heightDst) {
+ TSK_DEBUG_INFO("No video scaling is required...perform CoCreateInstance(CLSID_CColorConvertDMO)");
+ CHECK_HR(hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL,
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSelf->pMFT)));
+ }
+ else {
+ CHECK_HR(hr);
+ }
+ }
+
+
+
+ if(pSelf->isVideoProcessor) {
+ IMFAttributes* pAttributes = NULL;
+ UINT32 GPU = 0;
+ hr = pSelf->pMFT->GetAttributes(&pAttributes);
+ if (SUCCEEDED(hr)) {
+ hr = pAttributes->GetUINT32(MF_SA_D3D11_AWARE, &GPU);
+ }
+ SafeRelease(&pAttributes);
+ TSK_DEBUG_INFO("MF_SA_D3D11_AWARE = %d", GPU);
#if HAVE_IMFVideoProcessorControl
- CHECK_HR(hr = pSelf->pMFT->QueryInterface(IID_PPV_ARGS(&pSelf->pVPC)));
+ CHECK_HR(hr = pSelf->pMFT->QueryInterface(IID_PPV_ARGS(&pSelf->pVPC)));
#endif
- }
+ }
- CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtSrc, &pTypeSrc, (UINT32)pSelf->widthSrc, (UINT32)pSelf->heightSrc));
- CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtDst, &pTypeDst, (UINT32)pSelf->widthDst, (UINT32)pSelf->heightDst));
+ CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtSrc, &pTypeSrc, (UINT32)pSelf->widthSrc, (UINT32)pSelf->heightSrc));
+ CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtDst, &pTypeDst, (UINT32)pSelf->widthDst, (UINT32)pSelf->heightDst));
- CHECK_HR(hr = pSelf->pMFT->SetInputType(0, pTypeSrc, 0));
- CHECK_HR(hr = pSelf->pMFT->SetOutputType(0, pTypeDst, 0));
+ CHECK_HR(hr = pSelf->pMFT->SetInputType(0, pTypeSrc, 0));
+ CHECK_HR(hr = pSelf->pMFT->SetOutputType(0, pTypeDst, 0));
bail:
- SafeRelease(&pTypeSrc);
- SafeRelease(&pTypeDst);
+ SafeRelease(&pTypeSrc);
+ SafeRelease(&pTypeDst);
- if(FAILED(hr))
- {
- SafeRelease(&pSelf->pMFT);
+ if(FAILED(hr)) {
+ SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
- SafeRelease(&pSelf->pVPC);
+ SafeRelease(&pSelf->pVPC);
#endif
- return -4;
- }
+ return -4;
+ }
- return 0;
+ return 0;
}
static tsk_size_t plugin_win_mf_converter_video_ms_process(tmedia_converter_video_t* _self, const void* buffer, tsk_size_t buffer_size, void** output, tsk_size_t* output_max_size)
{
- plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)_self;
+ plugin_win_mf_converter_video_ms_t* pSelf = (plugin_win_mf_converter_video_ms_t*)_self;
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- IMFSample *pSampleOut = NULL;
- IMFMediaBuffer* pBufferOut = NULL;
+ IMFSample *pSampleOut = NULL;
+ IMFMediaBuffer* pBufferOut = NULL;
- if(!pSelf || !buffer || !output || !output_max_size)
- {
- CHECK_HR(hr = E_POINTER);
- }
+ if(!pSelf || !buffer || !output || !output_max_size) {
+ CHECK_HR(hr = E_POINTER);
+ }
- if(!pSelf->pMFT)
- {
- TSK_DEBUG_ERROR("Not initialized");
- CHECK_HR(hr = E_FAIL);
- }
+ if(!pSelf->pMFT) {
+ TSK_DEBUG_ERROR("Not initialized");
+ CHECK_HR(hr = E_FAIL);
+ }
#if HAVE_IMFVideoProcessorControl
- if(!pSelf->pVPC && pSelf->isVideoProcessor)
- {
- TSK_DEBUG_ERROR("Not initialized");
- CHECK_HR(hr = E_FAIL);
- }
+ if(!pSelf->pVPC && pSelf->isVideoProcessor) {
+ TSK_DEBUG_ERROR("Not initialized");
+ CHECK_HR(hr = E_FAIL);
+ }
#endif
- if(*output_max_size < pSelf->xOutputSize)
- {
- if(!(*output = tsk_realloc(*output, pSelf->xOutputSize)))
- {
- *output_max_size = 0;
- TSK_DEBUG_ERROR("Failed to allocate buffer with size = %u", pSelf->xOutputSize);
- CHECK_HR(hr = E_OUTOFMEMORY);
- }
- *output_max_size = pSelf->xOutputSize;
- }
+ if(*output_max_size < pSelf->xOutputSize) {
+ if(!(*output = tsk_realloc(*output, pSelf->xOutputSize))) {
+ *output_max_size = 0;
+ TSK_DEBUG_ERROR("Failed to allocate buffer with size = %u", pSelf->xOutputSize);
+ CHECK_HR(hr = E_OUTOFMEMORY);
+ }
+ *output_max_size = pSelf->xOutputSize;
+ }
#if HAVE_IMFVideoProcessorControl
- if(pSelf->pVPC && !!_self->flip != !!pSelf->flip)
- {
- pSelf->flip = !!_self->flip;
- CHECK_HR(hr = pSelf->pVPC->SetMirror(pSelf->flip ? MIRROR_NONE : MIRROR_VERTICAL));
- }
- if(pSelf->pVPC && _self->rotation != pSelf->rotation)
- {
- _self->rotation = pSelf->rotation;
- CHECK_HR(hr = pSelf->pVPC->SetRotation(pSelf->rotation == 0 ? ROTATION_NONE : ROTATION_NORMAL));
-
- }
+ if(pSelf->pVPC && !!_self->flip != !!pSelf->flip) {
+ pSelf->flip = !!_self->flip;
+ CHECK_HR(hr = pSelf->pVPC->SetMirror(pSelf->flip ? MIRROR_NONE : MIRROR_VERTICAL));
+ }
+ if(pSelf->pVPC && _self->rotation != pSelf->rotation) {
+ pSelf->rotation = _self->rotation;
+ CHECK_HR(hr = pSelf->pVPC->SetRotation(pSelf->rotation == 0 ? ROTATION_NONE : ROTATION_NORMAL));
+
+ }
#endif
- CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process(
- pSelf, buffer, pSelf->xInputSize, &pSampleOut));
-
- if(pSampleOut)
- {
- CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
-
- BYTE* pBufferPtr = NULL;
- DWORD dwDataLength = 0;
- CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
- if(dwDataLength > 0)
- {
- if(dwDataLength != pSelf->xOutputSize)
- {
- TSK_DEBUG_ERROR("Output size mismatch");
- CHECK_HR(hr = E_BOUNDS);
- }
- CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
-
- // MFCopyImage() is optimized: MMX, SSE, or SSE2
- switch(_self->dstChroma)
- {
- // Don't waste your time guessing which parameter to use: The consumer will always request RGB32. If not used for consumer then, just memcpy()
- case tmedia_chroma_rgb32:
- {
- if(pSelf->isVideoProcessor)
- {
- hr = _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
- (BYTE*)*output,
- (const BYTE*)pBufferPtr,
- (INT)pSelf->widthDst,
- (INT)pSelf->heightDst
- );
- }
- else
- {
- hr = MFCopyImage(
- (BYTE*)*output,
- (LONG)(pSelf->widthDst << 2),
- (BYTE*)pBufferPtr,
- (LONG)(pSelf->widthDst << 2),
- (DWORD)(pSelf->widthDst << 2),
- (DWORD)pSelf->heightDst
- );
- }
-
-
- if(FAILED(hr))
- {
- // unlock() before leaving
- pBufferOut->Unlock();
- CHECK_HR(hr);
- }
- break;
- }
- default:
- {
- memcpy(*output, pBufferPtr, dwDataLength);
- }
- }
- CHECK_HR(hr = pBufferOut->Unlock());
- }
- }
-
- pSelf->rtStart += pSelf->rtDuration;
+ CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process(
+ pSelf, buffer, pSelf->xInputSize, &pSampleOut));
+
+ if(pSampleOut) {
+ CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
+
+ BYTE* pBufferPtr = NULL;
+ DWORD dwDataLength = 0;
+ CHECK_HR(hr = pBufferOut->GetCurrentLength(&dwDataLength));
+ if(dwDataLength > 0) {
+ if(dwDataLength != pSelf->xOutputSize) {
+ TSK_DEBUG_ERROR("Output size mismatch");
+ CHECK_HR(hr = E_BOUNDS);
+ }
+ CHECK_HR(hr = pBufferOut->Lock(&pBufferPtr, NULL, NULL));
+
+ // MFCopyImage() is optimized: MMX, SSE, or SSE2
+ switch(_self->dstChroma) {
+ // Don't waste your time guessing which parameter to use: the consumer will always request RGB32. If not used by a consumer, just memcpy()
+ case tmedia_chroma_rgb32: {
+ if(pSelf->isVideoProcessor) {
+ hr = _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
+ (BYTE*)*output,
+ (const BYTE*)pBufferPtr,
+ (INT)pSelf->widthDst,
+ (INT)pSelf->heightDst
+ );
+ }
+ else {
+ hr = MFCopyImage(
+ (BYTE*)*output,
+ (LONG)(pSelf->widthDst << 2),
+ (BYTE*)pBufferPtr,
+ (LONG)(pSelf->widthDst << 2),
+ (DWORD)(pSelf->widthDst << 2),
+ (DWORD)pSelf->heightDst
+ );
+ }
+
+
+ if(FAILED(hr)) {
+ // unlock() before leaving
+ pBufferOut->Unlock();
+ CHECK_HR(hr);
+ }
+ break;
+ }
+ default: {
+ memcpy(*output, pBufferPtr, dwDataLength);
+ }
+ }
+ CHECK_HR(hr = pBufferOut->Unlock());
+ }
+ }
+
+ pSelf->rtStart += pSelf->rtDuration;
bail:
- SafeRelease(&pSampleOut);
- SafeRelease(&pBufferOut);
+ SafeRelease(&pSampleOut);
+ SafeRelease(&pBufferOut);
- return SUCCEEDED(hr) ? pSelf->xOutputSize : 0;
+ return SUCCEEDED(hr) ? pSelf->xOutputSize : 0;
}
static tsk_object_t* plugin_win_mf_converter_video_ms_ctor(tsk_object_t * self, va_list * app)
{
- plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
- if(pSelf){
- HRESULT hr = MFFrameRateToAverageTimePerFrame(PLUGIN_MF_VC_FPS, 1, &pSelf->rtDuration);
- if(FAILED(hr)){
- pSelf->rtDuration = 83333; // 120 FPS
- }
- }
- return self;
+ plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
+ if(pSelf) {
+ HRESULT hr = MFFrameRateToAverageTimePerFrame(PLUGIN_MF_VC_FPS, 1, &pSelf->rtDuration);
+ if(FAILED(hr)) {
+ pSelf->rtDuration = 83333; // 120 FPS
+ }
+ }
+ return self;
}
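
The fallback duration is easy to check by hand: Media Foundation sample times and durations are expressed in 100-nanosecond units, so one frame lasts 10,000,000 / fps of those units, and 10,000,000 / 120 ≈ 83,333 matches the hard-coded value above. A hedged one-liner showing the same arithmetic (the helper name is illustrative):

    // Sketch: average frame duration in 100-ns units; FrameDuration100ns(120) == 83333.
    static UINT64 FrameDuration100ns(UINT32 fps)
    {
        return fps ? (10000000ULL / fps) : 0; // guard against division by zero
    }
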
static tsk_object_t* plugin_win_mf_converter_video_ms_dtor(tsk_object_t * self)
-{
- plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
- if(pSelf){
- SafeRelease(&pSelf->pSampleOut);
- SafeRelease(&pSelf->pSampleIn);
- SafeRelease(&pSelf->pMFT);
+{
+ plugin_win_mf_converter_video_ms_t *pSelf = (plugin_win_mf_converter_video_ms_t *)self;
+ if(pSelf) {
+ SafeRelease(&pSelf->pSampleOut);
+ SafeRelease(&pSelf->pSampleIn);
+ SafeRelease(&pSelf->pMFT);
#if HAVE_IMFVideoProcessorControl
- SafeRelease(&pSelf->pVPC);
+ SafeRelease(&pSelf->pVPC);
#endif
- }
+ }
- return self;
+ return self;
}
-static const tsk_object_def_t plugin_win_mf_converter_video_ms_def_s =
-{
- sizeof(plugin_win_mf_converter_video_ms_t),
- plugin_win_mf_converter_video_ms_ctor,
- plugin_win_mf_converter_video_ms_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_mf_converter_video_ms_def_s = {
+ sizeof(plugin_win_mf_converter_video_ms_t),
+ plugin_win_mf_converter_video_ms_ctor,
+ plugin_win_mf_converter_video_ms_dtor,
+ tsk_null,
};
const tsk_object_def_t *plugin_win_mf_converter_video_ms_def_t = &plugin_win_mf_converter_video_ms_def_s;
-static const tmedia_converter_video_plugin_def_t plugin_win_mf_converter_video_ms_plugin_def_s =
-{
- &plugin_win_mf_converter_video_ms_def_s,
-
- plugin_win_mf_converter_video_ms_init,
- plugin_win_mf_converter_video_ms_process
+static const tmedia_converter_video_plugin_def_t plugin_win_mf_converter_video_ms_plugin_def_s = {
+ &plugin_win_mf_converter_video_ms_def_s,
+
+ plugin_win_mf_converter_video_ms_init,
+ plugin_win_mf_converter_video_ms_process
};
const tmedia_converter_video_plugin_def_t *plugin_win_mf_converter_video_ms_plugin_def_t = &plugin_win_mf_converter_video_ms_plugin_def_s;
static inline tsk_size_t _plugin_win_mf_converter_video_ms_get_size(tmedia_chroma_t chroma, tsk_size_t w, tsk_size_t h)
{
- switch(chroma){
- case tmedia_chroma_rgb24:
- case tmedia_chroma_bgr24:
- return (w * h * 3);
- case tmedia_chroma_rgb565le:
- return ((w * h) << 1);
- case tmedia_chroma_rgb32:
- return ((w * h) << 2);
- case tmedia_chroma_nv21:
- return ((w * h * 3) >> 1);
- case tmedia_chroma_nv12:
- return ((w * h * 3) >> 1);
- case tmedia_chroma_yuv422p:
- return ((w * h) << 1);
- case tmedia_chroma_uyvy422:
- case tmedia_chroma_yuyv422:
- return ((w * h) << 1);
- case tmedia_chroma_yuv420p:
- return ((w * h * 3) >> 1);
- default:
- TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
- return 0;
- }
+ switch(chroma) {
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return (w * h * 3);
+ case tmedia_chroma_rgb565le:
+ return ((w * h) << 1);
+ case tmedia_chroma_rgb32:
+ return ((w * h) << 2);
+ case tmedia_chroma_nv21:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_nv12:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_yuv422p:
+ return ((w * h) << 1);
+ case tmedia_chroma_uyvy422:
+ case tmedia_chroma_yuyv422:
+ return ((w * h) << 1);
+ case tmedia_chroma_yuv420p:
+ return ((w * h * 3) >> 1);
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return 0;
+ }
}
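
The shifts in the switch above encode bytes per pixel: 4:2:0 formats (NV12, NV21, I420) take 1.5 bytes per pixel, 4:2:2 and RGB565 take 2, RGB24 takes 3 and RGB32 takes 4. As a worked example, a 640x480 I420 frame occupies 640 * 480 * 3 / 2 = 460,800 bytes. A small sketch with illustrative dimensions (the real converter uses the negotiated width/height):

    // Sketch: same size arithmetic as above, evaluated for an example 640x480 frame.
    const tsk_size_t w = 640, h = 480;
    const tsk_size_t size_i420  = (w * h * 3) >> 1; // 460800 bytes, 4:2:0 -> 1.5 bytes/pixel
    const tsk_size_t size_yuy2  = (w * h) << 1;     // 614400 bytes, 4:2:2 -> 2 bytes/pixel
    const tsk_size_t size_rgb32 = (w * h) << 2;     // 1228800 bytes, 4 bytes/pixel
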
static inline const GUID& _plugin_win_mf_converter_video_ms_get_pixfmt(tmedia_chroma_t chroma)
{
- switch(chroma){
- case tmedia_chroma_rgb24:
- case tmedia_chroma_bgr24:
- return MFVideoFormat_RGB24;
- case tmedia_chroma_rgb565le:
- return MFVideoFormat_RGB565;
- case tmedia_chroma_rgb32:
- return MFVideoFormat_RGB32;
- case tmedia_chroma_nv12:
- return MFVideoFormat_NV12;
- case tmedia_chroma_yuv420p:
- return MFVideoFormat_I420;
- case tmedia_chroma_yuyv422:
- return MFVideoFormat_YUY2;
- case tmedia_chroma_uyvy422:
- return MFVideoFormat_UYVY;
- default:
- TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
- return GUID_NULL;
- }
+ switch(chroma) {
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return MFVideoFormat_RGB24;
+ case tmedia_chroma_rgb565le:
+ return MFVideoFormat_RGB565;
+ case tmedia_chroma_rgb32:
+ return MFVideoFormat_RGB32;
+ case tmedia_chroma_nv12:
+ return MFVideoFormat_NV12;
+ case tmedia_chroma_yuv420p:
+ return MFVideoFormat_I420;
+ case tmedia_chroma_yuyv422:
+ return MFVideoFormat_YUY2;
+ case tmedia_chroma_uyvy422:
+ return MFVideoFormat_UYVY;
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return GUID_NULL;
+ }
}
// For RGB32:
@@ -438,72 +412,66 @@ static inline HRESULT _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
const BYTE* pSrc,
INT dwWidthInPixels,
INT dwHeightInPixels
- )
+)
{
- RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthInPixels * dwHeightInPixels) - dwWidthInPixels];
+ RGBQUAD *pSrcPixel = &((RGBQUAD*)pSrc)[(dwWidthInPixels * dwHeightInPixels) - dwWidthInPixels];
RGBQUAD *pDestPixel = &((RGBQUAD*)pDst)[0];
- register INT x;
- register INT y;
+ register INT x;
+ register INT y;
- for (y = dwHeightInPixels; y > 0 ; --y)
- {
- for (x = 0; x < dwWidthInPixels; ++x)
- {
+ for (y = dwHeightInPixels; y > 0 ; --y) {
+ for (x = 0; x < dwWidthInPixels; ++x) {
pDestPixel[x] = pSrcPixel[x];
}
pDestPixel += dwWidthInPixels;
pSrcPixel -= dwWidthInPixels;
}
- return S_OK;
+ return S_OK;
}
static HRESULT _plugin_win_mf_converter_video_ms_process_input(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample* pSample)
{
- return pSelf->pMFT->ProcessInput(0, pSample, 0);
+ return pSelf->pMFT->ProcessInput(0, pSample, 0);
}
static HRESULT _plugin_win_mf_converter_video_ms_process_output(plugin_win_mf_converter_video_ms_t* pSelf, IMFSample **ppSample)
{
- *ppSample = NULL;
+ *ppSample = NULL;
IMFMediaBuffer* pBufferOut = NULL;
DWORD dwStatus;
- HRESULT hr = S_OK;
-
+ HRESULT hr = S_OK;
+
MFT_OUTPUT_STREAM_INFO mftStreamInfo = { 0 };
MFT_OUTPUT_DATA_BUFFER mftOutputData = { 0 };
- CHECK_HR(hr = pSelf->pMFT->GetOutputStreamInfo(0, &mftStreamInfo));
-
- if(!pSelf->pSampleOut)
- {
- CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &pSelf->pSampleOut));
- hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut);
- if(FAILED(hr))
- {
- SafeRelease(&pSelf->pSampleOut);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut));
- CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < mftStreamInfo.cbSize)
- {
- CHECK_HR(hr = pSelf->pSampleOut->RemoveAllBuffers());
- SafeRelease(&pBufferOut);
- CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
- CHECK_HR(hr = pSelf->pSampleOut->AddBuffer(pBufferOut));
- }
- }
-
- CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
-
+ CHECK_HR(hr = pSelf->pMFT->GetOutputStreamInfo(0, &mftStreamInfo));
+
+ if(!pSelf->pSampleOut) {
+ CHECK_HR(hr = MFUtils::CreateMediaSample(mftStreamInfo.cbSize, &pSelf->pSampleOut));
+ hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut);
+ if(FAILED(hr)) {
+ SafeRelease(&pSelf->pSampleOut);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = pSelf->pSampleOut->GetBufferByIndex(0, &pBufferOut));
+ CHECK_HR(hr = pBufferOut->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < mftStreamInfo.cbSize) {
+ CHECK_HR(hr = pSelf->pSampleOut->RemoveAllBuffers());
+ SafeRelease(&pBufferOut);
+ CHECK_HR(hr = MFCreateMemoryBuffer(mftStreamInfo.cbSize, &pBufferOut));
+ CHECK_HR(hr = pSelf->pSampleOut->AddBuffer(pBufferOut));
+ }
+ }
+
+ CHECK_HR(hr = pBufferOut->SetCurrentLength(0));
+
//Set the output sample
mftOutputData.pSample = pSelf->pSampleOut;
//Set the output id
@@ -529,72 +497,64 @@ bail:
static HRESULT _plugin_win_mf_converter_video_ms_process(plugin_win_mf_converter_video_ms_t* pSelf, const void* pcInputPtr, UINT32 nInputSize, IMFSample **ppSampleOut)
{
- if(!pcInputPtr || !nInputSize || !ppSampleOut)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- return E_INVALIDARG;
- }
-
- *ppSampleOut = NULL;
-
- HRESULT hr = S_OK;
-
- IMFMediaBuffer* pBufferIn = NULL;
- BYTE* pBufferPtr = NULL;
-
- if(!pSelf->pSampleIn)
- {
- CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &pSelf->pSampleIn));
- hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn);
- if(FAILED(hr))
- {
- SafeRelease(&pSelf->pSampleIn);
- CHECK_HR(hr);
- }
- }
- else
- {
- DWORD dwMaxLength = 0;
- CHECK_HR(hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn));
- CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
- if(dwMaxLength < nInputSize)
- {
- CHECK_HR(hr = pSelf->pSampleIn->RemoveAllBuffers());
- SafeRelease(&pBufferIn);
- CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
- CHECK_HR(hr = pSelf->pSampleIn->AddBuffer(pBufferIn));
- }
- }
-
- CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
- memcpy(pBufferPtr, pcInputPtr, nInputSize);
- CHECK_HR(hr = pBufferIn->Unlock());
- CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
-
- CHECK_HR(hr = pSelf->pSampleIn->SetSampleDuration(pSelf->rtDuration));
- CHECK_HR(hr = pSelf->pSampleIn->SetSampleTime(pSelf->rtStart));
-
- hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
- while(hr == MF_E_NOTACCEPTING)
- {
- TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
- IMFSample* pSample = NULL;
- hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, &pSample);
- if(SUCCEEDED(hr) && pSample)
- {
- SafeRelease(ppSampleOut);
- *ppSampleOut = pSample, pSample = NULL;
-
- hr = pSelf->pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
- hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
- }
- }
- if(!*ppSampleOut)
- {
- CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, ppSampleOut));
- }
-
+ if(!pcInputPtr || !nInputSize || !ppSampleOut) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return E_INVALIDARG;
+ }
+
+ *ppSampleOut = NULL;
+
+ HRESULT hr = S_OK;
+
+ IMFMediaBuffer* pBufferIn = NULL;
+ BYTE* pBufferPtr = NULL;
+
+ if(!pSelf->pSampleIn) {
+ CHECK_HR(hr = MFUtils::CreateMediaSample(nInputSize, &pSelf->pSampleIn));
+ hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn);
+ if(FAILED(hr)) {
+ SafeRelease(&pSelf->pSampleIn);
+ CHECK_HR(hr);
+ }
+ }
+ else {
+ DWORD dwMaxLength = 0;
+ CHECK_HR(hr = pSelf->pSampleIn->GetBufferByIndex(0, &pBufferIn));
+ CHECK_HR(hr = pBufferIn->GetMaxLength(&dwMaxLength));
+ if(dwMaxLength < nInputSize) {
+ CHECK_HR(hr = pSelf->pSampleIn->RemoveAllBuffers());
+ SafeRelease(&pBufferIn);
+ CHECK_HR(hr = MFCreateMemoryBuffer(nInputSize, &pBufferIn));
+ CHECK_HR(hr = pSelf->pSampleIn->AddBuffer(pBufferIn));
+ }
+ }
+
+ CHECK_HR(hr = pBufferIn->Lock(&pBufferPtr, NULL, NULL));
+ memcpy(pBufferPtr, pcInputPtr, nInputSize);
+ CHECK_HR(hr = pBufferIn->Unlock());
+ CHECK_HR(hr = pBufferIn->SetCurrentLength(nInputSize));
+
+ CHECK_HR(hr = pSelf->pSampleIn->SetSampleDuration(pSelf->rtDuration));
+ CHECK_HR(hr = pSelf->pSampleIn->SetSampleTime(pSelf->rtStart));
+
+ hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
+ while(hr == MF_E_NOTACCEPTING) {
+ TSK_DEBUG_INFO("MF_E_NOTACCEPTING");
+ IMFSample* pSample = NULL;
+ hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, &pSample);
+ if(SUCCEEDED(hr) && pSample) {
+ SafeRelease(ppSampleOut);
+ *ppSampleOut = pSample, pSample = NULL;
+
+ hr = pSelf->pSampleIn->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
+ hr = _plugin_win_mf_converter_video_ms_process_input(pSelf, pSelf->pSampleIn);
+ }
+ }
+ if(!*ppSampleOut) {
+ CHECK_HR(hr = _plugin_win_mf_converter_video_ms_process_output(pSelf, ppSampleOut));
+ }
+
bail:
- SafeRelease(&pBufferIn);
- return hr;
+ SafeRelease(&pBufferIn);
+ return hr;
}
\ No newline at end of file
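
The loop in _plugin_win_mf_converter_video_ms_process() above follows the standard synchronous MFT contract: call ProcessInput() and, whenever the transform answers MF_E_NOTACCEPTING, drain pending output with ProcessOutput() before retrying the same input. A condensed, hedged sketch of that pattern; pMFT, pIn and ProduceOutputSample() are placeholders rather than symbols from this plugin:

    // Sketch: synchronous drive pattern for an IMFTransform.
    HRESULT hr = pMFT->ProcessInput(0, pIn, 0);
    while (hr == MF_E_NOTACCEPTING) {
        IMFSample* pOut = NULL;
        hr = ProduceOutputSample(pMFT, &pOut); // placeholder wrapping GetOutputStreamInfo()/ProcessOutput()
        if (SUCCEEDED(hr) && pOut) {
            // hand pOut to the caller, then retry the pending input sample
            hr = pMFT->ProcessInput(0, pIn, 0);
        }
    }
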
diff --git a/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx b/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
index 2a3c314..5745b24 100755
--- a/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_producer_audio.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -28,12 +28,11 @@
static void* TSK_STDCALL RunSessionThread(void *pArg);
-typedef struct plugin_win_mf_producer_audio_s
-{
- TDAV_DECLARE_PRODUCER_AUDIO;
+typedef struct plugin_win_mf_producer_audio_s {
+ TDAV_DECLARE_PRODUCER_AUDIO;
- bool bStarted;
- tsk_thread_handle_t* ppTread[1];
+ bool bStarted;
+ tsk_thread_handle_t* ppTread[1];
DeviceListAudio* pDeviceList;
@@ -48,162 +47,162 @@ plugin_win_mf_producer_audio_t;
/* ============ Media Producer Interface ================= */
static int plugin_win_mf_producer_audio_set(tmedia_producer_t* self, const tmedia_param_t* param)
-{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
- if(param->plugin_type == tmedia_ppt_producer){
- }
- return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(pSelf), param);
+{
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+ if(param->plugin_type == tmedia_ppt_producer) {
+ }
+ return tdav_producer_audio_set(TDAV_PRODUCER_AUDIO(pSelf), param);
}
static int plugin_win_mf_producer_audio_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
-
- if(!pSelf || !codec){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- TMEDIA_PRODUCER(pSelf)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
- TMEDIA_PRODUCER(pSelf)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
- TMEDIA_PRODUCER(pSelf)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
-
- TSK_DEBUG_INFO("MF audio producer: channels=%d, rate=%d, ptime=%d",
- TMEDIA_PRODUCER(pSelf)->audio.channels,
- TMEDIA_PRODUCER(pSelf)->audio.rate,
- TMEDIA_PRODUCER(pSelf)->audio.ptime
- );
-
- HRESULT hr = S_OK;
-
- // create device list object
- if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListAudio())){
- TSK_DEBUG_ERROR("Failed to create device list");
- hr = E_OUTOFMEMORY;
- goto bail;
- }
- // enumerate devices
- hr = pSelf->pDeviceList->EnumerateDevices();
- if(!SUCCEEDED(hr)){
- goto bail;
- }
-
- // check if we have at least one MF video source connected to the PC
- if(pSelf->pDeviceList->Count() == 0){
- TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
- // do not break the negotiation as one-way video connection is a valid use-case
- }
- else{
- IMFActivate* pActivate = NULL;
- // Get best MF audio source
- hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
- if(!SUCCEEDED(hr) || !pActivate){
- TSK_DEBUG_ERROR("Failed to get best MF audio source");
- if(!pActivate){
- hr = E_OUTOFMEMORY;
- }
- goto bail;
- }
-
- // Create the media source for the device.
- hr = pActivate->ActivateObject(
- __uuidof(IMFMediaSource),
- (void**)&pSelf->pSource
- );
- SafeRelease(&pActivate);
- if(!SUCCEEDED(hr)){
- TSK_DEBUG_ERROR("ActivateObject(MF audio source) failed");
- goto bail;
- }
-
- // Create and configure the media type
- CHECK_HR(hr = MFCreateMediaType(&pSelf->pType));
- CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
- CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, TMEDIA_PRODUCER(pSelf)->audio.channels));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, TMEDIA_PRODUCER(pSelf)->audio.rate));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // because uncompressed media type
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
- UINT32 nBlockAlign = TMEDIA_PRODUCER(pSelf)->audio.channels * (TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample >> 3);
- UINT32 nAvgBytesPerSec = (nBlockAlign * TMEDIA_PRODUCER(pSelf)->audio.rate);
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, nBlockAlign));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, nAvgBytesPerSec));
-
- // Create the sample grabber sink.
- CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
- CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pType, pSelf->pCallback, &pSelf->pSinkActivate));
-
- // To run as fast as possible, set this attribute (requires Windows 7):
- CHECK_HR(hr = pSelf->pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
-
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(NULL, &pSelf->pSession));
-
- // Create the topology.
- CHECK_HR(hr = MFUtils::CreateTopology(pSelf->pSource, NULL/*NO ENCODER*/, pSelf->pSinkActivate, NULL/*Preview*/, pSelf->pType, &pSelf->pTopology));
- }
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+ if(!pSelf || !codec) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_PRODUCER(pSelf)->audio.channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(codec);
+ TMEDIA_PRODUCER(pSelf)->audio.rate = TMEDIA_CODEC_RATE_ENCODING(codec);
+ TMEDIA_PRODUCER(pSelf)->audio.ptime = TMEDIA_CODEC_PTIME_AUDIO_ENCODING(codec);
+
+ TSK_DEBUG_INFO("MF audio producer: channels=%d, rate=%d, ptime=%d",
+ TMEDIA_PRODUCER(pSelf)->audio.channels,
+ TMEDIA_PRODUCER(pSelf)->audio.rate,
+ TMEDIA_PRODUCER(pSelf)->audio.ptime
+ );
+
+ HRESULT hr = S_OK;
+
+ // create device list object
+ if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListAudio())) {
+ TSK_DEBUG_ERROR("Failed to create device list");
+ hr = E_OUTOFMEMORY;
+ goto bail;
+ }
+ // enumerate devices
+ hr = pSelf->pDeviceList->EnumerateDevices();
+ if(!SUCCEEDED(hr)) {
+ goto bail;
+ }
+
+    // check if we have at least one MF audio source connected to the PC
+    if(pSelf->pDeviceList->Count() == 0) {
+        TSK_DEBUG_WARN("No MF audio source could be found...no audio will be sent");
+        // do not break the negotiation as a one-way audio connection is a valid use-case
+ }
+ else {
+ IMFActivate* pActivate = NULL;
+ // Get best MF audio source
+ hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+ if(!SUCCEEDED(hr) || !pActivate) {
+ TSK_DEBUG_ERROR("Failed to get best MF audio source");
+ if(!pActivate) {
+ hr = E_OUTOFMEMORY;
+ }
+ goto bail;
+ }
+
+ // Create the media source for the device.
+ hr = pActivate->ActivateObject(
+ __uuidof(IMFMediaSource),
+ (void**)&pSelf->pSource
+ );
+ SafeRelease(&pActivate);
+ if(!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("ActivateObject(MF audio source) failed");
+ goto bail;
+ }
+
+ // Create and configure the media type
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pType));
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, TMEDIA_PRODUCER(pSelf)->audio.channels));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, TMEDIA_PRODUCER(pSelf)->audio.rate));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)); // because uncompressed media type
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
+ UINT32 nBlockAlign = TMEDIA_PRODUCER(pSelf)->audio.channels * (TMEDIA_PRODUCER(pSelf)->audio.bits_per_sample >> 3);
+ UINT32 nAvgBytesPerSec = (nBlockAlign * TMEDIA_PRODUCER(pSelf)->audio.rate);
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, nBlockAlign));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, nAvgBytesPerSec));
+
+ // Create the sample grabber sink.
+ CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
+ CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pType, pSelf->pCallback, &pSelf->pSinkActivate));
+
+ // To run as fast as possible, set this attribute (requires Windows 7):
+ CHECK_HR(hr = pSelf->pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(NULL, &pSelf->pSession));
+
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(pSelf->pSource, NULL/*NO ENCODER*/, pSelf->pSinkActivate, NULL/*Preview*/, pSelf->pType, &pSelf->pTopology));
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
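
The nBlockAlign/nAvgBytesPerSec computation above is the usual PCM bookkeeping: blockAlign = channels * (bitsPerSample / 8) and avgBytesPerSec = blockAlign * sampleRate. A hedged numeric example with typical narrowband values (1 channel, 16-bit, 8000 Hz; the real values come from the negotiated codec):

    // Sketch: PCM block alignment arithmetic with illustrative values.
    const UINT32 channels = 1, bitsPerSample = 16, sampleRate = 8000;
    const UINT32 blockAlign     = channels * (bitsPerSample >> 3); // 2 bytes per audio frame
    const UINT32 avgBytesPerSec = blockAlign * sampleRate;         // 16000 bytes/s
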
static int plugin_win_mf_producer_audio_start(tmedia_producer_t* self)
{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(pSelf->bStarted){
- TSK_DEBUG_INFO("MF audio producer already started");
- return 0;
- }
-
- HRESULT hr = S_OK;
-
- // Run the media session.
- CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
-
- // Start asynchronous watcher thread
- pSelf->bStarted = true;
- int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
- if(ret != 0) {
- TSK_DEBUG_ERROR("Failed to create thread");
- hr = E_FAIL;
- pSelf->bStarted = false;
- if(pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
- goto bail;
- }
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF audio producer already started");
+ return 0;
+ }
+
+ HRESULT hr = S_OK;
+
+ // Run the media session.
+ CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+
+ // Start asynchronous watcher thread
+ pSelf->bStarted = true;
+ int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+ if(ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ hr = E_FAIL;
+ pSelf->bStarted = false;
+ if(pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
+ goto bail;
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_producer_audio_pause(tmedia_producer_t* self)
{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+ HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_producer_audio_stop(tmedia_producer_t* self)
{
- plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
-
- if(!pSelf){
+ plugin_win_mf_producer_audio_t* pSelf = (plugin_win_mf_producer_audio_t*)self;
+
+ if(!pSelf) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
@@ -213,7 +212,7 @@ static int plugin_win_mf_producer_audio_stop(tmedia_producer_t* self)
// for the thread
pSelf->bStarted = false;
hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
- if(pSelf->ppTread[0]){
+ if(pSelf->ppTread[0]) {
tsk_thread_join(&pSelf->ppTread[0]);
}
hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
@@ -228,37 +227,37 @@ static int plugin_win_mf_producer_audio_stop(tmedia_producer_t* self)
/* constructor */
static tsk_object_t* plugin_win_mf_producer_audio_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t*)self;
- if(pSelf){
- /* init base */
- tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(pSelf));
- /* init self */
-
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t*)self;
+ if(pSelf) {
+ /* init base */
+ tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(pSelf));
+ /* init self */
+
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_producer_audio_dtor(tsk_object_t * self)
-{
- plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)self;
- if(pSelf){
- /* stop */
- if(pSelf->bStarted){
- plugin_win_mf_producer_audio_stop(TMEDIA_PRODUCER(pSelf));
- }
-
- /* deinit base */
- tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(pSelf));
- /* deinit self */
- if(pSelf->pDeviceList){
- delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+{
+ plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)self;
+ if(pSelf) {
+ /* stop */
+ if(pSelf->bStarted) {
+ plugin_win_mf_producer_audio_stop(TMEDIA_PRODUCER(pSelf));
}
- if(pSelf->pSource){
- pSelf->pSource->Shutdown();
+
+ /* deinit base */
+ tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(pSelf));
+ /* deinit self */
+ if(pSelf->pDeviceList) {
+ delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+ }
+ if(pSelf->pSource) {
+ pSelf->pSource->Shutdown();
}
- if(pSelf->pSession){
+ if(pSelf->pSession) {
pSelf->pSession->Shutdown();
}
@@ -268,31 +267,29 @@ static tsk_object_t* plugin_win_mf_producer_audio_dtor(tsk_object_t * self)
SafeRelease(&pSelf->pSinkActivate);
SafeRelease(&pSelf->pTopology);
SafeRelease(&pSelf->pType);
- }
+ }
- return self;
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_producer_audio_def_s =
-{
- sizeof(plugin_win_mf_producer_audio_t),
- plugin_win_mf_producer_audio_ctor,
- plugin_win_mf_producer_audio_dtor,
- tdav_producer_audio_cmp,
+static const tsk_object_def_t plugin_win_mf_producer_audio_def_s = {
+ sizeof(plugin_win_mf_producer_audio_t),
+ plugin_win_mf_producer_audio_ctor,
+ plugin_win_mf_producer_audio_dtor,
+ tdav_producer_audio_cmp,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t plugin_win_mf_producer_audio_plugin_def_s =
-{
- &plugin_win_mf_producer_audio_def_s,
-
- tmedia_audio,
- "Media Foundation audio producer",
-
- plugin_win_mf_producer_audio_set,
- plugin_win_mf_producer_audio_prepare,
- plugin_win_mf_producer_audio_start,
- plugin_win_mf_producer_audio_pause,
- plugin_win_mf_producer_audio_stop
+static const tmedia_producer_plugin_def_t plugin_win_mf_producer_audio_plugin_def_s = {
+ &plugin_win_mf_producer_audio_def_s,
+
+ tmedia_audio,
+ "Media Foundation audio producer",
+
+ plugin_win_mf_producer_audio_set,
+ plugin_win_mf_producer_audio_prepare,
+ plugin_win_mf_producer_audio_start,
+ plugin_win_mf_producer_audio_pause,
+ plugin_win_mf_producer_audio_stop
};
const tmedia_producer_plugin_def_t *plugin_win_mf_producer_audio_plugin_def_t = &plugin_win_mf_producer_audio_plugin_def_s;
@@ -300,34 +297,32 @@ const tmedia_producer_plugin_def_t *plugin_win_mf_producer_audio_plugin_def_t =
// Run session async thread
static void* TSK_STDCALL RunSessionThread(void *pArg)
{
- plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
- MediaEventType met;
-
- TSK_DEBUG_INFO("RunSessionThread (audio) - ENTER");
-
- while(pSelf->bStarted){
- CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- CHECK_HR(hr = pEvent->GetType(&met));
-
- if (FAILED(hrStatus))
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
- if (met == MESessionEnded)
- {
- break;
- }
- SafeRelease(&pEvent);
- }
+ plugin_win_mf_producer_audio_t *pSelf = (plugin_win_mf_producer_audio_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (audio) - ENTER");
+
+ while(pSelf->bStarted) {
+ CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus)) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded) {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread (audio) - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread (audio) - EXIT");
- return NULL;
+ return NULL;
}
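
MFUtils::RunSession() itself is not part of this diff; the watcher thread above only relies on the standard Media Foundation startup sequence, which presumably looks roughly like the sketch below (hedged; the real helper may add more checks):

    // Sketch: queue the topology on the session and start playback at the current position.
    HRESULT hr = pSession->SetTopology(0, pTopology);
    if (SUCCEEDED(hr)) {
        PROPVARIANT varStart;
        PropVariantInit(&varStart);                  // VT_EMPTY -> start from current position
        hr = pSession->Start(&GUID_NULL, &varStart);
        PropVariantClear(&varStart);
    }
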
diff --git a/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx b/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
index f20f9e4..e4bf690 100755
--- a/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_producer_video.cxx
@@ -50,7 +50,7 @@
#endif /* PLUGIN_MF_GOP_SIZE_IN_SECONDS */
DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
- 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
extern const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t;
extern const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t;
@@ -61,548 +61,527 @@ static int _plugin_win_mf_producer_video_unprepare(struct plugin_win_mf_producer
//
// plugin_win_mf_producer_video_t
//
-typedef struct plugin_win_mf_producer_video_s
-{
- TMEDIA_DECLARE_PRODUCER;
-
- bool bStarted, bPrepared, bMuted;
- tsk_thread_handle_t* ppTread[1];
- HWND hWndPreview;
-
- int32_t bitrate_bps; // used when encoder bundled only
-
- DeviceListVideo* pDeviceList;
-
- MFCodecVideo *pEncoder;
- IMFMediaSession *pSession;
- IMFMediaSource *pSource;
- SampleGrabberCB *pCallback;
- IMFActivate *pSinkGrabber;
- IMFActivate *pSinkActivatePreview;
- DisplayWatcher* pWatcherPreview;
- IMFTopology *pTopology;
- IMFMediaType *pGrabberInputType;
+typedef struct plugin_win_mf_producer_video_s {
+ TMEDIA_DECLARE_PRODUCER;
+
+ bool bStarted, bPrepared, bMuted;
+ tsk_thread_handle_t* ppTread[1];
+ HWND hWndPreview;
+
+ int32_t bitrate_bps; // used when encoder bundled only
+
+ DeviceListVideo* pDeviceList;
+
+ MFCodecVideo *pEncoder;
+ IMFMediaSession *pSession;
+ IMFMediaSource *pSource;
+ SampleGrabberCB *pCallback;
+ IMFActivate *pSinkGrabber;
+ IMFActivate *pSinkActivatePreview;
+ DisplayWatcher* pWatcherPreview;
+ IMFTopology *pTopology;
+ IMFMediaType *pGrabberInputType;
}
plugin_win_mf_producer_video_t;
/* ============ Video MF Producer Interface ================= */
static int plugin_win_mf_producer_video_set(tmedia_producer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- HRESULT hr = S_OK;
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
-
- if (!pSelf || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (tsk_striequals(param->key, "action")){
- tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
- HRESULT hr = S_OK;
- switch (action){
- case tmedia_codec_action_encode_idr:
- {
- if (pSelf->pEncoder)
- {
- CHECK_HR(hr = pSelf->pEncoder->RequestKeyFrame());
- }
- break;
- }
- case tmedia_codec_action_bw_down:
- {
- pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps << 1) / 3), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
- TSK_DEBUG_INFO("New target bitrate = %d kbps", pSelf->bitrate_bps);
- if (pSelf->pEncoder)
- {
- CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
- }
- break;
- }
- case tmedia_codec_action_bw_up:
- {
- pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps * 3) >> 1), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
- TSK_DEBUG_INFO("New target bitrate = %d kbps", pSelf->bitrate_bps);
- if (pSelf->pEncoder)
- {
- CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
- }
- break;
- }
- }
- }
- else if (param->value_type == tmedia_pvt_int64){
- if (tsk_striequals(param->key, "local-hwnd")){
- HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
- if (hWnd != pSelf->hWndPreview)
- {
- pSelf->hWndPreview = hWnd;
- if (pSelf->pWatcherPreview)
- {
- CHECK_HR(hr = pSelf->pWatcherPreview->SetHwnd(hWnd));
- }
- }
- }
- }
- else if (param->value_type == tmedia_pvt_int32){
- if (tsk_striequals(param->key, "mute")){
- pSelf->bMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
- if (pSelf->pCallback) {
- pSelf->pCallback->SetMute(pSelf->bMuted);
- }
+ int ret = 0;
+ HRESULT hr = S_OK;
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+ if (!pSelf || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (tsk_striequals(param->key, "action")) {
+ tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
+ HRESULT hr = S_OK;
+ switch (action) {
+ case tmedia_codec_action_encode_idr: {
+ if (pSelf->pEncoder) {
+ CHECK_HR(hr = pSelf->pEncoder->RequestKeyFrame());
+ }
+ break;
+ }
+ case tmedia_codec_action_bw_down: {
+ pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps << 1) / 3), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
+            TSK_DEBUG_INFO("New target bitrate = %d bps", pSelf->bitrate_bps);
+ if (pSelf->pEncoder) {
+ CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
+ }
+ break;
+ }
+ case tmedia_codec_action_bw_up: {
+ pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps * 3) >> 1), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
+            TSK_DEBUG_INFO("New target bitrate = %d bps", pSelf->bitrate_bps);
+ if (pSelf->pEncoder) {
+ CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
+ }
+ break;
+ }
+ }
+ }
+ else if (param->value_type == tmedia_pvt_int64) {
+ if (tsk_striequals(param->key, "local-hwnd")) {
+ HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
+ if (hWnd != pSelf->hWndPreview) {
+ pSelf->hWndPreview = hWnd;
+ if (pSelf->pWatcherPreview) {
+ CHECK_HR(hr = pSelf->pWatcherPreview->SetHwnd(hWnd));
+ }
+ }
+ }
+ }
+ else if (param->value_type == tmedia_pvt_int32) {
+ if (tsk_striequals(param->key, "mute")) {
+ pSelf->bMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
+ if (pSelf->pCallback) {
+ pSelf->pCallback->SetMute(pSelf->bMuted);
+ }
#if 0
- if (pSelf->bStarted && pSelf->pSession) {
- if (pSelf->bMuted) {
- pSelf->pSession->Pause();
- }
- else {
- CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
- }
- }
+ if (pSelf->bStarted && pSelf->pSession) {
+ if (pSelf->bMuted) {
+ pSelf->pSession->Pause();
+ }
+ else {
+ CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+ }
+ }
#endif
- }
- else if (tsk_striequals(param->key, "create-on-current-thead")){
- //producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
- }
- else if (tsk_striequals(param->key, "plugin-firefox")){
- //producer->plugin_firefox = (*((int32_t*)param->value) != 0);
- //if(producer->grabber){
- // producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
- //}
- }
- }
+ }
+ else if (tsk_striequals(param->key, "create-on-current-thead")) {
+ //producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
+ }
+ else if (tsk_striequals(param->key, "plugin-firefox")) {
+ //producer->plugin_firefox = (*((int32_t*)param->value) != 0);
+ //if(producer->grabber){
+ // producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
+ //}
+ }
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
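
The bandwidth actions above implement a simple multiplicative ramp: tmedia_codec_action_bw_down scales the current target by 2/3 ((x << 1) / 3) and tmedia_codec_action_bw_up by 3/2 ((x * 3) >> 1), both clamped to bandwidth_max_upload. A hedged numeric example with an illustrative starting bitrate:

    // Sketch: the bw_down / bw_up arithmetic for a 512 kbps starting target.
    int32_t bitrate_bps = 512000;               // example only
    int32_t down = (bitrate_bps << 1) / 3;      // 341333 bps (x 2/3)
    int32_t up   = (bitrate_bps * 3) >> 1;      // 768000 bps (x 3/2)
    // each result is then clamped: TSK_CLAMP(0, value, bandwidth_max_upload)
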
static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
-
- if (!pSelf || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if (pSelf->bPrepared){
- TSK_DEBUG_WARN("MF video producer already prepared");
- return -1;
- }
-
- // FIXME: DirectShow requires flipping but not MF
- // The Core library always tries to flip when OSType==Win32. Must be changed
- TMEDIA_CODEC_VIDEO(codec)->out.flip = tsk_false;
-
- TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
- TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
- TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
- TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
-
- TSK_DEBUG_INFO("MF video producer: fps=%d, width=%d, height=%d",
- TMEDIA_PRODUCER(pSelf)->video.fps,
- TMEDIA_PRODUCER(pSelf)->video.width,
- TMEDIA_PRODUCER(pSelf)->video.height);
-
- HRESULT hr = S_OK;
- IMFAttributes* pSessionAttributes = NULL;
- IMFTopology *pTopology = NULL;
- IMFMediaSink* pEvr = NULL;
- IMFMediaType* pEncoderInputType = NULL;
- IMFTopologyNode *pNodeGrabber = NULL;
- IMFMediaType* pGrabberNegotiatedInputMedia = NULL;
- BOOL bVideoProcessorIsSupported = FALSE;
- const VideoSubTypeGuidPair *pcPreferredSubTypeGuidPair = NULL;
-
- // create device list object
- if (!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListVideo())){
- TSK_DEBUG_ERROR("Failed to create device list");
- hr = E_OUTOFMEMORY;
- goto bail;
- }
- // enumerate devices
- hr = pSelf->pDeviceList->EnumerateDevices();
- if (!SUCCEEDED(hr)){
- goto bail;
- }
-
- // check if we have at least one MF video source connected to the PC
- if (pSelf->pDeviceList->Count() == 0){
- TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
- // do not break the negotiation as one-way video connection is a valid use-case
- }
- else{
- // Get best MF video source
- IMFActivate* pActivate = NULL;
- const char* pczSrcFriendlyName = tmedia_producer_get_friendly_name(tmedia_video);
- if (!tsk_strnullORempty(pczSrcFriendlyName)) {
- TSK_DEBUG_INFO("MF pref. video source = %s", pczSrcFriendlyName);
- wchar_t pczwSrcFriendlyName[MAX_PATH] = { 0 };
- mbstowcs(pczwSrcFriendlyName, pczSrcFriendlyName, sizeof(pczwSrcFriendlyName) / sizeof(pczwSrcFriendlyName[0]));
- hr = pSelf->pDeviceList->GetDeviceBest(&pActivate, pczwSrcFriendlyName);
- }
- else {
- hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
- }
- if (!SUCCEEDED(hr) || !pActivate){
- TSK_DEBUG_ERROR("Failed to get best MF video source");
- if (!pActivate){
- hr = E_OUTOFMEMORY;
- }
- goto bail;
- }
-
- // Create the media source for the device.
- hr = pActivate->ActivateObject(
- __uuidof(IMFMediaSource),
- (void**)&pSelf->pSource
- );
- SafeRelease(&pActivate);
- if (!SUCCEEDED(hr)){
- TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
- goto bail;
- }
-
- // Check whether video processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) is supported
- CHECK_HR(hr = MFUtils::IsVideoProcessorSupported(&bVideoProcessorIsSupported));
-
- // Must not be set because not supported by Frame Rate Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx).aspx) because of color (neither I420 nor NV12)
- // Video Processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) supports both NV12 and I420
- if (!bVideoProcessorIsSupported) {
- UINT32 nWidth, nHeight, nFps;
- hr = MFUtils::GetBestFormat(
- pSelf->pSource,
- &MFVideoFormat_I420,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
- &nWidth,
- &nHeight,
- &nFps,
- &pcPreferredSubTypeGuidPair
- );
- if (SUCCEEDED(hr))
- {
- TSK_DEBUG_INFO("Video processor not supported...using source fps=%u, width=%u, height=%u", nFps, nWidth, nHeight);
- TMEDIA_PRODUCER(pSelf)->video.width = nWidth;
- TMEDIA_PRODUCER(pSelf)->video.height = nHeight;
- TMEDIA_PRODUCER(pSelf)->video.fps = nFps;
- }
- }
-
- // If H.264 is negotiated for this session then, try to find hardware encoder
- // If no HW encoder is found will fallback to SW implementation from x264
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+    if (!pSelf || !codec || !codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if (pSelf->bPrepared) {
+ TSK_DEBUG_WARN("MF video producer already prepared");
+ return -1;
+ }
+
+ // FIXME: DirectShow requires flipping but not MF
+ // The Core library always tries to flip when OSType==Win32. Must be changed
+ TMEDIA_CODEC_VIDEO(codec)->out.flip = tsk_false;
+
+ TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+ TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+ TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+
+ TSK_DEBUG_INFO("MF video producer: fps=%d, width=%d, height=%d",
+ TMEDIA_PRODUCER(pSelf)->video.fps,
+ TMEDIA_PRODUCER(pSelf)->video.width,
+ TMEDIA_PRODUCER(pSelf)->video.height);
+
+ HRESULT hr = S_OK;
+ IMFAttributes* pSessionAttributes = NULL;
+ IMFTopology *pTopology = NULL;
+ IMFMediaSink* pEvr = NULL;
+ IMFMediaType* pEncoderInputType = NULL;
+ IMFTopologyNode *pNodeGrabber = NULL;
+ IMFMediaType* pGrabberNegotiatedInputMedia = NULL;
+ BOOL bVideoProcessorIsSupported = FALSE;
+ const VideoSubTypeGuidPair *pcPreferredSubTypeGuidPair = NULL;
+
+ // create device list object
+ if (!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListVideo())) {
+ TSK_DEBUG_ERROR("Failed to create device list");
+ hr = E_OUTOFMEMORY;
+ goto bail;
+ }
+ // enumerate devices
+ hr = pSelf->pDeviceList->EnumerateDevices();
+ if (!SUCCEEDED(hr)) {
+ goto bail;
+ }
+
+ // check if we have at least one MF video source connected to the PC
+ if (pSelf->pDeviceList->Count() == 0) {
+ TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
+ // do not break the negotiation as one-way video connection is a valid use-case
+ }
+ else {
+ // Get best MF video source
+ IMFActivate* pActivate = NULL;
+ const char* pczSrcFriendlyName = tmedia_producer_get_friendly_name(tmedia_video);
+ if (!tsk_strnullORempty(pczSrcFriendlyName)) {
+ TSK_DEBUG_INFO("MF pref. video source = %s", pczSrcFriendlyName);
+ wchar_t pczwSrcFriendlyName[MAX_PATH] = { 0 };
+ mbstowcs(pczwSrcFriendlyName, pczSrcFriendlyName, sizeof(pczwSrcFriendlyName) / sizeof(pczwSrcFriendlyName[0]));
+ hr = pSelf->pDeviceList->GetDeviceBest(&pActivate, pczwSrcFriendlyName);
+ }
+ else {
+ hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+ }
+ if (!SUCCEEDED(hr) || !pActivate) {
+ TSK_DEBUG_ERROR("Failed to get best MF video source");
+ if (!pActivate) {
+ hr = E_OUTOFMEMORY;
+ }
+ goto bail;
+ }
+
+ // Create the media source for the device.
+ hr = pActivate->ActivateObject(
+ __uuidof(IMFMediaSource),
+ (void**)&pSelf->pSource
+ );
+ SafeRelease(&pActivate);
+ if (!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
+ goto bail;
+ }
+
+ // Check whether video processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) is supported
+ CHECK_HR(hr = MFUtils::IsVideoProcessorSupported(&bVideoProcessorIsSupported));
+
+        // Must not be used here: the Frame Rate Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) does not support the required color formats (neither I420 nor NV12)
+ // Video Processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) supports both NV12 and I420
+ if (!bVideoProcessorIsSupported) {
+ UINT32 nWidth, nHeight, nFps;
+ hr = MFUtils::GetBestFormat(
+ pSelf->pSource,
+ &MFVideoFormat_I420,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
+ &nWidth,
+ &nHeight,
+ &nFps,
+ &pcPreferredSubTypeGuidPair
+ );
+ if (SUCCEEDED(hr)) {
+ TSK_DEBUG_INFO("Video processor not supported...using source fps=%u, width=%u, height=%u", nFps, nWidth, nHeight);
+ TMEDIA_PRODUCER(pSelf)->video.width = nWidth;
+ TMEDIA_PRODUCER(pSelf)->video.height = nHeight;
+ TMEDIA_PRODUCER(pSelf)->video.fps = nFps;
+ }
+ }
+
+ // If H.264 is negotiated for this session then, try to find hardware encoder
+ // If no HW encoder is found will fallback to SW implementation from x264
#if PLUGIN_MF_PV_BUNDLE_CODEC
- // Before embedding a H.264 encoder we have to be sure that:
- // - Low latency is supported
- // - The user decided to use MF encoder (Microsoft, Intel Quick Sync or any other)
- if ((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
- BOOL bMFEncoderIsRegistered =
- (codec->id == tmedia_codec_id_h264_mp && codec->plugin == mf_codec_h264_main_plugin_def_t)
- || (codec->id == tmedia_codec_id_h264_bp && codec->plugin == mf_codec_h264_base_plugin_def_t);
- if (bMFEncoderIsRegistered)
- {
- // both Microsoft and Intel encoders support NV12 only as input
- // static const BOOL kIsEncoder = TRUE;
- // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pSelf->pEncoder);
- pSelf->pEncoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder);
- if (pSelf->pEncoder)
- {
- pSelf->pEncoder->setBundled(TRUE);
- int32_t avg_bitrate_kbps = tmedia_get_video_bandwidth_kbps_2((unsigned int)TMEDIA_PRODUCER(pSelf)->video.width, (unsigned int)TMEDIA_PRODUCER(pSelf)->video.height, TMEDIA_PRODUCER(pSelf)->video.fps);
- TSK_DEBUG_INFO("MF_MT_AVG_BITRATE defined with value = %d kbps", avg_bitrate_kbps);
- pSelf->bitrate_bps = (avg_bitrate_kbps * 1024);
-
- hr = pSelf->pEncoder->Initialize(
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
- (UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
- (UINT32)pSelf->bitrate_bps);
- if (SUCCEEDED(hr))
- {
- /*hr =*/ pSelf->pEncoder->SetGOPSize((PLUGIN_MF_GOP_SIZE_IN_SECONDS * TMEDIA_PRODUCER(pSelf)->video.fps));
- }
- if (FAILED(hr))
- {
- SafeRelease(&pSelf->pEncoder);
- hr = S_OK;
- }
- }
- if (SUCCEEDED(hr) && pSelf->pEncoder)
- {
- TMEDIA_PRODUCER(pSelf)->encoder.codec_id = codec->id; // means encoded frames as input
- }
- else
- {
- SafeRelease(&pSelf->pEncoder);
- TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
- }
- }
- else /* if(!bMFEncoderIsRegistered) */
- {
- TSK_DEBUG_INFO("Not bundling MF H.264 encoder even if low latency is supported because another implementation is registered: %s", codec->plugin->desc);
- }
- }
+ // Before embedding a H.264 encoder we have to be sure that:
+ // - Low latency is supported
+ // - The user decided to use MF encoder (Microsoft, Intel Quick Sync or any other)
+ if ((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
+ BOOL bMFEncoderIsRegistered =
+ (codec->id == tmedia_codec_id_h264_mp && codec->plugin == mf_codec_h264_main_plugin_def_t)
+ || (codec->id == tmedia_codec_id_h264_bp && codec->plugin == mf_codec_h264_base_plugin_def_t);
+ if (bMFEncoderIsRegistered) {
+ // both Microsoft and Intel encoders support NV12 only as input
+ // static const BOOL kIsEncoder = TRUE;
+ // hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pSelf->pEncoder);
+ pSelf->pEncoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder);
+ if (pSelf->pEncoder) {
+ pSelf->pEncoder->setBundled(TRUE);
+ int32_t avg_bitrate_kbps = tmedia_get_video_bandwidth_kbps_2((unsigned int)TMEDIA_PRODUCER(pSelf)->video.width, (unsigned int)TMEDIA_PRODUCER(pSelf)->video.height, TMEDIA_PRODUCER(pSelf)->video.fps);
+ TSK_DEBUG_INFO("MF_MT_AVG_BITRATE defined with value = %d kbps", avg_bitrate_kbps);
+ pSelf->bitrate_bps = (avg_bitrate_kbps * 1024);
+
+ hr = pSelf->pEncoder->Initialize(
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
+ (UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
+ (UINT32)pSelf->bitrate_bps);
+ if (SUCCEEDED(hr)) {
+ /*hr =*/ pSelf->pEncoder->SetGOPSize((PLUGIN_MF_GOP_SIZE_IN_SECONDS * TMEDIA_PRODUCER(pSelf)->video.fps));
+ }
+ if (FAILED(hr)) {
+ SafeRelease(&pSelf->pEncoder);
+ hr = S_OK;
+ }
+ }
+ if (SUCCEEDED(hr) && pSelf->pEncoder) {
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = codec->id; // means encoded frames as input
+ }
+ else {
+ SafeRelease(&pSelf->pEncoder);
+ TSK_DEBUG_WARN("Failed to find H.264 HW encoder...fallback to SW implementation");
+ }
+ }
+ else { /* if(!bMFEncoderIsRegistered) */
+ TSK_DEBUG_INFO("Not bundling MF H.264 encoder even if low latency is supported because another implementation is registered: %s", codec->plugin->desc);
+ }
+ }
#endif
- // Set session attributes
- CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
- CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
-
- // Configure the media type that the Sample Grabber will receive.
- // Setting the major and subtype is usually enough for the topology loader
- // to resolve the topology.
-
- CHECK_HR(hr = MFCreateMediaType(&pSelf->pGrabberInputType));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
-
- CHECK_HR(hr = MFSetAttributeSize(pSelf->pGrabberInputType, MF_MT_FRAME_SIZE, (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, (UINT32)TMEDIA_PRODUCER(pSelf)->video.height));
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_FRAME_RATE, TMEDIA_PRODUCER(pSelf)->video.fps, 1));
-
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, pSelf->pEncoder ? FALSE : TRUE));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, pSelf->pEncoder ? FALSE : TRUE));
- if (pSelf->pEncoder) {
- switch (codec->id){
- case tmedia_codec_id_h264_bp: case tmedia_codec_id_h264_mp:
- {
- CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_MPEG2_PROFILE, (codec->id == tmedia_codec_id_h264_bp) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
- CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_AVG_BITRATE, pSelf->bitrate_bps));
- break;
- }
- default:
- {
- TSK_DEBUG_ERROR("HW encoder with id = %d not expected", codec->id);
- assert(false);
- }
- }
- TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
- TSK_DEBUG_INFO("MF video producer chroma = NV12 (because of HW encoder)");
- }
- else {
- // Video Processors will be inserted in the topology if the source cannot produce I420 frames
- // IMPORTANT: Must not be NV12 because not supported by Video Resizer DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx)
- CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->fourcc : MFVideoFormat_I420));
- TMEDIA_PRODUCER(pSelf)->video.chroma = pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->chroma : tmedia_chroma_yuv420p;
- TSK_DEBUG_INFO("MF video producer chroma = %d", TMEDIA_PRODUCER(pSelf)->video.chroma);
- }
-
- if (pSelf->pEncoder) {
- // Unlock the encoder
- //BOOL bIsAsyncMFT = FALSE;
- //CHECK_HR(hr = MFUtils::IsAsyncMFT(pSelf->pEncoder->GetMFT(), &bIsAsyncMFT));
- //if(bIsAsyncMFT)
- //{
- // CHECK_HR(hr = MFUtils::UnlockAsyncMFT(pSelf->pEncoderpSelf->pEncoder->GetMFT()));
- //}
- // Apply Encoder output type (must be called before SetInputType)
- //CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetOutputType(0, pSelf->pGrabberInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
-
- // HW encoders support only NV12
- //CHECK_HR(hr = MFUtils::ConvertVideoTypeToUncompressedType(pSelf->pGrabberInputType, MFVideoFormat_NV12, &pEncoderInputType));
- //CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetInputType(0, pEncoderInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
- }
- // Create the sample grabber sink.
- CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
- CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pGrabberInputType, pSelf->pCallback, &pSelf->pSinkGrabber));
-
- // To run as fast as possible, set this attribute (requires Windows 7):
- CHECK_HR(hr = pSelf->pSinkGrabber->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
-
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
-
- // Create the EVR activation object for the preview.
- CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWndPreview, &pSelf->pSinkActivatePreview));
-
- // Create the topology.
- CHECK_HR(hr = MFUtils::CreateTopology(
- pSelf->pSource,
- pSelf->pEncoder ? pSelf->pEncoder->GetMFT() : NULL,
- pSelf->pSinkGrabber,
- pSelf->pSinkActivatePreview,
- pSelf->pGrabberInputType,
- &pTopology));
- // Resolve topology (adds video processors if needed).
- CHECK_HR(hr = MFUtils::ResolveTopology(pTopology, &pSelf->pTopology));
-
- // Find EVR for the preview.
- CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopology, MFUtils::g_ullTopoIdSinkPreview, (void**)&pEvr));
-
- // Find negotiated media and update producer
- UINT32 nNegWidth = (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, nNegHeight = (UINT32)TMEDIA_PRODUCER(pSelf)->video.height, nNegNumeratorFps = (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps, nNegDenominatorFps = 1;
- CHECK_HR(hr = pSelf->pTopology->GetNodeByID(MFUtils::g_ullTopoIdSinkMain, &pNodeGrabber));
- CHECK_HR(hr = pNodeGrabber->GetInputPrefType(0, &pGrabberNegotiatedInputMedia));
- hr = MFGetAttributeSize(pGrabberNegotiatedInputMedia, MF_MT_FRAME_SIZE, &nNegWidth, &nNegHeight);
- if (SUCCEEDED(hr))
- {
- TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: width(%u/%u), height(%u/%u)",
- TMEDIA_PRODUCER(pSelf)->video.width, nNegWidth,
- TMEDIA_PRODUCER(pSelf)->video.height, nNegHeight
- );
- TMEDIA_PRODUCER(pSelf)->video.width = nNegWidth;
- TMEDIA_PRODUCER(pSelf)->video.height = nNegHeight;
- }
- hr = MFGetAttributeRatio(pGrabberNegotiatedInputMedia, MF_MT_FRAME_RATE, &nNegNumeratorFps, &nNegDenominatorFps);
- if (SUCCEEDED(hr))
- {
- TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: fps(%u/%u)",
- TMEDIA_PRODUCER(pSelf)->video.fps, (nNegNumeratorFps / nNegDenominatorFps)
- );
- TMEDIA_PRODUCER(pSelf)->video.fps = (nNegNumeratorFps / nNegDenominatorFps);
- }
-
- // Create EVR watcher for the preview.
- pSelf->pWatcherPreview = new DisplayWatcher(pSelf->hWndPreview, pEvr, hr);
- CHECK_HR(hr);
- }
+ // Set session attributes
+ CHECK_HR(hr = MFCreateAttributes(&pSessionAttributes, 1));
+ CHECK_HR(hr = pSessionAttributes->SetUINT32(PLUGIN_MF_LOW_LATENCY, 1));
+
+ // Configure the media type that the Sample Grabber will receive.
+ // Setting the major and subtype is usually enough for the topology loader
+ // to resolve the topology.
+
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pGrabberInputType));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+ CHECK_HR(hr = MFSetAttributeSize(pSelf->pGrabberInputType, MF_MT_FRAME_SIZE, (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, (UINT32)TMEDIA_PRODUCER(pSelf)->video.height));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_FRAME_RATE, TMEDIA_PRODUCER(pSelf)->video.fps, 1));
+
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, pSelf->pEncoder ? FALSE : TRUE));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, pSelf->pEncoder ? FALSE : TRUE));
+ if (pSelf->pEncoder) {
+ switch (codec->id) {
+ case tmedia_codec_id_h264_bp:
+ case tmedia_codec_id_h264_mp: {
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_MPEG2_PROFILE, (codec->id == tmedia_codec_id_h264_bp) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_AVG_BITRATE, pSelf->bitrate_bps));
+ break;
+ }
+ default: {
+ TSK_DEBUG_ERROR("HW encoder with id = %d not expected", codec->id);
+ assert(false);
+ }
+ }
+ TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
+ TSK_DEBUG_INFO("MF video producer chroma = NV12 (because of HW encoder)");
+ }
+ else {
+ // Video Processors will be inserted in the topology if the source cannot produce I420 frames
+ // IMPORTANT: Must not be NV12 because not supported by Video Resizer DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819491(v=vs.85).aspx)
+ CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->fourcc : MFVideoFormat_I420));
+ TMEDIA_PRODUCER(pSelf)->video.chroma = pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->chroma : tmedia_chroma_yuv420p;
+ TSK_DEBUG_INFO("MF video producer chroma = %d", TMEDIA_PRODUCER(pSelf)->video.chroma);
+ }
+
+ if (pSelf->pEncoder) {
+ // Unlock the encoder
+ //BOOL bIsAsyncMFT = FALSE;
+ //CHECK_HR(hr = MFUtils::IsAsyncMFT(pSelf->pEncoder->GetMFT(), &bIsAsyncMFT));
+ //if(bIsAsyncMFT)
+ //{
+ // CHECK_HR(hr = MFUtils::UnlockAsyncMFT(pSelf->pEncoderpSelf->pEncoder->GetMFT()));
+ //}
+ // Apply Encoder output type (must be called before SetInputType)
+ //CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetOutputType(0, pSelf->pGrabberInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+
+ // HW encoders support only NV12
+ //CHECK_HR(hr = MFUtils::ConvertVideoTypeToUncompressedType(pSelf->pGrabberInputType, MFVideoFormat_NV12, &pEncoderInputType));
+ //CHECK_HR(hr = pSelf->pEncoder->GetMFT()->SetInputType(0, pEncoderInputType, 0/*MFT_SET_TYPE_TEST_ONLY*/));
+ }
+ // Create the sample grabber sink.
+ CHECK_HR(hr = SampleGrabberCB::CreateInstance(TMEDIA_PRODUCER(pSelf), &pSelf->pCallback));
+ CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pGrabberInputType, pSelf->pCallback, &pSelf->pSinkGrabber));
+
+ // To run as fast as possible, set this attribute (requires Windows 7):
+ CHECK_HR(hr = pSelf->pSinkGrabber->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(pSessionAttributes, &pSelf->pSession));
+
+ // Create the EVR activation object for the preview.
+ CHECK_HR(hr = MFCreateVideoRendererActivate(pSelf->hWndPreview, &pSelf->pSinkActivatePreview));
+
+ // Create the topology.
+ CHECK_HR(hr = MFUtils::CreateTopology(
+ pSelf->pSource,
+ pSelf->pEncoder ? pSelf->pEncoder->GetMFT() : NULL,
+ pSelf->pSinkGrabber,
+ pSelf->pSinkActivatePreview,
+ pSelf->pGrabberInputType,
+ &pTopology));
+ // Resolve topology (adds video processors if needed).
+ CHECK_HR(hr = MFUtils::ResolveTopology(pTopology, &pSelf->pTopology));
+
+ // Find EVR for the preview.
+ CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopology, MFUtils::g_ullTopoIdSinkPreview, (void**)&pEvr));
+
+ // Find negotiated media and update producer
+ UINT32 nNegWidth = (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, nNegHeight = (UINT32)TMEDIA_PRODUCER(pSelf)->video.height, nNegNumeratorFps = (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps, nNegDenominatorFps = 1;
+ CHECK_HR(hr = pSelf->pTopology->GetNodeByID(MFUtils::g_ullTopoIdSinkMain, &pNodeGrabber));
+ CHECK_HR(hr = pNodeGrabber->GetInputPrefType(0, &pGrabberNegotiatedInputMedia));
+ hr = MFGetAttributeSize(pGrabberNegotiatedInputMedia, MF_MT_FRAME_SIZE, &nNegWidth, &nNegHeight);
+ if (SUCCEEDED(hr)) {
+ TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: width(%u/%u), height(%u/%u)",
+ TMEDIA_PRODUCER(pSelf)->video.width, nNegWidth,
+ TMEDIA_PRODUCER(pSelf)->video.height, nNegHeight
+ );
+ TMEDIA_PRODUCER(pSelf)->video.width = nNegWidth;
+ TMEDIA_PRODUCER(pSelf)->video.height = nNegHeight;
+ }
+ hr = MFGetAttributeRatio(pGrabberNegotiatedInputMedia, MF_MT_FRAME_RATE, &nNegNumeratorFps, &nNegDenominatorFps);
+ if (SUCCEEDED(hr)) {
+ TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: fps(%u/%u)",
+ TMEDIA_PRODUCER(pSelf)->video.fps, (nNegNumeratorFps / nNegDenominatorFps)
+ );
+ TMEDIA_PRODUCER(pSelf)->video.fps = (nNegNumeratorFps / nNegDenominatorFps);
+ }
+
+ // Create EVR watcher for the preview.
+ pSelf->pWatcherPreview = new DisplayWatcher(pSelf->hWndPreview, pEvr, hr);
+ CHECK_HR(hr);
+ }
bail:
- SafeRelease(&pSessionAttributes);
- SafeRelease(&pTopology);
- SafeRelease(&pEvr);
- SafeRelease(&pEncoderInputType);
- SafeRelease(&pNodeGrabber);
- SafeRelease(&pGrabberNegotiatedInputMedia);
-
- pSelf->bPrepared = SUCCEEDED(hr);
- return pSelf->bPrepared ? 0 : -1;
+ SafeRelease(&pSessionAttributes);
+ SafeRelease(&pTopology);
+ SafeRelease(&pEvr);
+ SafeRelease(&pEncoderInputType);
+ SafeRelease(&pNodeGrabber);
+ SafeRelease(&pGrabberNegotiatedInputMedia);
+
+ pSelf->bPrepared = SUCCEEDED(hr);
+ return pSelf->bPrepared ? 0 : -1;
}
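The prepare step above builds the Sample Grabber's input type one attribute at a time and funnels every failure to a single bail label. A minimal standalone sketch of that pattern, with illustrative values only (this is not the plugin's code):

    #include <mfapi.h>
    #include <mfidl.h>
    #pragma comment(lib, "mfplat.lib")
    #pragma comment(lib, "mfuuid.lib")

    #define CHECK_HR(x) { if (FAILED(x)) goto bail; }
    template <class T> static void SafeRelease(T** pp) { if (pp && *pp) { (*pp)->Release(); *pp = NULL; } }

    // Build an uncompressed, progressive I420 video type like the one handed to the grabber sink.
    static HRESULT CreateRawVideoType(UINT32 nWidth, UINT32 nHeight, UINT32 nFps, IMFMediaType** ppType)
    {
        HRESULT hr = S_OK;
        IMFMediaType* pType = NULL;

        CHECK_HR(hr = MFCreateMediaType(&pType));
        CHECK_HR(hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
        CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_I420));
        CHECK_HR(hr = pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
        CHECK_HR(hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, nWidth, nHeight));
        CHECK_HR(hr = MFSetAttributeRatio(pType, MF_MT_FRAME_RATE, nFps, 1));
        CHECK_HR(hr = MFSetAttributeRatio(pType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));

        *ppType = pType, pType = NULL; // hand ownership to the caller

    bail:
        SafeRelease(&pType);
        return hr;
    }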
static int plugin_win_mf_producer_video_start(tmedia_producer_t* self)
{
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
-
- if (!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pSelf->bStarted){
- TSK_DEBUG_INFO("MF video producer already started");
- return 0;
- }
- if (!pSelf->bPrepared){
- TSK_DEBUG_ERROR("MF video producer not prepared");
- return -1;
- }
-
- HRESULT hr = S_OK;
-
- // Run preview watcher
- if (pSelf->pWatcherPreview) {
- CHECK_HR(hr = pSelf->pWatcherPreview->Start());
- }
-
- // Run the media session.
- CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
-
- // Start asynchronous watcher thread
- pSelf->bStarted = true;
- int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
- if (ret != 0) {
- TSK_DEBUG_ERROR("Failed to create thread");
- hr = E_FAIL;
- pSelf->bStarted = false;
- if (pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
- goto bail;
- }
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+
+ if (!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video producer already started");
+ return 0;
+ }
+ if (!pSelf->bPrepared) {
+ TSK_DEBUG_ERROR("MF video producer not prepared");
+ return -1;
+ }
+
+ HRESULT hr = S_OK;
+
+ // Run preview watcher
+ if (pSelf->pWatcherPreview) {
+ CHECK_HR(hr = pSelf->pWatcherPreview->Start());
+ }
+
+ // Run the media session.
+ CHECK_HR(hr = MFUtils::RunSession(pSelf->pSession, pSelf->pTopology));
+
+ // Start asynchronous watcher thread
+ pSelf->bStarted = true;
+ int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+ if (ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ hr = E_FAIL;
+ pSelf->bStarted = false;
+ if (pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
+ goto bail;
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
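MFUtils::RunSession is the plugin's own helper; with the raw Media Foundation API, queuing a topology and starting the session looks roughly like the sketch below (an illustration, not the helper's actual body):

    #include <mfapi.h>
    #include <mfidl.h>
    #include <propidl.h>
    #pragma comment(lib, "mfplat.lib")
    #pragma comment(lib, "ole32.lib")

    static HRESULT RunSessionSketch(IMFMediaSession* pSession, IMFTopology* pTopology)
    {
        // SetTopology() only queues the topology; it is resolved and applied asynchronously,
        // which is why the caller then pumps session events on a worker thread.
        HRESULT hr = pSession->SetTopology(0, pTopology);
        if (SUCCEEDED(hr)) {
            PROPVARIANT varStart;
            PropVariantInit(&varStart); // VT_EMPTY => start from the current position
            hr = pSession->Start(&GUID_NULL, &varStart);
            PropVariantClear(&varStart);
        }
        return hr;
    }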
static int plugin_win_mf_producer_video_pause(tmedia_producer_t* self)
{
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
- if (!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if (!pSelf->bStarted)
- {
- TSK_DEBUG_INFO("MF video producer not started");
- return 0;
- }
+ if (!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if (!pSelf->bStarted) {
+ TSK_DEBUG_INFO("MF video producer not started");
+ return 0;
+ }
- HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
+ HRESULT hr = MFUtils::PauseSession(pSelf->pSession);
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_producer_video_stop(tmedia_producer_t* self)
{
- plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
+ plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
- if (!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- if (pSelf->pWatcherPreview){
- hr = pSelf->pWatcherPreview->Stop();
- }
+ if (pSelf->pWatcherPreview) {
+ hr = pSelf->pWatcherPreview->Stop();
+ }
- // for the thread
- pSelf->bStarted = false;
- hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
- if (pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
+ // for the thread
+ pSelf->bStarted = false;
+ hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
+ if (pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
- // next start() will be called after prepare()
- return _plugin_win_mf_producer_video_unprepare(pSelf);
+ // next start() will be called after prepare()
+ return _plugin_win_mf_producer_video_unprepare(pSelf);
}
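The ordering in stop() matters: shutting the session down first makes IMFMediaSession::GetEvent() return MF_E_SHUTDOWN, which is what lets RunSessionThread exit before the join; only then is the source shut down to release the camera. A sketch of the same two-step teardown with the raw API (MFUtils::ShutdownSession is the plugin's helper):

    #include <mfidl.h>

    static void ShutdownSessionThenSource(IMFMediaSession* pSession, IMFMediaSource* pSource)
    {
        if (pSession) {
            pSession->Shutdown(); // wakes any pending GetEvent() call with MF_E_SHUTDOWN
        }
        // ... join the event thread here ...
        if (pSource) {
            pSource->Shutdown();  // stops capture and releases the device
        }
    }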
static int _plugin_win_mf_producer_video_unprepare(plugin_win_mf_producer_video_t* pSelf)
{
- if (!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pSelf->bStarted) {
- // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- TSK_DEBUG_ERROR("Producer must be stopped before calling unprepare");
- }
- if (pSelf->pDeviceList){
- delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
- }
- if (pSelf->pWatcherPreview){
- pSelf->pWatcherPreview->Stop();
- }
- if (pSelf->pSource){
- pSelf->pSource->Shutdown();
- }
- if (pSelf->pSession){
- pSelf->pSession->Shutdown();
- }
-
- SafeRelease(&pSelf->pEncoder);
- SafeRelease(&pSelf->pSession);
- SafeRelease(&pSelf->pSource);
- SafeRelease(&pSelf->pSinkActivatePreview);
- SafeRelease(&pSelf->pCallback);
- SafeRelease(&pSelf->pSinkGrabber);
- SafeRelease(&pSelf->pTopology);
- SafeRelease(&pSelf->pGrabberInputType);
-
- if (pSelf->pWatcherPreview){
- delete pSelf->pWatcherPreview;
- pSelf->pWatcherPreview = NULL;
- }
-
- pSelf->bPrepared = false;
-
- return 0;
+ if (!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pSelf->bStarted) {
+ // plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ TSK_DEBUG_ERROR("Producer must be stopped before calling unprepare");
+ }
+ if (pSelf->pDeviceList) {
+ delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+ }
+ if (pSelf->pWatcherPreview) {
+ pSelf->pWatcherPreview->Stop();
+ }
+ if (pSelf->pSource) {
+ pSelf->pSource->Shutdown();
+ }
+ if (pSelf->pSession) {
+ pSelf->pSession->Shutdown();
+ }
+
+ SafeRelease(&pSelf->pEncoder);
+ SafeRelease(&pSelf->pSession);
+ SafeRelease(&pSelf->pSource);
+ SafeRelease(&pSelf->pSinkActivatePreview);
+ SafeRelease(&pSelf->pCallback);
+ SafeRelease(&pSelf->pSinkGrabber);
+ SafeRelease(&pSelf->pTopology);
+ SafeRelease(&pSelf->pGrabberInputType);
+
+ if (pSelf->pWatcherPreview) {
+ delete pSelf->pWatcherPreview;
+ pSelf->pWatcherPreview = NULL;
+ }
+
+ pSelf->bPrepared = false;
+
+ return 0;
}
//
@@ -611,63 +590,61 @@ static int _plugin_win_mf_producer_video_unprepare(plugin_win_mf_producer_video_
/* constructor */
static tsk_object_t* plugin_win_mf_producer_video_ctor(tsk_object_t * self, va_list * app)
{
- MFUtils::Startup();
-
- plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
- if (pSelf){
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
-
- /* init self with default values*/
- TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
- TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
- TMEDIA_PRODUCER(pSelf)->video.fps = 15;
- TMEDIA_PRODUCER(pSelf)->video.width = 352;
- TMEDIA_PRODUCER(pSelf)->video.height = 288;
-
- TSK_DEBUG_INFO("Create WinMF video producer");
- }
- return self;
+ MFUtils::Startup();
+
+ plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
+ if (pSelf) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
+
+ /* init self with default values*/
+ TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
+ TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
+ TMEDIA_PRODUCER(pSelf)->video.fps = 15;
+ TMEDIA_PRODUCER(pSelf)->video.width = 352;
+ TMEDIA_PRODUCER(pSelf)->video.height = 288;
+
+ TSK_DEBUG_INFO("Create WinMF video producer");
+ }
+ return self;
}
/* destructor */
static tsk_object_t* plugin_win_mf_producer_video_dtor(tsk_object_t * self)
{
- plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
- if (pSelf){
- /* stop */
- if (pSelf->bStarted){
- plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
- /* deinit self */
- _plugin_win_mf_producer_video_unprepare(pSelf);
- }
-
- return self;
+ plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
+ if (pSelf) {
+ /* stop */
+ if (pSelf->bStarted) {
+ plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
+ /* deinit self */
+ _plugin_win_mf_producer_video_unprepare(pSelf);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t plugin_win_mf_producer_video_def_s =
-{
- sizeof(plugin_win_mf_producer_video_t),
- plugin_win_mf_producer_video_ctor,
- plugin_win_mf_producer_video_dtor,
- tsk_null,
+static const tsk_object_def_t plugin_win_mf_producer_video_def_s = {
+ sizeof(plugin_win_mf_producer_video_t),
+ plugin_win_mf_producer_video_ctor,
+ plugin_win_mf_producer_video_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t plugin_win_mf_producer_video_plugin_def_s =
-{
- &plugin_win_mf_producer_video_def_s,
+static const tmedia_producer_plugin_def_t plugin_win_mf_producer_video_plugin_def_s = {
+ &plugin_win_mf_producer_video_def_s,
- tmedia_video,
- "Microsoft Windows Media Foundation producer (Video)",
+ tmedia_video,
+ "Microsoft Windows Media Foundation producer (Video)",
- plugin_win_mf_producer_video_set,
- plugin_win_mf_producer_video_prepare,
- plugin_win_mf_producer_video_start,
- plugin_win_mf_producer_video_pause,
- plugin_win_mf_producer_video_stop
+ plugin_win_mf_producer_video_set,
+ plugin_win_mf_producer_video_prepare,
+ plugin_win_mf_producer_video_start,
+ plugin_win_mf_producer_video_pause,
+ plugin_win_mf_producer_video_stop
};
const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t = &plugin_win_mf_producer_video_plugin_def_s;
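The exported plugin_win_mf_producer_video_plugin_def_t pointer is what the host application registers. Assuming the standard tinyMEDIA registration call used by other Doubango plugins (hypothetical host-side usage, not part of this diff):

    // Hypothetical registration; assumes tinyMEDIA's tmedia_producer_plugin_register().
    #include "tinymedia/tmedia_producer.h"

    extern const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t;

    static int register_mf_video_producer()
    {
        return tmedia_producer_plugin_register(plugin_win_mf_producer_video_plugin_def_t);
    }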
@@ -675,40 +652,38 @@ const tmedia_producer_plugin_def_t *plugin_win_mf_producer_video_plugin_def_t =
// Run session async thread
static void* TSK_STDCALL RunSessionThread(void *pArg)
{
- plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
- MediaEventType met;
-
- TSK_DEBUG_INFO("RunSessionThread (MF video producer) - ENTER");
-
- while (pSelf->bStarted){
- hr = pSelf->pSession->GetEvent(0, &pEvent);
- if (hr == MF_E_SHUTDOWN) {
- if (pSelf->bStarted) {
- CHECK_HR(hr); // Shutdown called but "bStarted" not equal to false
- }
- break; // Shutdown called and "bStarted" is equal to false => break the loop
- }
- CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- CHECK_HR(hr = pEvent->GetType(&met));
-
- if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
- hr = hrStatus;
- goto bail;
- }
- if (met == MESessionEnded)
- {
- break;
- }
- SafeRelease(&pEvent);
- }
+ plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
+ MediaEventType met;
+
+ TSK_DEBUG_INFO("RunSessionThread (MF video producer) - ENTER");
+
+ while (pSelf->bStarted) {
+ hr = pSelf->pSession->GetEvent(0, &pEvent);
+ if (hr == MF_E_SHUTDOWN) {
+ if (pSelf->bStarted) {
+ CHECK_HR(hr); // Shutdown called but "bStarted" not equal to false
+ }
+ break; // Shutdown called and "bStarted" is equal to false => break the loop
+ }
+ CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
+ CHECK_HR(hr = pEvent->GetType(&met));
+
+ if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ hr = hrStatus;
+ goto bail;
+ }
+ if (met == MESessionEnded) {
+ break;
+ }
+ SafeRelease(&pEvent);
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread (MF video producer) - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread (MF video producer) - EXIT");
- return NULL;
+ return NULL;
}
\ No newline at end of file
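RunSessionThread above only reacts to failures and MESessionEnded; every other session event is simply drained and released. A condensed sketch of one iteration of that pump, for reference (not the plugin's loop):

    #include <mfidl.h>

    // Drain one session event; report the carried error and whether the session ended.
    static HRESULT DrainOneSessionEvent(IMFMediaSession* pSession, bool* pbEnded)
    {
        IMFMediaEvent* pEvent = NULL;
        MediaEventType met = MEUnknown;
        HRESULT hrStatus = S_OK;

        HRESULT hr = pSession->GetEvent(0, &pEvent); // blocking; woken up by Shutdown()
        if (SUCCEEDED(hr)) hr = pEvent->GetStatus(&hrStatus);
        if (SUCCEEDED(hr)) hr = pEvent->GetType(&met);
        if (SUCCEEDED(hr) && FAILED(hrStatus)) hr = hrStatus; // the event carries the real error
        if (pbEnded) *pbEnded = (met == MESessionEnded);

        if (pEvent) pEvent->Release();
        return hr;
    }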
diff --git a/plugins/pluginWinMF/plugin_win_mf_tdav.cxx b/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
index d08bcfc..1885617 100755
--- a/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
+++ b/plugins/pluginWinMF/plugin_win_mf_tdav.cxx
@@ -1,18 +1,18 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/