summaryrefslogtreecommitdiffstats
path: root/tinyDAV/src/video
diff options
context:
space:
mode:
Diffstat (limited to 'tinyDAV/src/video')
-rwxr-xr-xtinyDAV/src/video/directx/tdav_producer_screencast_d3d9.cxx163
-rwxr-xr-xtinyDAV/src/video/directx/tdav_producer_screencast_ddraw.cxx2018
-rwxr-xr-xtinyDAV/src/video/gdi/tdav_consumer_video_gdi.c786
-rwxr-xr-xtinyDAV/src/video/gdi/tdav_producer_screencast_gdi.c759
-rwxr-xr-xtinyDAV/src/video/jb/tdav_video_frame.c105
-rwxr-xr-xtinyDAV/src/video/jb/tdav_video_jb.c260
-rwxr-xr-xtinyDAV/src/video/mf/tdav_consumer_video_mf.cxx187
-rwxr-xr-xtinyDAV/src/video/mf/tdav_producer_video_mf.cxx608
-rwxr-xr-xtinyDAV/src/video/tdav_consumer_video.c245
-rwxr-xr-xtinyDAV/src/video/tdav_converter_video.cxx1321
-rwxr-xr-xtinyDAV/src/video/tdav_runnable_video.c71
-rwxr-xr-xtinyDAV/src/video/tdav_session_video.c1234
-rwxr-xr-xtinyDAV/src/video/v4linux/tdav_producer_video_v4l2.c1855
-rwxr-xr-xtinyDAV/src/video/winm/tdav_consumer_winm.cxx225
-rwxr-xr-xtinyDAV/src/video/winm/tdav_producer_winm.cxx845
15 files changed, 5365 insertions, 5317 deletions
diff --git a/tinyDAV/src/video/directx/tdav_producer_screencast_d3d9.cxx b/tinyDAV/src/video/directx/tdav_producer_screencast_d3d9.cxx
index 7efd1d6..b95a8e2 100755
--- a/tinyDAV/src/video/directx/tdav_producer_screencast_d3d9.cxx
+++ b/tinyDAV/src/video/directx/tdav_producer_screencast_d3d9.cxx
@@ -22,10 +22,10 @@
#include <windows.h>
#if TDAV_UNDER_WINDOWS_CE
- // Direct3D Mobile (D3DM) was removed from Windows CE in version 7.
- // Only include that header if running version 5 or 6. (When this
- // class's implementation is complete, we'll need to revisit how
- // this entire file is compiled.)
+// Direct3D Mobile (D3DM) was removed from Windows CE in version 7.
+// Only include that header if running version 5 or 6. (When this
+// class's implementation is complete, we'll need to revisit how
+// this entire file is compiled.)
# if _WIN32_WCE >= 0x0500 && _WIN32_WCE < 0x0700
# include <D3dm.h>
# endif
@@ -55,60 +55,59 @@
#define D3D9_DEBUG_ERROR(FMT, ...) TSK_DEBUG_ERROR("[D3D9 Producer] " FMT, ##__VA_ARGS__)
#define D3D9_DEBUG_FATAL(FMT, ...) TSK_DEBUG_FATAL("[D3D9 Producer] " FMT, ##__VA_ARGS__)
-typedef struct tdav_producer_screencast_d3d9_s
-{
- TMEDIA_DECLARE_PRODUCER;
+typedef struct tdav_producer_screencast_d3d9_s {
+ TMEDIA_DECLARE_PRODUCER;
- HWND hwnd_preview;
- HWND hwnd_src;
+ HWND hwnd_preview;
+ HWND hwnd_src;
- tsk_thread_handle_t* tid[1];
+ tsk_thread_handle_t* tid[1];
- void* p_buff_src; // must use VirtualAlloc()
- tsk_size_t n_buff_src;
- void* p_buff_neg; // must use VirtualAlloc()
- tsk_size_t n_buff_neg;
+ void* p_buff_src; // must use VirtualAlloc()
+ tsk_size_t n_buff_src;
+ void* p_buff_neg; // must use VirtualAlloc()
+ tsk_size_t n_buff_neg;
- tsk_bool_t b_started;
- tsk_bool_t b_paused;
- tsk_bool_t b_muted;
+ tsk_bool_t b_started;
+ tsk_bool_t b_paused;
+ tsk_bool_t b_muted;
- RECT rcScreen;
+ RECT rcScreen;
- TSK_DECLARE_SAFEOBJ;
+ TSK_DECLARE_SAFEOBJ;
}
tdav_producer_screencast_d3d9_t;
/* ============ Media Producer Interface ================= */
static int _tdav_producer_screencast_d3d9_set(tmedia_producer_t *p_self, const tmedia_param_t* pc_param)
{
- D3D9_DEBUG_ERROR("Not implemented");
- return -1;
+ D3D9_DEBUG_ERROR("Not implemented");
+ return -1;
}
static int _tdav_producer_screencast_d3d9_prepare(tmedia_producer_t* p_self, const tmedia_codec_t* pc_codec)
{
- D3D9_DEBUG_ERROR("Not implemented");
- return -1;
+ D3D9_DEBUG_ERROR("Not implemented");
+ return -1;
}
static int _tdav_producer_screencast_d3d9_start(tmedia_producer_t* p_self)
{
- D3D9_DEBUG_ERROR("Not implemented");
- return -1;
+ D3D9_DEBUG_ERROR("Not implemented");
+ return -1;
}
static int _tdav_producer_screencast_d3d9_pause(tmedia_producer_t* p_self)
{
- D3D9_DEBUG_ERROR("Not implemented");
- return -1;
+ D3D9_DEBUG_ERROR("Not implemented");
+ return -1;
}
static int _tdav_producer_screencast_d3d9_stop(tmedia_producer_t* p_self)
{
- D3D9_DEBUG_ERROR("Not implemented");
- return -1;
+ D3D9_DEBUG_ERROR("Not implemented");
+ return -1;
}
//
@@ -117,68 +116,66 @@ static int _tdav_producer_screencast_d3d9_stop(tmedia_producer_t* p_self)
/* constructor */
static tsk_object_t* _tdav_producer_screencast_d3d9_ctor(tsk_object_t *self, va_list * app)
{
- tdav_producer_screencast_d3d9_t *p_d3d9 = (tdav_producer_screencast_d3d9_t *)self;
- if (p_d3d9) {
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(p_d3d9));
- TMEDIA_PRODUCER(p_d3d9)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
- /* init self with default values*/
- TMEDIA_PRODUCER(p_d3d9)->video.fps = 15;
- TMEDIA_PRODUCER(p_d3d9)->video.width = 352;
- TMEDIA_PRODUCER(p_d3d9)->video.height = 288;
-
- tsk_safeobj_init(p_d3d9);
- }
- return self;
+ tdav_producer_screencast_d3d9_t *p_d3d9 = (tdav_producer_screencast_d3d9_t *)self;
+ if (p_d3d9) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(p_d3d9));
+ TMEDIA_PRODUCER(p_d3d9)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
+ /* init self with default values*/
+ TMEDIA_PRODUCER(p_d3d9)->video.fps = 15;
+ TMEDIA_PRODUCER(p_d3d9)->video.width = 352;
+ TMEDIA_PRODUCER(p_d3d9)->video.height = 288;
+
+ tsk_safeobj_init(p_d3d9);
+ }
+ return self;
}
/* destructor */
static tsk_object_t* _tdav_producer_screencast_d3d9_dtor(tsk_object_t * self)
{
- tdav_producer_screencast_d3d9_t *p_d3d9 = (tdav_producer_screencast_d3d9_t *)self;
- if (p_d3d9) {
- /* stop */
- if (p_d3d9->b_started) {
- _tdav_producer_screencast_d3d9_stop((tmedia_producer_t*)p_d3d9);
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(p_d3d9));
- /* deinit self */
- if (p_d3d9->p_buff_neg) {
- VirtualFree(p_d3d9->p_buff_neg, 0, MEM_RELEASE);
- p_d3d9->p_buff_neg = NULL;
- }
- if (p_d3d9->p_buff_src) {
- VirtualFree(p_d3d9->p_buff_src, 0, MEM_RELEASE);
- p_d3d9->p_buff_src = NULL;
- }
- tsk_safeobj_deinit(p_d3d9);
-
- TSK_DEBUG_INFO("*** d3d9 Screencast producer destroyed ***");
- }
-
- return self;
+ tdav_producer_screencast_d3d9_t *p_d3d9 = (tdav_producer_screencast_d3d9_t *)self;
+ if (p_d3d9) {
+ /* stop */
+ if (p_d3d9->b_started) {
+ _tdav_producer_screencast_d3d9_stop((tmedia_producer_t*)p_d3d9);
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(p_d3d9));
+ /* deinit self */
+ if (p_d3d9->p_buff_neg) {
+ VirtualFree(p_d3d9->p_buff_neg, 0, MEM_RELEASE);
+ p_d3d9->p_buff_neg = NULL;
+ }
+ if (p_d3d9->p_buff_src) {
+ VirtualFree(p_d3d9->p_buff_src, 0, MEM_RELEASE);
+ p_d3d9->p_buff_src = NULL;
+ }
+ tsk_safeobj_deinit(p_d3d9);
+
+ TSK_DEBUG_INFO("*** d3d9 Screencast producer destroyed ***");
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_producer_screencast_d3d9_def_s =
-{
- sizeof(tdav_producer_screencast_d3d9_t),
- _tdav_producer_screencast_d3d9_ctor,
- _tdav_producer_screencast_d3d9_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_producer_screencast_d3d9_def_s = {
+ sizeof(tdav_producer_screencast_d3d9_t),
+ _tdav_producer_screencast_d3d9_ctor,
+ _tdav_producer_screencast_d3d9_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t tdav_producer_screencast_d3d9_plugin_def_s =
-{
- &tdav_producer_screencast_d3d9_def_s,
- tmedia_bfcp_video,
- "Microsoft Direct3D screencast producer",
-
- _tdav_producer_screencast_d3d9_set,
- _tdav_producer_screencast_d3d9_prepare,
- _tdav_producer_screencast_d3d9_start,
- _tdav_producer_screencast_d3d9_pause,
- _tdav_producer_screencast_d3d9_stop
+static const tmedia_producer_plugin_def_t tdav_producer_screencast_d3d9_plugin_def_s = {
+ &tdav_producer_screencast_d3d9_def_s,
+ tmedia_bfcp_video,
+ "Microsoft Direct3D screencast producer",
+
+ _tdav_producer_screencast_d3d9_set,
+ _tdav_producer_screencast_d3d9_prepare,
+ _tdav_producer_screencast_d3d9_start,
+ _tdav_producer_screencast_d3d9_pause,
+ _tdav_producer_screencast_d3d9_stop
};
const tmedia_producer_plugin_def_t *tdav_producer_screencast_d3d9_plugin_def_t = &tdav_producer_screencast_d3d9_plugin_def_s;
diff --git a/tinyDAV/src/video/directx/tdav_producer_screencast_ddraw.cxx b/tinyDAV/src/video/directx/tdav_producer_screencast_ddraw.cxx
index 13507db..607b5ff 100755
--- a/tinyDAV/src/video/directx/tdav_producer_screencast_ddraw.cxx
+++ b/tinyDAV/src/video/directx/tdav_producer_screencast_ddraw.cxx
@@ -111,58 +111,57 @@
#define DDRAW_CHECK_HR(x) { HRESULT __hr__ = (x); if (FAILED(__hr__)) { DDRAW_DEBUG_ERROR("Operation Failed (%08x)", __hr__); goto bail; } }
typedef struct DDrawModule {
- LPDIRECTDRAW lpDD;
- HMODULE hDLL;
-}DDrawModule;
+ LPDIRECTDRAW lpDD;
+ HMODULE hDLL;
+} DDrawModule;
typedef struct DDrawModule FAR *LPDDrawModule;
#define DDrawModuleSafeFree(module) DDRAW_SAFE_RELEASE(&module.lpDD); if (module.hDLL) { FreeLibrary(module.hDLL), module.hDLL = NULL; }
-typedef struct tdav_producer_screencast_ddraw_s
-{
- TMEDIA_DECLARE_PRODUCER;
+typedef struct tdav_producer_screencast_ddraw_s {
+ TMEDIA_DECLARE_PRODUCER;
- HWND hwnd_preview;
- HWND hwnd_src;
+ HWND hwnd_preview;
+ HWND hwnd_src;
#if DDRAW_PREVIEW
- BITMAPINFO bi_preview;
+ BITMAPINFO bi_preview;
#endif /* DDRAW_PREVIEW */
#if DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING
- tsk_timer_manager_handle_t *p_timer_mgr;
- struct {
- tsk_timer_id_t id_timer;
- int fps_target;
- } cpu;
+ tsk_timer_manager_handle_t *p_timer_mgr;
+ struct {
+ tsk_timer_id_t id_timer;
+ int fps_target;
+ } cpu;
#endif /* DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING */
#if DDRAW_MT
- struct{
- tsk_thread_handle_t* tid[1];
- void* p_buff_yuv_aligned_array[DDRAW_MT_COUNT];
- BOOL b_flags_array[DDRAW_MT_COUNT];
- HANDLE h_events[DDRAW_MT_COUNT + 1]; // #DDRAW_MT_COUNT events for each buffer plus #1 for the shutdown/stop
- } mt;
+ struct {
+ tsk_thread_handle_t* tid[1];
+ void* p_buff_yuv_aligned_array[DDRAW_MT_COUNT];
+ BOOL b_flags_array[DDRAW_MT_COUNT];
+ HANDLE h_events[DDRAW_MT_COUNT + 1]; // #DDRAW_MT_COUNT events for each buffer plus #1 for the shutdown/stop
+ } mt;
#endif /* DDRAW_MT */
- DDrawModule ddrawModule;
- IDirectDrawSurface* p_surf_primary;
+ DDrawModule ddrawModule;
+ IDirectDrawSurface* p_surf_primary;
- tsk_thread_handle_t* tid[1];
+ tsk_thread_handle_t* tid[1];
- void* p_buff_rgb_aligned;
- tsk_size_t n_buff_rgb;
- tsk_size_t n_buff_rgb_bitscount;
+ void* p_buff_rgb_aligned;
+ tsk_size_t n_buff_rgb;
+ tsk_size_t n_buff_rgb_bitscount;
- void* p_buff_yuv_aligned;
- tsk_size_t n_buff_yuv;
+ void* p_buff_yuv_aligned;
+ tsk_size_t n_buff_yuv;
- BOOL b_have_rgb32_conv; // support for RGB32 -> I420 and primary screen format is RGB32
+ BOOL b_have_rgb32_conv; // support for RGB32 -> I420 and primary screen format is RGB32
- tsk_bool_t b_started;
- tsk_bool_t b_paused;
- tsk_bool_t b_muted;
+ tsk_bool_t b_started;
+ tsk_bool_t b_paused;
+ tsk_bool_t b_muted;
- TSK_DECLARE_SAFEOBJ;
+ TSK_DECLARE_SAFEOBJ;
}
tdav_producer_screencast_ddraw_t;
@@ -180,94 +179,94 @@ static HRESULT _tdav_producer_screencast_alloc_yuv_buff(tdav_producer_screencast
#if DDRAW_HAVE_RGB32_TO_I420_INTRIN || DDRAW_HAVE_RGB32_TO_I420_ASM
static __declspec(align(DDRAW_MEM_ALIGNMENT)) const int8_t kYCoeffs[16] = {
- 13, 65, 33, 0,
- 13, 65, 33, 0,
- 13, 65, 33, 0,
- 13, 65, 33, 0,
- };
- static __declspec(align(DDRAW_MEM_ALIGNMENT)) const int8_t kUCoeffs[16] = {
- 112, -74, -38, 0,
- 112, -74, -38, 0,
- 112, -74, -38, 0,
- 112, -74, -38, 0,
- };
- static __declspec(align(DDRAW_MEM_ALIGNMENT)) const int8_t kVCoeffs[16] = {
- -18, -94, 112, 0,
- -18, -94, 112, 0,
- -18, -94, 112, 0,
- -18, -94, 112, 0,
- };
- static __declspec(align(DDRAW_MEM_ALIGNMENT)) const int32_t kRGBAShuffleDuplicate[4] = { 0x03020100, 0x0b0a0908, 0x03020100, 0x0b0a0908 }; // RGBA(X) || RGBA(X + 2) || RGBA(X) || RGBA(X + 2) = 2U || 2V
- static __declspec(align(DDRAW_MEM_ALIGNMENT)) const uint16_t kY16[8] = {
- 16, 16, 16, 16,
- 16, 16, 16, 16
- };
- static __declspec(align(DDRAW_MEM_ALIGNMENT)) const uint16_t kUV128[8] = {
- 128, 128, 128, 128,
- 128, 128, 128, 128
- };
+ 13, 65, 33, 0,
+ 13, 65, 33, 0,
+ 13, 65, 33, 0,
+ 13, 65, 33, 0,
+};
+static __declspec(align(DDRAW_MEM_ALIGNMENT)) const int8_t kUCoeffs[16] = {
+ 112, -74, -38, 0,
+ 112, -74, -38, 0,
+ 112, -74, -38, 0,
+ 112, -74, -38, 0,
+};
+static __declspec(align(DDRAW_MEM_ALIGNMENT)) const int8_t kVCoeffs[16] = {
+ -18, -94, 112, 0,
+ -18, -94, 112, 0,
+ -18, -94, 112, 0,
+ -18, -94, 112, 0,
+};
+static __declspec(align(DDRAW_MEM_ALIGNMENT)) const int32_t kRGBAShuffleDuplicate[4] = { 0x03020100, 0x0b0a0908, 0x03020100, 0x0b0a0908 }; // RGBA(X) || RGBA(X + 2) || RGBA(X) || RGBA(X + 2) = 2U || 2V
+static __declspec(align(DDRAW_MEM_ALIGNMENT)) const uint16_t kY16[8] = {
+ 16, 16, 16, 16,
+ 16, 16, 16, 16
+};
+static __declspec(align(DDRAW_MEM_ALIGNMENT)) const uint16_t kUV128[8] = {
+ 128, 128, 128, 128,
+ 128, 128, 128, 128
+};
#endif /* DDRAW_HAVE_RGB32_TO_I420_INTRIN || DDRAW_HAVE_RGB32_TO_I420_ASM */
// public function used to check that we can use DDRAW plugin before loading it
tsk_bool_t tdav_producer_screencast_ddraw_plugin_is_supported()
{
- static tsk_bool_t __checked = tsk_false; // static guard to avoid checking more than once
- static tsk_bool_t __supported = tsk_false;
-
- HRESULT hr = DD_OK;
- DDSURFACEDESC ddsd;
- DDPIXELFORMAT DDPixelFormat;
- LPDIRECTDRAWSURFACE lpDDS = NULL;
- DDrawModule ddrawModule = { 0 };
-
- if (__checked) {
- goto bail;
- }
-
- __checked = tsk_true;
-
- DDRAW_CHECK_HR(hr = _tdav_producer_screencast_create_module(&ddrawModule));
- DDRAW_CHECK_HR(hr = ddrawModule.lpDD->SetCooperativeLevel(NULL, DDSCL_NORMAL));
-
- ZeroMemory(&ddsd, sizeof(ddsd));
- ddsd.dwSize = sizeof(ddsd);
- ddsd.dwFlags = DDSD_CAPS;
- ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE;
-
- DDRAW_CHECK_HR(hr = ddrawModule.lpDD->CreateSurface(&ddsd, &lpDDS, NULL));
-
- ZeroMemory(&DDPixelFormat, sizeof(DDPixelFormat));
- DDPixelFormat.dwSize = sizeof(DDPixelFormat);
- DDRAW_CHECK_HR(hr = lpDDS->GetPixelFormat(&DDPixelFormat));
- DDRAW_DEBUG_INFO("dwRGBBitCount:%d, dwRBitMask:%x, dwGBitMask:%x, dwBBitMask:%x, dwRGBAlphaBitMask:%x",
- DDPixelFormat.dwRGBBitCount, DDPixelFormat.dwRBitMask, DDPixelFormat.dwGBitMask, DDPixelFormat.dwBBitMask, DDPixelFormat.dwRGBAlphaBitMask);
- if (_tdav_producer_screencast_get_chroma(&DDPixelFormat) == tmedia_chroma_none) {
- DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
- }
+ static tsk_bool_t __checked = tsk_false; // static guard to avoid checking more than once
+ static tsk_bool_t __supported = tsk_false;
+
+ HRESULT hr = DD_OK;
+ DDSURFACEDESC ddsd;
+ DDPIXELFORMAT DDPixelFormat;
+ LPDIRECTDRAWSURFACE lpDDS = NULL;
+ DDrawModule ddrawModule = { 0 };
+
+ if (__checked) {
+ goto bail;
+ }
- __supported = SUCCEEDED(hr);
+ __checked = tsk_true;
+
+ DDRAW_CHECK_HR(hr = _tdav_producer_screencast_create_module(&ddrawModule));
+ DDRAW_CHECK_HR(hr = ddrawModule.lpDD->SetCooperativeLevel(NULL, DDSCL_NORMAL));
+
+ ZeroMemory(&ddsd, sizeof(ddsd));
+ ddsd.dwSize = sizeof(ddsd);
+ ddsd.dwFlags = DDSD_CAPS;
+ ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE;
+
+ DDRAW_CHECK_HR(hr = ddrawModule.lpDD->CreateSurface(&ddsd, &lpDDS, NULL));
+
+ ZeroMemory(&DDPixelFormat, sizeof(DDPixelFormat));
+ DDPixelFormat.dwSize = sizeof(DDPixelFormat);
+ DDRAW_CHECK_HR(hr = lpDDS->GetPixelFormat(&DDPixelFormat));
+ DDRAW_DEBUG_INFO("dwRGBBitCount:%d, dwRBitMask:%x, dwGBitMask:%x, dwBBitMask:%x, dwRGBAlphaBitMask:%x",
+ DDPixelFormat.dwRGBBitCount, DDPixelFormat.dwRBitMask, DDPixelFormat.dwGBitMask, DDPixelFormat.dwBBitMask, DDPixelFormat.dwRGBAlphaBitMask);
+ if (_tdav_producer_screencast_get_chroma(&DDPixelFormat) == tmedia_chroma_none) {
+ DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
+ }
+
+ __supported = SUCCEEDED(hr);
bail:
- DDRAW_SAFE_RELEASE(&lpDDS);
- DDrawModuleSafeFree(ddrawModule);
- return __supported;
+ DDRAW_SAFE_RELEASE(&lpDDS);
+ DDrawModuleSafeFree(ddrawModule);
+ return __supported;
}
static BOOL _tdav_producer_screencast_have_ssse3()
{
- static BOOL __checked = FALSE; // static guard to avoid checking more than once
- static BOOL __supported = FALSE;
+ static BOOL __checked = FALSE; // static guard to avoid checking more than once
+ static BOOL __supported = FALSE;
- if (__checked) {
- return __supported;
- }
- __checked = TRUE;
+ if (__checked) {
+ return __supported;
+ }
+ __checked = TRUE;
#ifndef BIT
# define BIT(n) (1<<n)
#endif /*BIT*/
#if DDRAW_HAVE_RGB32_TO_I420_ASM
- #define cpuid(func, func2, a, b, c, d)\
+#define cpuid(func, func2, a, b, c, d)\
__asm mov eax, func\
__asm mov ecx, func2\
__asm cpuid\
@@ -285,28 +284,28 @@ static BOOL _tdav_producer_screencast_have_ssse3()
#define HAS_AVX 0x40
#define HAS_AVX2 0x80
- unsigned int reg_eax, reg_ebx, reg_ecx, reg_edx;
- cpuid(0, 0, reg_eax, reg_ebx, reg_ecx, reg_edx);
- if (reg_eax < 1) {
- DDRAW_DEBUG_ERROR("reg_eax < 1");
- return FALSE;
- }
- cpuid(1, 0, reg_eax, reg_ebx, reg_ecx, reg_edx);
- __supported = (reg_ecx & BIT(9)) ? TRUE : FALSE;
+ unsigned int reg_eax, reg_ebx, reg_ecx, reg_edx;
+ cpuid(0, 0, reg_eax, reg_ebx, reg_ecx, reg_edx);
+ if (reg_eax < 1) {
+ DDRAW_DEBUG_ERROR("reg_eax < 1");
+ return FALSE;
+ }
+ cpuid(1, 0, reg_eax, reg_ebx, reg_ecx, reg_edx);
+ __supported = (reg_ecx & BIT(9)) ? TRUE : FALSE;
#elif DDRAW_HAVE_RGB32_TO_I420_INTRIN
- int cpu_info[4] = { 0 }, num_ids;
- __cpuid(cpu_info, 0);
- num_ids = cpu_info[0];
- __cpuid(cpu_info, 0x80000000);
- if (num_ids > 0) {
- __cpuid(cpu_info, 0x00000001);
- __supported = (cpu_info[2] & BIT(9)) ? TRUE : FALSE;
- }
+ int cpu_info[4] = { 0 }, num_ids;
+ __cpuid(cpu_info, 0);
+ num_ids = cpu_info[0];
+ __cpuid(cpu_info, 0x80000000);
+ if (num_ids > 0) {
+ __cpuid(cpu_info, 0x00000001);
+ __supported = (cpu_info[2] & BIT(9)) ? TRUE : FALSE;
+ }
#endif /* DDRAW_HAVE_RGB32_TO_I420_ASM */
- DDRAW_DEBUG_INFO("SSSE3 supported = %s", __supported ? "YES" : "NO");
+ DDRAW_DEBUG_INFO("SSSE3 supported = %s", __supported ? "YES" : "NO");
- return __supported;
+ return __supported;
}
#if DDRAW_HAVE_RGB32_TO_I420_INTRIN
@@ -327,105 +326,102 @@ static BOOL _tdav_producer_screencast_have_ssse3()
static void _tdav_producer_screencast_rgb32_to_yuv420_intrin_ssse3(uint8_t *yuvPtr, const uint8_t *rgbPtr, int width, int height)
{
- // rgbPtr contains (samplesCount * 16) bytes
- // yPtr contains samplesCount bytes
- const int samplesCount = (width * height); // "width" and "height" are in samples
- const uint8_t *rgbPtr_;
- uint8_t* yPtr_ = yuvPtr, *uPtr_ = (yPtr_ + samplesCount), *vPtr_ = uPtr_ + (samplesCount >> 2);
- __m128i mmRgb0, mmRgb1, mmRgb2, mmRgb3, mmY0, mmY1, mmY;
- __m128i mmRgbU0, mmRgbU1, mmRgbV0, mmRgbV1;
-
- // Convert 16 RGBA samples to 16 Y samples
- rgbPtr_ = rgbPtr;
- /* const */__m128i yCoeffs = _mm_load_si128((__m128i*)kYCoeffs);
- /* const */__m128i y16 = _mm_load_si128((__m128i*)kY16);
- for(int i = 0; i < samplesCount; i += 16)
- {
- // load 16 RGBA samples
- _mm_store_si128(&mmRgb0, _mm_load_si128((__m128i*)rgbPtr_)); // 4 RGBA samples
- _mm_store_si128(&mmRgb1, _mm_load_si128((__m128i*)&rgbPtr_[16])); // 4 RGBA samples
- _mm_store_si128(&mmRgb2, _mm_load_si128((__m128i*)&rgbPtr_[32])); // 4 RGBA samples
- _mm_store_si128(&mmRgb3, _mm_load_si128((__m128i*)&rgbPtr_[48])); // 4 RGBA samples
-
- _mm_store_si128(&mmRgb0, _mm_maddubs_epi16(mmRgb0/*unsigned*/, yCoeffs/*signed*/)); // mmRgb0 = ((yCoeffs[j] * mmRgb0[j]) + (yCoeffs[j + 1] * mmRgb0[j + 1]))
- _mm_store_si128(&mmRgb1, _mm_maddubs_epi16(mmRgb1/*unsigned*/, yCoeffs/*signed*/));
- _mm_store_si128(&mmRgb2, _mm_maddubs_epi16(mmRgb2/*unsigned*/, yCoeffs/*signed*/));
- _mm_store_si128(&mmRgb3, _mm_maddubs_epi16(mmRgb3/*unsigned*/, yCoeffs/*signed*/));
-
- _mm_store_si128(&mmY0, _mm_hadd_epi16(mmRgb0, mmRgb1)); // horizontal add
- _mm_store_si128(&mmY1, _mm_hadd_epi16(mmRgb2, mmRgb3));
-
- _mm_store_si128(&mmY0, _mm_srai_epi16(mmY0, 7)); // >> 7
- _mm_store_si128(&mmY1, _mm_srai_epi16(mmY1, 7));
-
- _mm_store_si128(&mmY0, _mm_add_epi16(mmY0, y16)); // + 16
- _mm_store_si128(&mmY1, _mm_add_epi16(mmY1, y16));
-
- _mm_store_si128(&mmY, _mm_packus_epi16(mmY0, mmY1)); // Saturate(I16 -> U8)
-
- _mm_store_si128((__m128i*)yPtr_, mmY);
-
- rgbPtr_ += 64; // 16samples * 4bytes
- yPtr_ += 16; // 16samples * 1byte
- }
+ // rgbPtr contains (samplesCount * 16) bytes
+ // yPtr contains samplesCount bytes
+ const int samplesCount = (width * height); // "width" and "height" are in samples
+ const uint8_t *rgbPtr_;
+ uint8_t* yPtr_ = yuvPtr, *uPtr_ = (yPtr_ + samplesCount), *vPtr_ = uPtr_ + (samplesCount >> 2);
+ __m128i mmRgb0, mmRgb1, mmRgb2, mmRgb3, mmY0, mmY1, mmY;
+ __m128i mmRgbU0, mmRgbU1, mmRgbV0, mmRgbV1;
+
+ // Convert 16 RGBA samples to 16 Y samples
+ rgbPtr_ = rgbPtr;
+ /* const */__m128i yCoeffs = _mm_load_si128((__m128i*)kYCoeffs);
+ /* const */__m128i y16 = _mm_load_si128((__m128i*)kY16);
+ for(int i = 0; i < samplesCount; i += 16) {
+ // load 16 RGBA samples
+ _mm_store_si128(&mmRgb0, _mm_load_si128((__m128i*)rgbPtr_)); // 4 RGBA samples
+ _mm_store_si128(&mmRgb1, _mm_load_si128((__m128i*)&rgbPtr_[16])); // 4 RGBA samples
+ _mm_store_si128(&mmRgb2, _mm_load_si128((__m128i*)&rgbPtr_[32])); // 4 RGBA samples
+ _mm_store_si128(&mmRgb3, _mm_load_si128((__m128i*)&rgbPtr_[48])); // 4 RGBA samples
+
+ _mm_store_si128(&mmRgb0, _mm_maddubs_epi16(mmRgb0/*unsigned*/, yCoeffs/*signed*/)); // mmRgb0 = ((yCoeffs[j] * mmRgb0[j]) + (yCoeffs[j + 1] * mmRgb0[j + 1]))
+ _mm_store_si128(&mmRgb1, _mm_maddubs_epi16(mmRgb1/*unsigned*/, yCoeffs/*signed*/));
+ _mm_store_si128(&mmRgb2, _mm_maddubs_epi16(mmRgb2/*unsigned*/, yCoeffs/*signed*/));
+ _mm_store_si128(&mmRgb3, _mm_maddubs_epi16(mmRgb3/*unsigned*/, yCoeffs/*signed*/));
+
+ _mm_store_si128(&mmY0, _mm_hadd_epi16(mmRgb0, mmRgb1)); // horizontal add
+ _mm_store_si128(&mmY1, _mm_hadd_epi16(mmRgb2, mmRgb3));
+
+ _mm_store_si128(&mmY0, _mm_srai_epi16(mmY0, 7)); // >> 7
+ _mm_store_si128(&mmY1, _mm_srai_epi16(mmY1, 7));
+
+ _mm_store_si128(&mmY0, _mm_add_epi16(mmY0, y16)); // + 16
+ _mm_store_si128(&mmY1, _mm_add_epi16(mmY1, y16));
+
+ _mm_store_si128(&mmY, _mm_packus_epi16(mmY0, mmY1)); // Saturate(I16 -> U8)
+
+ _mm_store_si128((__m128i*)yPtr_, mmY);
+
+ rgbPtr_ += 64; // 16samples * 4bytes
+ yPtr_ += 16; // 16samples * 1byte
+ }
- // U+V planes
- /* const */__m128i uCoeffs = _mm_load_si128((__m128i*)kUCoeffs);
- /* const */__m128i vCoeffs = _mm_load_si128((__m128i*)kVCoeffs);
- /* const */__m128i rgbaShuffleDuplicate = _mm_load_si128((__m128i*)kRGBAShuffleDuplicate);
- /* const */__m128i uv128 = _mm_load_si128((__m128i*)kUV128);
- rgbPtr_ = rgbPtr;
- for(int i = 0; i < samplesCount; )
- {
- // load 16 RGBA samples
- _mm_store_si128(&mmRgb0, _mm_load_si128((__m128i*)rgbPtr_)); // 4 RGBA samples
- _mm_store_si128(&mmRgb1, _mm_load_si128((__m128i*)&rgbPtr_[16])); // 4 RGBA samples
- _mm_store_si128(&mmRgb2, _mm_load_si128((__m128i*)&rgbPtr_[32])); // 4 RGBA samples
- _mm_store_si128(&mmRgb3, _mm_load_si128((__m128i*)&rgbPtr_[48])); // 4 RGBA samples
-
- _mm_store_si128(&mmRgb0, _mm_shuffle_epi8(mmRgb0, rgbaShuffleDuplicate));
- _mm_store_si128(&mmRgb1, _mm_shuffle_epi8(mmRgb1, rgbaShuffleDuplicate));
- _mm_store_si128(&mmRgb2, _mm_shuffle_epi8(mmRgb2, rgbaShuffleDuplicate));
- _mm_store_si128(&mmRgb3, _mm_shuffle_epi8(mmRgb3, rgbaShuffleDuplicate));
-
- _mm_store_si128(&mmRgbU0, _mm_unpacklo_epi64(mmRgb0, mmRgb1));
- _mm_store_si128(&mmRgbV0, _mm_unpackhi_epi64(mmRgb0, mmRgb1)); // same as mmRgbU0: Use _mm_store_si128??
- _mm_store_si128(&mmRgbU1, _mm_unpacklo_epi64(mmRgb2, mmRgb3));
- _mm_store_si128(&mmRgbV1, _mm_unpackhi_epi64(mmRgb2, mmRgb3)); // same as mmRgbU0: Use _mm_store_si128??
-
- _mm_store_si128(&mmRgbU0, _mm_maddubs_epi16(mmRgbU0/*unsigned*/, uCoeffs/*signed*/));
- _mm_store_si128(&mmRgbV0, _mm_maddubs_epi16(mmRgbV0/*unsigned*/, vCoeffs/*signed*/));
- _mm_store_si128(&mmRgbU1, _mm_maddubs_epi16(mmRgbU1/*unsigned*/, uCoeffs/*signed*/));
- _mm_store_si128(&mmRgbV1, _mm_maddubs_epi16(mmRgbV1/*unsigned*/, vCoeffs/*signed*/));
-
- _mm_store_si128(&mmY0, _mm_hadd_epi16(mmRgbU0, mmRgbU1)); // horizontal add
- _mm_store_si128(&mmY1, _mm_hadd_epi16(mmRgbV0, mmRgbV1));
-
- _mm_store_si128(&mmY0, _mm_srai_epi16(mmY0, 8)); // >> 8
- _mm_store_si128(&mmY1, _mm_srai_epi16(mmY1, 8));
-
- _mm_store_si128(&mmY0, _mm_add_epi16(mmY0, uv128)); // + 128
- _mm_store_si128(&mmY1, _mm_add_epi16(mmY1, uv128));
-
- // Y contains 8 samples for U then 8 samples for V
- _mm_store_si128(&mmY, _mm_packus_epi16(mmY0, mmY1)); // Saturate(I16 -> U8)
- _mm_storel_pi((__m64*)uPtr_, _mm_load_ps((float*)&mmY));
- _mm_storeh_pi((__m64*)vPtr_, _mm_load_ps((float*)&mmY));
-
- uPtr_ += 8; // 8samples * 1byte
- vPtr_ += 8; // 8samples * 1byte
-
- // move to next 16 samples
- i += 16;
- rgbPtr_ += 64; // 16samples * 4bytes
-
- if (/*i % width == 0*/ !(i & (width - 1)))
- {
- // skip next line
- i += width;
- rgbPtr_ += (width * 4);
- }
- }
+ // U+V planes
+ /* const */__m128i uCoeffs = _mm_load_si128((__m128i*)kUCoeffs);
+ /* const */__m128i vCoeffs = _mm_load_si128((__m128i*)kVCoeffs);
+ /* const */__m128i rgbaShuffleDuplicate = _mm_load_si128((__m128i*)kRGBAShuffleDuplicate);
+ /* const */__m128i uv128 = _mm_load_si128((__m128i*)kUV128);
+ rgbPtr_ = rgbPtr;
+ for(int i = 0; i < samplesCount; ) {
+ // load 16 RGBA samples
+ _mm_store_si128(&mmRgb0, _mm_load_si128((__m128i*)rgbPtr_)); // 4 RGBA samples
+ _mm_store_si128(&mmRgb1, _mm_load_si128((__m128i*)&rgbPtr_[16])); // 4 RGBA samples
+ _mm_store_si128(&mmRgb2, _mm_load_si128((__m128i*)&rgbPtr_[32])); // 4 RGBA samples
+ _mm_store_si128(&mmRgb3, _mm_load_si128((__m128i*)&rgbPtr_[48])); // 4 RGBA samples
+
+ _mm_store_si128(&mmRgb0, _mm_shuffle_epi8(mmRgb0, rgbaShuffleDuplicate));
+ _mm_store_si128(&mmRgb1, _mm_shuffle_epi8(mmRgb1, rgbaShuffleDuplicate));
+ _mm_store_si128(&mmRgb2, _mm_shuffle_epi8(mmRgb2, rgbaShuffleDuplicate));
+ _mm_store_si128(&mmRgb3, _mm_shuffle_epi8(mmRgb3, rgbaShuffleDuplicate));
+
+ _mm_store_si128(&mmRgbU0, _mm_unpacklo_epi64(mmRgb0, mmRgb1));
+ _mm_store_si128(&mmRgbV0, _mm_unpackhi_epi64(mmRgb0, mmRgb1)); // same as mmRgbU0: Use _mm_store_si128??
+ _mm_store_si128(&mmRgbU1, _mm_unpacklo_epi64(mmRgb2, mmRgb3));
+ _mm_store_si128(&mmRgbV1, _mm_unpackhi_epi64(mmRgb2, mmRgb3)); // same as mmRgbU0: Use _mm_store_si128??
+
+ _mm_store_si128(&mmRgbU0, _mm_maddubs_epi16(mmRgbU0/*unsigned*/, uCoeffs/*signed*/));
+ _mm_store_si128(&mmRgbV0, _mm_maddubs_epi16(mmRgbV0/*unsigned*/, vCoeffs/*signed*/));
+ _mm_store_si128(&mmRgbU1, _mm_maddubs_epi16(mmRgbU1/*unsigned*/, uCoeffs/*signed*/));
+ _mm_store_si128(&mmRgbV1, _mm_maddubs_epi16(mmRgbV1/*unsigned*/, vCoeffs/*signed*/));
+
+ _mm_store_si128(&mmY0, _mm_hadd_epi16(mmRgbU0, mmRgbU1)); // horizontal add
+ _mm_store_si128(&mmY1, _mm_hadd_epi16(mmRgbV0, mmRgbV1));
+
+ _mm_store_si128(&mmY0, _mm_srai_epi16(mmY0, 8)); // >> 8
+ _mm_store_si128(&mmY1, _mm_srai_epi16(mmY1, 8));
+
+ _mm_store_si128(&mmY0, _mm_add_epi16(mmY0, uv128)); // + 128
+ _mm_store_si128(&mmY1, _mm_add_epi16(mmY1, uv128));
+
+ // Y contains 8 samples for U then 8 samples for V
+ _mm_store_si128(&mmY, _mm_packus_epi16(mmY0, mmY1)); // Saturate(I16 -> U8)
+ _mm_storel_pi((__m64*)uPtr_, _mm_load_ps((float*)&mmY));
+ _mm_storeh_pi((__m64*)vPtr_, _mm_load_ps((float*)&mmY));
+
+ uPtr_ += 8; // 8samples * 1byte
+ vPtr_ += 8; // 8samples * 1byte
+
+ // move to next 16 samples
+ i += 16;
+ rgbPtr_ += 64; // 16samples * 4bytes
+
+ if (/*i % width == 0*/ !(i & (width - 1))) {
+ // skip next line
+ i += width;
+ rgbPtr_ += (width * 4);
+ }
+ }
}
#endif /* DDRAW_HAVE_RGB32_TO_I420_INTRIN */
@@ -502,917 +498,917 @@ static void _tdav_producer_screencast_rgb32_to_yuv420_intrin_ssse3(uint8_t *yuvP
__declspec(naked) __declspec(align(DDRAW_MEM_ALIGNMENT))
static void _tdav_producer_screencast_rgb32_to_yuv420_asm_ssse3(uint8_t *yuvPtr, const uint8_t *rgbPtr, int width, int height)
{
- __asm {
- push esi
- push edi
- push ebx
- /*** Y Samples ***/
- mov edx, [esp + 12 + 4] // yuvPtr
- mov eax, [esp + 12 + 8] // rgbPtr
- mov ecx, [esp + 12 + 12] // width
- imul ecx, [esp + 12 + 16] // (width * height) = samplesCount
-
- movdqa xmm7, kYCoeffs // yCoeffs
- movdqa xmm6, kY16 // y16
- /* loopY start */
-loopY:
- // load 16 RGBA samples
- movdqa xmm0, [eax] // mmRgb0
- movdqa xmm1, [eax + 16] // mmRgb1
- movdqa xmm2, [eax + 32] // mmRgb2
- movdqa xmm3, [eax + 48] // mmRgb3
- lea eax, [eax + 64] // rgbPtr_ += 64
- // (yCoeffs[0] * mmRgbX[0]) + (yCoeffs[1] * mmRgbX[1])
- pmaddubsw xmm0, xmm7
- pmaddubsw xmm1, xmm7
- pmaddubsw xmm2, xmm7
- pmaddubsw xmm3, xmm7
- // horizontal add
- phaddw xmm0, xmm1
- phaddw xmm2, xmm3
- // >> 7
- psraw xmm0, 7
- psraw xmm2, 7
- // + 16
- paddw xmm0, xmm6
- paddw xmm2, xmm6
- // Saturate(I16 -> U8) - Packs
- packuswb xmm0, xmm2
- // Copy to yuvPtr
- movdqa [edx], xmm0
- lea edx, [edx + 16] // yPtr_ += 16
- sub ecx, 16 // samplesCount -= 16
- jnz loopY // goto loop if (samplesCount != 0)
-
- //==================================//
- //=========== UV Samples ===========//
- //==================================//
- mov esi, [esp + 12 + 4] // yuvPtr
- mov eax, [esp + 12 + 8] // rgbPtr
- mov ecx, [esp + 12 + 12] // width
- imul ecx, [esp + 12 + 16] // (width * height) = samplesCount
- mov edx, ecx
- shr edx, 2 // edx = samplesCount / 4
- add esi, ecx // [[esi = uPtr_]]
- mov edi, esi // edi = uPtr_
- add edi, edx // [[edi = uPtr_ + edx = uPtr_ + (samplesCount / 4) = vPtr_]]
- xor edx, edx // edx = 0 = i
- mov ebx, [esp + 12 + 12] // ebx = width
- sub ebx, 1 // ebx = (width - 1)
-
- movdqa xmm7, kUCoeffs // uCoeffs
- movdqa xmm6, kVCoeffs // vCoeffs
- movdqa xmm5, kRGBAShuffleDuplicate // rgbaShuffleDuplicate
- movdqa xmm4, kUV128 // uv128
-
- /* loopUV start */
-loopUV:
- // load 16 RGBA samples
- movdqa xmm0, [eax] // mmRgb0
- movdqa xmm1, [eax + 16] // mmRgb1
- movdqa xmm2, [eax + 32] // mmRgb2
- movdqa xmm3, [eax + 48] // mmRgb3
- lea eax, [eax + 64] // rgbPtr_ += 64
-
- pshufb xmm0, xmm5
- pshufb xmm1, xmm5
- pshufb xmm2, xmm5
- pshufb xmm3, xmm5
-
- punpcklqdq xmm0, xmm1 // mmRgbU0
- punpcklqdq xmm2, xmm3 // mmRgbU1
- movdqa xmm1, xmm0 // mmRgbV0
- movdqa xmm3, xmm2 // mmRgbV1
-
- pmaddubsw xmm0, xmm7 // mmRgbU0
- pmaddubsw xmm1, xmm6 // mmRgbV0
- pmaddubsw xmm2, xmm7 // mmRgbU1
- pmaddubsw xmm3, xmm6 // mmRgbV1
-
- phaddw xmm0, xmm2 // mmY0
- phaddw xmm1, xmm3 // mmY1
-
- psraw xmm0, 8
- psraw xmm1, 8
-
- paddw xmm0, xmm4
- paddw xmm1, xmm4
-
- packuswb xmm0, xmm1
- movlps [esi], xmm0
- movhps [edi], xmm0
-
- lea esi, [esi + 8]
- lea edi, [edi + 8]
-
- add edx, 16 // i += 16;
- push edx // save edx
- and edx, ebx // edx = (ebx & ebx) = (ebx & (width - 1)) = (ebx % width)
- cmp edx, 0 // (ebx % width) == 0 ?
- pop edx // restore edx
- jne loopUV_NextLine
-
- // loopUV_EndOfLine: ((ebx % width) == 0)
- add ebx, 1// change ebx value from width-1 to width
- add edx, ebx // i += width
- lea eax, [eax + 4 * ebx]// rgbPtr_ += (width * 4);
- sub ebx, 1// change back ebx value to width - 1
-loopUV_NextLine:
- cmp edx, ecx
- jl loopUV
-
- pop ebx
- pop edi
- pop esi
- ret
- }
+ __asm {
+ push esi
+ push edi
+ push ebx
+ /*** Y Samples ***/
+ mov edx, [esp + 12 + 4] // yuvPtr
+ mov eax, [esp + 12 + 8] // rgbPtr
+ mov ecx, [esp + 12 + 12] // width
+ imul ecx, [esp + 12 + 16] // (width * height) = samplesCount
+
+ movdqa xmm7, kYCoeffs // yCoeffs
+ movdqa xmm6, kY16 // y16
+ /* loopY start */
+ loopY:
+ // load 16 RGBA samples
+ movdqa xmm0, [eax] // mmRgb0
+ movdqa xmm1, [eax + 16] // mmRgb1
+ movdqa xmm2, [eax + 32] // mmRgb2
+ movdqa xmm3, [eax + 48] // mmRgb3
+ lea eax, [eax + 64] // rgbPtr_ += 64
+ // (yCoeffs[0] * mmRgbX[0]) + (yCoeffs[1] * mmRgbX[1])
+ pmaddubsw xmm0, xmm7
+ pmaddubsw xmm1, xmm7
+ pmaddubsw xmm2, xmm7
+ pmaddubsw xmm3, xmm7
+ // horizontal add
+ phaddw xmm0, xmm1
+ phaddw xmm2, xmm3
+ // >> 7
+ psraw xmm0, 7
+ psraw xmm2, 7
+ // + 16
+ paddw xmm0, xmm6
+ paddw xmm2, xmm6
+ // Saturate(I16 -> U8) - Packs
+ packuswb xmm0, xmm2
+ // Copy to yuvPtr
+ movdqa [edx], xmm0
+ lea edx, [edx + 16] // yPtr_ += 16
+ sub ecx, 16 // samplesCount -= 16
+ jnz loopY // goto loop if (samplesCount != 0)
+
+ //==================================//
+ //=========== UV Samples ===========//
+ //==================================//
+ mov esi, [esp + 12 + 4] // yuvPtr
+ mov eax, [esp + 12 + 8] // rgbPtr
+ mov ecx, [esp + 12 + 12] // width
+ imul ecx, [esp + 12 + 16] // (width * height) = samplesCount
+ mov edx, ecx
+ shr edx, 2 // edx = samplesCount / 4
+ add esi, ecx // [[esi = uPtr_]]
+ mov edi, esi // edi = uPtr_
+ add edi, edx // [[edi = uPtr_ + edx = uPtr_ + (samplesCount / 4) = vPtr_]]
+ xor edx, edx // edx = 0 = i
+ mov ebx, [esp + 12 + 12] // ebx = width
+ sub ebx, 1 // ebx = (width - 1)
+
+ movdqa xmm7, kUCoeffs // uCoeffs
+ movdqa xmm6, kVCoeffs // vCoeffs
+ movdqa xmm5, kRGBAShuffleDuplicate // rgbaShuffleDuplicate
+ movdqa xmm4, kUV128 // uv128
+
+ /* loopUV start */
+ loopUV:
+ // load 16 RGBA samples
+ movdqa xmm0, [eax] // mmRgb0
+ movdqa xmm1, [eax + 16] // mmRgb1
+ movdqa xmm2, [eax + 32] // mmRgb2
+ movdqa xmm3, [eax + 48] // mmRgb3
+ lea eax, [eax + 64] // rgbPtr_ += 64
+
+ pshufb xmm0, xmm5
+ pshufb xmm1, xmm5
+ pshufb xmm2, xmm5
+ pshufb xmm3, xmm5
+
+ punpcklqdq xmm0, xmm1 // mmRgbU0
+ punpcklqdq xmm2, xmm3 // mmRgbU1
+ movdqa xmm1, xmm0 // mmRgbV0
+ movdqa xmm3, xmm2 // mmRgbV1
+
+ pmaddubsw xmm0, xmm7 // mmRgbU0
+ pmaddubsw xmm1, xmm6 // mmRgbV0
+ pmaddubsw xmm2, xmm7 // mmRgbU1
+ pmaddubsw xmm3, xmm6 // mmRgbV1
+
+ phaddw xmm0, xmm2 // mmY0
+ phaddw xmm1, xmm3 // mmY1
+
+ psraw xmm0, 8
+ psraw xmm1, 8
+
+ paddw xmm0, xmm4
+ paddw xmm1, xmm4
+
+ packuswb xmm0, xmm1
+ movlps [esi], xmm0
+ movhps [edi], xmm0
+
+ lea esi, [esi + 8]
+ lea edi, [edi + 8]
+
+ add edx, 16 // i += 16;
+ push edx // save edx
+ and edx, ebx // edx = (ebx & ebx) = (ebx & (width - 1)) = (ebx % width)
+ cmp edx, 0 // (ebx % width) == 0 ?
+ pop edx // restore edx
+ jne loopUV_NextLine
+
+ // loopUV_EndOfLine: ((ebx % width) == 0)
+ add ebx, 1// change ebx value from width-1 to width
+ add edx, ebx // i += width
+ lea eax, [eax + 4 * ebx]// rgbPtr_ += (width * 4);
+ sub ebx, 1// change back ebx value to width - 1
+ loopUV_NextLine:
+ cmp edx, ecx
+ jl loopUV
+
+ pop ebx
+ pop edi
+ pop esi
+ ret
+ }
}
#endif /* DDRAW_HAVE_RGB32_TO_I420_ASM */
/* ============ Media Producer Interface ================= */
static int _tdav_producer_screencast_ddraw_set(tmedia_producer_t *p_self, const tmedia_param_t* pc_param)
{
- int ret = 0;
- tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
+ int ret = 0;
+ tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
- if (!p_ddraw || !pc_param) {
- DDRAW_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!p_ddraw || !pc_param) {
+ DDRAW_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if (pc_param->value_type == tmedia_pvt_int64) {
- if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd")) {
- p_ddraw->hwnd_preview = (HWND)*((int64_t*)pc_param->value);
- }
- else if (tsk_striequals(pc_param->key, "src-hwnd")) {
- p_ddraw->hwnd_src = (HWND)*((int64_t*)pc_param->value);
- }
- }
- else if (pc_param->value_type == tmedia_pvt_int32) {
- if (tsk_striequals(pc_param->key, "mute")) {
- p_ddraw->b_muted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0);
- }
- }
+ if (pc_param->value_type == tmedia_pvt_int64) {
+ if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd")) {
+ p_ddraw->hwnd_preview = (HWND)*((int64_t*)pc_param->value);
+ }
+ else if (tsk_striequals(pc_param->key, "src-hwnd")) {
+ p_ddraw->hwnd_src = (HWND)*((int64_t*)pc_param->value);
+ }
+ }
+ else if (pc_param->value_type == tmedia_pvt_int32) {
+ if (tsk_striequals(pc_param->key, "mute")) {
+ p_ddraw->b_muted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0);
+ }
+ }
- return ret;
+ return ret;
}
static int _tdav_producer_screencast_ddraw_prepare(tmedia_producer_t* p_self, const tmedia_codec_t* pc_codec)
{
- tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
- int ret = 0;
- HRESULT hr = DD_OK;
+ tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
+ int ret = 0;
+ HRESULT hr = DD_OK;
#if 0
- DDPIXELFORMAT DDPixelFormat;
+ DDPIXELFORMAT DDPixelFormat;
#endif
- DDSURFACEDESC ddsd;
+ DDSURFACEDESC ddsd;
- if (!p_ddraw || !pc_codec) {
- DDRAW_DEBUG_ERROR("Invalid parameter");
- DDRAW_CHECK_HR(hr = E_INVALIDARG);
- }
+ if (!p_ddraw || !pc_codec) {
+ DDRAW_DEBUG_ERROR("Invalid parameter");
+ DDRAW_CHECK_HR(hr = E_INVALIDARG);
+ }
- tsk_safeobj_lock(p_ddraw);
+ tsk_safeobj_lock(p_ddraw);
- // check support for DirectDraw again
- if (!tdav_producer_screencast_ddraw_plugin_is_supported()) {
- DDRAW_CHECK_HR(hr = E_FAIL);
- }
+ // check support for DirectDraw again
+ if (!tdav_producer_screencast_ddraw_plugin_is_supported()) {
+ DDRAW_CHECK_HR(hr = E_FAIL);
+ }
- TMEDIA_PRODUCER(p_ddraw)->video.fps = TMEDIA_CODEC_VIDEO(pc_codec)->out.fps;
- TMEDIA_PRODUCER(p_ddraw)->video.width = TMEDIA_CODEC_VIDEO(pc_codec)->out.width;
- TMEDIA_PRODUCER(p_ddraw)->video.height = TMEDIA_CODEC_VIDEO(pc_codec)->out.height;
-
- // Hack the codec to avoid flipping
- TMEDIA_CODEC_VIDEO(pc_codec)->out.flip = tsk_false;
+ TMEDIA_PRODUCER(p_ddraw)->video.fps = TMEDIA_CODEC_VIDEO(pc_codec)->out.fps;
+ TMEDIA_PRODUCER(p_ddraw)->video.width = TMEDIA_CODEC_VIDEO(pc_codec)->out.width;
+ TMEDIA_PRODUCER(p_ddraw)->video.height = TMEDIA_CODEC_VIDEO(pc_codec)->out.height;
- DDRAW_DEBUG_INFO("Prepare with fps:%d, width:%d; height:%d", TMEDIA_PRODUCER(p_ddraw)->video.fps, TMEDIA_PRODUCER(p_ddraw)->video.width, TMEDIA_PRODUCER(p_ddraw)->video.height);
+ // Hack the codec to avoid flipping
+ TMEDIA_CODEC_VIDEO(pc_codec)->out.flip = tsk_false;
- if (!p_ddraw->ddrawModule.lpDD || !p_ddraw->ddrawModule.hDLL) {
- DDRAW_CHECK_HR(hr = _tdav_producer_screencast_create_module(&p_ddraw->ddrawModule));
- }
- DDRAW_CHECK_HR(hr = p_ddraw->ddrawModule.lpDD->SetCooperativeLevel(NULL, DDSCL_NORMAL));
-
- if (!p_ddraw->p_surf_primary) {
- ZeroMemory(&ddsd, sizeof(ddsd));
- ddsd.dwSize = sizeof(ddsd);
- ddsd.dwFlags = DDSD_CAPS;
- ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE;
-
- DDRAW_CHECK_HR(hr = p_ddraw->ddrawModule.lpDD->CreateSurface(&ddsd, &p_ddraw->p_surf_primary, NULL));
- }
+ DDRAW_DEBUG_INFO("Prepare with fps:%d, width:%d; height:%d", TMEDIA_PRODUCER(p_ddraw)->video.fps, TMEDIA_PRODUCER(p_ddraw)->video.width, TMEDIA_PRODUCER(p_ddraw)->video.height);
+
+ if (!p_ddraw->ddrawModule.lpDD || !p_ddraw->ddrawModule.hDLL) {
+ DDRAW_CHECK_HR(hr = _tdav_producer_screencast_create_module(&p_ddraw->ddrawModule));
+ }
+ DDRAW_CHECK_HR(hr = p_ddraw->ddrawModule.lpDD->SetCooperativeLevel(NULL, DDSCL_NORMAL));
+
+ if (!p_ddraw->p_surf_primary) {
+ ZeroMemory(&ddsd, sizeof(ddsd));
+ ddsd.dwSize = sizeof(ddsd);
+ ddsd.dwFlags = DDSD_CAPS;
+ ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE;
+
+ DDRAW_CHECK_HR(hr = p_ddraw->ddrawModule.lpDD->CreateSurface(&ddsd, &p_ddraw->p_surf_primary, NULL));
+ }
#if 0
- ZeroMemory(&DDPixelFormat, sizeof(DDPixelFormat));
- DDPixelFormat.dwSize = sizeof(DDPixelFormat);
- DDRAW_CHECK_HR(hr = DDRAW_VTBL(p_ddraw->p_surf_primary)->GetPixelFormat(p_ddraw->p_surf_primary, &DDPixelFormat));
- DDRAW_DEBUG_INFO("dwRGBBitCount:%d, dwRBitMask:%x, dwGBitMask:%x, dwBBitMask:%x, dwRGBAlphaBitMask:%x",
- DDPixelFormat.dwRGBBitCount, DDPixelFormat.dwRBitMask, DDPixelFormat.dwGBitMask, DDPixelFormat.dwBBitMask, DDPixelFormat.dwRGBAlphaBitMask);
- if ((TMEDIA_PRODUCER(p_ddraw)->video.chroma = _tdav_producer_screencast_get_chroma(&DDPixelFormat)) == tmedia_chroma_none) {
- DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
- }
+ ZeroMemory(&DDPixelFormat, sizeof(DDPixelFormat));
+ DDPixelFormat.dwSize = sizeof(DDPixelFormat);
+ DDRAW_CHECK_HR(hr = DDRAW_VTBL(p_ddraw->p_surf_primary)->GetPixelFormat(p_ddraw->p_surf_primary, &DDPixelFormat));
+ DDRAW_DEBUG_INFO("dwRGBBitCount:%d, dwRBitMask:%x, dwGBitMask:%x, dwBBitMask:%x, dwRGBAlphaBitMask:%x",
+ DDPixelFormat.dwRGBBitCount, DDPixelFormat.dwRBitMask, DDPixelFormat.dwGBitMask, DDPixelFormat.dwBBitMask, DDPixelFormat.dwRGBAlphaBitMask);
+ if ((TMEDIA_PRODUCER(p_ddraw)->video.chroma = _tdav_producer_screencast_get_chroma(&DDPixelFormat)) == tmedia_chroma_none) {
+ DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
+ }
#else
- ZeroMemory(&ddsd, sizeof(ddsd));
- ddsd.dwSize = sizeof(ddsd);
- ddsd.dwFlags = DDSD_HEIGHT | DDSD_WIDTH | DDSD_PITCH | DDSD_PIXELFORMAT;
- DDRAW_CHECK_HR(hr = p_ddraw->p_surf_primary->GetSurfaceDesc(&ddsd));
- DDRAW_DEBUG_INFO("Prepare with neg. width:%d, height:%d, pitch=%ld", ddsd.dwWidth, ddsd.dwHeight, ddsd.lPitch);
- TMEDIA_PRODUCER(p_ddraw)->video.width = ddsd.dwWidth;
- TMEDIA_PRODUCER(p_ddraw)->video.height = ddsd.dwHeight;
- p_ddraw->n_buff_rgb_bitscount = ddsd.ddpfPixelFormat.dwRGBBitCount;
- DDRAW_DEBUG_INFO("Prepare with dwRGBBitCount:%d, dwRBitMask:%x, dwGBitMask:%x, dwBBitMask:%x, dwRGBAlphaBitMask:%x",
- ddsd.ddpfPixelFormat.dwRGBBitCount, ddsd.ddpfPixelFormat.dwRBitMask, ddsd.ddpfPixelFormat.dwGBitMask, ddsd.ddpfPixelFormat.dwBBitMask, ddsd.ddpfPixelFormat.dwRGBAlphaBitMask);
- if ((TMEDIA_PRODUCER(p_ddraw)->video.chroma = _tdav_producer_screencast_get_chroma(&ddsd.ddpfPixelFormat)) == tmedia_chroma_none) {
- DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
- }
+ ZeroMemory(&ddsd, sizeof(ddsd));
+ ddsd.dwSize = sizeof(ddsd);
+ ddsd.dwFlags = DDSD_HEIGHT | DDSD_WIDTH | DDSD_PITCH | DDSD_PIXELFORMAT;
+ DDRAW_CHECK_HR(hr = p_ddraw->p_surf_primary->GetSurfaceDesc(&ddsd));
+ DDRAW_DEBUG_INFO("Prepare with neg. width:%d, height:%d, pitch=%ld", ddsd.dwWidth, ddsd.dwHeight, ddsd.lPitch);
+ TMEDIA_PRODUCER(p_ddraw)->video.width = ddsd.dwWidth;
+ TMEDIA_PRODUCER(p_ddraw)->video.height = ddsd.dwHeight;
+ p_ddraw->n_buff_rgb_bitscount = ddsd.ddpfPixelFormat.dwRGBBitCount;
+ DDRAW_DEBUG_INFO("Prepare with dwRGBBitCount:%d, dwRBitMask:%x, dwGBitMask:%x, dwBBitMask:%x, dwRGBAlphaBitMask:%x",
+ ddsd.ddpfPixelFormat.dwRGBBitCount, ddsd.ddpfPixelFormat.dwRBitMask, ddsd.ddpfPixelFormat.dwGBitMask, ddsd.ddpfPixelFormat.dwBBitMask, ddsd.ddpfPixelFormat.dwRGBAlphaBitMask);
+ if ((TMEDIA_PRODUCER(p_ddraw)->video.chroma = _tdav_producer_screencast_get_chroma(&ddsd.ddpfPixelFormat)) == tmedia_chroma_none) {
+ DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
+ }
#endif
- // allocate RGB buffer
- DDRAW_CHECK_HR(hr = _tdav_producer_screencast_alloc_rgb_buff(p_ddraw, ddsd.dwWidth, ddsd.dwHeight, ddsd.ddpfPixelFormat.dwRGBBitCount));
+ // allocate RGB buffer
+ DDRAW_CHECK_HR(hr = _tdav_producer_screencast_alloc_rgb_buff(p_ddraw, ddsd.dwWidth, ddsd.dwHeight, ddsd.ddpfPixelFormat.dwRGBBitCount));
- // Check if we can use built-in chroma conversion
+ // Check if we can use built-in chroma conversion
#if DDRAW_HAVE_RGB32_TO_I420_INTRIN || DDRAW_HAVE_RGB32_TO_I420_ASM
- p_ddraw->b_have_rgb32_conv =
- _tdav_producer_screencast_have_ssse3() // SSSE3 supported
- && DDRAW_IS_ALIGNED(TMEDIA_PRODUCER(p_ddraw)->video.width, DDRAW_MEM_ALIGNMENT) // width multiple of 16
- /* && DDRAW_IS_ALIGNED(TMEDIA_PRODUCER(p_ddraw)->video.height, DDRAW_MEM_ALIGNMENT) // height multiple of 16 */
- && TMEDIA_PRODUCER(p_ddraw)->video.chroma == tmedia_chroma_rgb32; // Primary screen RGB32
- if (p_ddraw->b_have_rgb32_conv) {
- TMEDIA_PRODUCER(p_ddraw)->video.chroma = tmedia_chroma_yuv420p;
- }
+ p_ddraw->b_have_rgb32_conv =
+ _tdav_producer_screencast_have_ssse3() // SSSE3 supported
+ && DDRAW_IS_ALIGNED(TMEDIA_PRODUCER(p_ddraw)->video.width, DDRAW_MEM_ALIGNMENT) // width multiple of 16
+ /* && DDRAW_IS_ALIGNED(TMEDIA_PRODUCER(p_ddraw)->video.height, DDRAW_MEM_ALIGNMENT) // height multiple of 16 */
+ && TMEDIA_PRODUCER(p_ddraw)->video.chroma == tmedia_chroma_rgb32; // Primary screen RGB32
+ if (p_ddraw->b_have_rgb32_conv) {
+ TMEDIA_PRODUCER(p_ddraw)->video.chroma = tmedia_chroma_yuv420p;
+ }
#endif
- DDRAW_DEBUG_INFO("RGB32 -> I420 conversion supported: %s", p_ddraw->b_have_rgb32_conv ? "YES" : "NO");
+ DDRAW_DEBUG_INFO("RGB32 -> I420 conversion supported: %s", p_ddraw->b_have_rgb32_conv ? "YES" : "NO");
- // allocate YUV buffer
- if (p_ddraw->b_have_rgb32_conv) {
- DDRAW_CHECK_HR(hr = _tdav_producer_screencast_alloc_yuv_buff(p_ddraw, (DWORD)TMEDIA_PRODUCER(p_ddraw)->video.width, (DWORD)TMEDIA_PRODUCER(p_ddraw)->video.height));
- }
+ // allocate YUV buffer
+ if (p_ddraw->b_have_rgb32_conv) {
+ DDRAW_CHECK_HR(hr = _tdav_producer_screencast_alloc_yuv_buff(p_ddraw, (DWORD)TMEDIA_PRODUCER(p_ddraw)->video.width, (DWORD)TMEDIA_PRODUCER(p_ddraw)->video.height));
+ }
- // BitmapInfo for preview
+ // BitmapInfo for preview
#if DDRAW_PREVIEW
- ZeroMemory(&p_ddraw->bi_preview, sizeof(p_ddraw->bi_preview));
- p_ddraw->bi_preview.bmiHeader.biSize = (DWORD)sizeof(BITMAPINFOHEADER);
- p_ddraw->bi_preview.bmiHeader.biCompression = BI_RGB;
- p_ddraw->bi_preview.bmiHeader.biPlanes = 1;
- p_ddraw->bi_preview.bmiHeader.biWidth = ddsd.dwWidth;
- p_ddraw->bi_preview.bmiHeader.biHeight = ddsd.dwHeight;
- p_ddraw->bi_preview.bmiHeader.biBitCount = (WORD)ddsd.ddpfPixelFormat.dwRGBBitCount;
- p_ddraw->bi_preview.bmiHeader.biSizeImage = (p_ddraw->bi_preview.bmiHeader.biWidth * p_ddraw->bi_preview.bmiHeader.biHeight * (p_ddraw->bi_preview.bmiHeader.biBitCount >> 3));
+ ZeroMemory(&p_ddraw->bi_preview, sizeof(p_ddraw->bi_preview));
+ p_ddraw->bi_preview.bmiHeader.biSize = (DWORD)sizeof(BITMAPINFOHEADER);
+ p_ddraw->bi_preview.bmiHeader.biCompression = BI_RGB;
+ p_ddraw->bi_preview.bmiHeader.biPlanes = 1;
+ p_ddraw->bi_preview.bmiHeader.biWidth = ddsd.dwWidth;
+ p_ddraw->bi_preview.bmiHeader.biHeight = ddsd.dwHeight;
+ p_ddraw->bi_preview.bmiHeader.biBitCount = (WORD)ddsd.ddpfPixelFormat.dwRGBBitCount;
+ p_ddraw->bi_preview.bmiHeader.biSizeImage = (p_ddraw->bi_preview.bmiHeader.biWidth * p_ddraw->bi_preview.bmiHeader.biHeight * (p_ddraw->bi_preview.bmiHeader.biBitCount >> 3));
#endif /* DDRAW_PREVIEW */
#if DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING
- if (!p_ddraw->p_timer_mgr) {
- p_ddraw->p_timer_mgr = tsk_timer_manager_create();
- }
+ if (!p_ddraw->p_timer_mgr) {
+ p_ddraw->p_timer_mgr = tsk_timer_manager_create();
+ }
#endif /* DDRAW_CPU_MONITOR ||DDRAW_CPU_THROTTLING */
#if DDRAW_CPU_THROTTLING
- p_ddraw->cpu.fps_target = (TMEDIA_PRODUCER(p_ddraw)->video.fps + DDRAW_CPU_THROTTLING_FPS_MIN) >> 1; // start with minimum fps and increase the value based on the fps
+ p_ddraw->cpu.fps_target = (TMEDIA_PRODUCER(p_ddraw)->video.fps + DDRAW_CPU_THROTTLING_FPS_MIN) >> 1; // start with minimum fps and increase the value based on the fps
#endif /* DDRAW_CPU_THROTTLING */
-bail:
- tsk_safeobj_unlock(p_ddraw);
- return SUCCEEDED(hr) ? 0 : -1;
+ bail:
+ tsk_safeobj_unlock(p_ddraw);
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int _tdav_producer_screencast_ddraw_start(tmedia_producer_t* p_self)
{
- tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
- int ret = 0;
+ tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
+ int ret = 0;
- if (!p_ddraw) {
- DDRAW_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!p_ddraw) {
+ DDRAW_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- ret = tsk_safeobj_lock(p_ddraw);
+ ret = tsk_safeobj_lock(p_ddraw);
- p_ddraw->b_paused = tsk_false;
+ p_ddraw->b_paused = tsk_false;
- if (p_ddraw->b_started) {
- DDRAW_DEBUG_INFO("Already started");
- goto bail;
- }
+ if (p_ddraw->b_started) {
+ DDRAW_DEBUG_INFO("Already started");
+ goto bail;
+ }
- p_ddraw->b_started = tsk_true;
+ p_ddraw->b_started = tsk_true;
- // Create notify events (must be done here before starting the grabber thread)
+ // Create notify events (must be done here before starting the grabber thread)
#if DDRAW_MT
- for (int i = 0; i < sizeof(p_ddraw->mt.h_events) / sizeof(p_ddraw->mt.h_events[0]); ++i) {
- if (!p_ddraw->mt.h_events[i] && !(p_ddraw->mt.h_events[i] = CreateEvent(NULL, FALSE, FALSE, NULL))) {
- DDRAW_DEBUG_ERROR("Failed to create event at %d", i);
- ret = -1;
- goto bail;
- }
- }
+ for (int i = 0; i < sizeof(p_ddraw->mt.h_events) / sizeof(p_ddraw->mt.h_events[0]); ++i) {
+ if (!p_ddraw->mt.h_events[i] && !(p_ddraw->mt.h_events[i] = CreateEvent(NULL, FALSE, FALSE, NULL))) {
+ DDRAW_DEBUG_ERROR("Failed to create event at %d", i);
+ ret = -1;
+ goto bail;
+ }
+ }
#endif /* DDRAW_MT */
- ret = tsk_thread_create(&p_ddraw->tid[0], _tdav_producer_screencast_grap_thread, p_ddraw);
- if (ret != 0) {
- DDRAW_DEBUG_ERROR("Failed to create thread");
- goto bail;
- }
- //BOOL okSetTA = CeSetThreadAffinity((HANDLE)p_ddraw->tid[0], 0x01);
+ ret = tsk_thread_create(&p_ddraw->tid[0], _tdav_producer_screencast_grap_thread, p_ddraw);
+ if (ret != 0) {
+ DDRAW_DEBUG_ERROR("Failed to create thread");
+ goto bail;
+ }
+ //BOOL okSetTA = CeSetThreadAffinity((HANDLE)p_ddraw->tid[0], 0x01);
#if DDRAW_MT
- ret = tsk_thread_create(&p_ddraw->mt.tid[0], _tdav_producer_screencast_mt_encode_thread, p_ddraw);
- if (ret != 0) {
- DDRAW_DEBUG_ERROR("Failed to create thread");
- goto bail;
- }
- //okSetTA = CeSetThreadAffinity((HANDLE)p_ddraw->mt.tid[0], 0x02);
+ ret = tsk_thread_create(&p_ddraw->mt.tid[0], _tdav_producer_screencast_mt_encode_thread, p_ddraw);
+ if (ret != 0) {
+ DDRAW_DEBUG_ERROR("Failed to create thread");
+ goto bail;
+ }
+ //okSetTA = CeSetThreadAffinity((HANDLE)p_ddraw->mt.tid[0], 0x02);
#endif /* DDRAW_MT */
#if DDRAW_HIGH_PRIO_MEMCPY
- if (p_ddraw->tid[0]) {
- tsk_thread_set_priority(p_ddraw->tid[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
- }
+ if (p_ddraw->tid[0]) {
+ tsk_thread_set_priority(p_ddraw->tid[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
+ }
#if DDRAW_MT
- if (p_ddraw->mt.tid[0]) {
- tsk_thread_set_priority(p_ddraw->mt.tid[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
- }
+ if (p_ddraw->mt.tid[0]) {
+ tsk_thread_set_priority(p_ddraw->mt.tid[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
+ }
#endif /* DDRAW_MT */
#endif /* DDRAW_HIGH_PRIO_MEMCPY */
#if DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING
- ret = tsk_timer_manager_start(p_ddraw->p_timer_mgr);
- if (ret == 0) {
- p_ddraw->cpu.id_timer = tsk_timer_manager_schedule(p_ddraw->p_timer_mgr, DDRAW_CPU_SCHEDULE_TIMEOUT, _tdav_producer_screencast_timer_cb, p_ddraw);
- }
- else {
- ret = 0; // not fatal error
- DDRAW_DEBUG_WARN("Failed to start CPU timer");
- }
+ ret = tsk_timer_manager_start(p_ddraw->p_timer_mgr);
+ if (ret == 0) {
+ p_ddraw->cpu.id_timer = tsk_timer_manager_schedule(p_ddraw->p_timer_mgr, DDRAW_CPU_SCHEDULE_TIMEOUT, _tdav_producer_screencast_timer_cb, p_ddraw);
+ }
+ else {
+ ret = 0; // not fatal error
+ DDRAW_DEBUG_WARN("Failed to start CPU timer");
+ }
#endif /* DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING */
-bail:
- if (ret) {
- p_ddraw->b_started = tsk_false;
- if (p_ddraw->tid[0]) {
- tsk_thread_join(&(p_ddraw->tid[0]));
- }
+ bail:
+ if (ret) {
+ p_ddraw->b_started = tsk_false;
+ if (p_ddraw->tid[0]) {
+ tsk_thread_join(&(p_ddraw->tid[0]));
+ }
#if DDRAW_MT
- if (p_ddraw->mt.tid[0]) {
- tsk_thread_join(&(p_ddraw->mt.tid[0]));
- }
+ if (p_ddraw->mt.tid[0]) {
+ tsk_thread_join(&(p_ddraw->mt.tid[0]));
+ }
#endif /* DDRAW_MT */
- }
- ret = tsk_safeobj_unlock(p_ddraw);
+ }
+ ret = tsk_safeobj_unlock(p_ddraw);
- return ret;
+ return ret;
}
static int _tdav_producer_screencast_ddraw_pause(tmedia_producer_t* p_self)
{
- tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
+ tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
- if (!p_ddraw) {
- DDRAW_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!p_ddraw) {
+ DDRAW_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- tsk_safeobj_lock(p_ddraw);
+ tsk_safeobj_lock(p_ddraw);
- p_ddraw->b_paused = tsk_true;
- goto bail;
+ p_ddraw->b_paused = tsk_true;
+ goto bail;
-bail:
- tsk_safeobj_unlock(p_ddraw);
+ bail:
+ tsk_safeobj_unlock(p_ddraw);
- return 0;
+ return 0;
}
static int _tdav_producer_screencast_ddraw_stop(tmedia_producer_t* p_self)
{
- tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
+ tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)p_self;
- if (!p_ddraw) {
- DDRAW_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!p_ddraw) {
+ DDRAW_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- tsk_safeobj_lock(p_ddraw);
+ tsk_safeobj_lock(p_ddraw);
- if (!p_ddraw->b_started) {
- DDRAW_DEBUG_INFO("Already stopped");
- goto bail;
- }
+ if (!p_ddraw->b_started) {
+ DDRAW_DEBUG_INFO("Already stopped");
+ goto bail;
+ }
- p_ddraw->b_started = tsk_false;
- p_ddraw->b_paused = tsk_false;
+ p_ddraw->b_started = tsk_false;
+ p_ddraw->b_paused = tsk_false;
#if DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING
- if (p_ddraw->p_timer_mgr) {
- tsk_timer_manager_stop(p_ddraw->p_timer_mgr);
- }
+ if (p_ddraw->p_timer_mgr) {
+ tsk_timer_manager_stop(p_ddraw->p_timer_mgr);
+ }
#endif /* DDRAW_CPU_MONITOR ||DDRAW_CPU_THROTTLING */
- // stop grabber thread
- if (p_ddraw->tid[0]) {
- tsk_thread_join(&(p_ddraw->tid[0]));
- }
+ // stop grabber thread
+ if (p_ddraw->tid[0]) {
+ tsk_thread_join(&(p_ddraw->tid[0]));
+ }
#if DDRAW_MT
- if (p_ddraw->mt.h_events[DDRAW_MT_EVENT_SHUTDOWN_INDEX]){
- SetEvent(p_ddraw->mt.h_events[DDRAW_MT_EVENT_SHUTDOWN_INDEX]);
- }
- if (p_ddraw->mt.tid[0]) {
- tsk_thread_join(&(p_ddraw->mt.tid[0]));
- }
- for (int i = 0; i < sizeof(p_ddraw->mt.h_events) / sizeof(p_ddraw->mt.h_events[0]); ++i) {
- if (p_ddraw->mt.h_events[i]) {
- CloseHandle(p_ddraw->mt.h_events[i]);
- p_ddraw->mt.h_events[i] = NULL;
- }
- }
+ if (p_ddraw->mt.h_events[DDRAW_MT_EVENT_SHUTDOWN_INDEX]) {
+ SetEvent(p_ddraw->mt.h_events[DDRAW_MT_EVENT_SHUTDOWN_INDEX]);
+ }
+ if (p_ddraw->mt.tid[0]) {
+ tsk_thread_join(&(p_ddraw->mt.tid[0]));
+ }
+ for (int i = 0; i < sizeof(p_ddraw->mt.h_events) / sizeof(p_ddraw->mt.h_events[0]); ++i) {
+ if (p_ddraw->mt.h_events[i]) {
+ CloseHandle(p_ddraw->mt.h_events[i]);
+ p_ddraw->mt.h_events[i] = NULL;
+ }
+ }
#endif
-bail:
- tsk_safeobj_unlock(p_ddraw);
+ bail:
+ tsk_safeobj_unlock(p_ddraw);
- return 0;
+ return 0;
}
static int _tdav_producer_screencast_grab(tdav_producer_screencast_ddraw_t* p_self)
{
- int ret = 0;
- HRESULT hr = S_OK;
- DDSURFACEDESC ddsd;
- DWORD nSizeWithoutPadding, nRowLengthInBytes, lockFlags;
- tmedia_producer_t* p_base = TMEDIA_PRODUCER(p_self);
- LPVOID lpBuffToSend, lpBuffYUV;
- BOOL bDirectMemSurfAccess = DDRAW_MEM_SURFACE_DIRECT_ACCESS;
+ int ret = 0;
+ HRESULT hr = S_OK;
+ DDSURFACEDESC ddsd;
+ DWORD nSizeWithoutPadding, nRowLengthInBytes, lockFlags;
+ tmedia_producer_t* p_base = TMEDIA_PRODUCER(p_self);
+ LPVOID lpBuffToSend, lpBuffYUV;
+ BOOL bDirectMemSurfAccess = DDRAW_MEM_SURFACE_DIRECT_ACCESS;
#if DDRAW_MT
- INT iMtFreeBuffIndex = -1;
+ INT iMtFreeBuffIndex = -1;
#endif
- //--uint64_t timeStart, timeEnd;
+ //--uint64_t timeStart, timeEnd;
- //--timeStart = tsk_time_now();
+ //--timeStart = tsk_time_now();
- if (!p_self) {
- DDRAW_CHECK_HR(hr = E_INVALIDARG);
- }
+ if (!p_self) {
+ DDRAW_CHECK_HR(hr = E_INVALIDARG);
+ }
- if (!p_self->b_started) {
+ if (!p_self->b_started) {
#if defined(E_ILLEGAL_METHOD_CALL)
- DDRAW_CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
+ DDRAW_CHECK_HR(hr = E_ILLEGAL_METHOD_CALL);
#else
- DDRAW_CHECK_HR(hr = E_FAIL);
+ DDRAW_CHECK_HR(hr = E_FAIL);
#endif
- }
+ }
#if DDRAW_MT
- {
- INT iIndex = 0;
- for (; (iIndex < DDRAW_MT_COUNT) && (p_self->mt.b_flags_array[iIndex] == TRUE); ++iIndex);
- if (iIndex == DDRAW_MT_COUNT) {
- goto bail;
- }
- }
+ {
+ INT iIndex = 0;
+ for (; (iIndex < DDRAW_MT_COUNT) && (p_self->mt.b_flags_array[iIndex] == TRUE); ++iIndex);
+ if (iIndex == DDRAW_MT_COUNT) {
+ goto bail;
+ }
+ }
#endif /* DDRAW_MT */
- if (p_self->p_surf_primary->IsLost() == DDERR_SURFACELOST) {
- DDRAW_CHECK_HR(hr = p_self->p_surf_primary->Restore());
- }
+ if (p_self->p_surf_primary->IsLost() == DDERR_SURFACELOST) {
+ DDRAW_CHECK_HR(hr = p_self->p_surf_primary->Restore());
+ }
- ddsd.dwSize = sizeof(ddsd);
- ddsd.dwFlags = DDSD_HEIGHT | DDSD_WIDTH | DDSD_PITCH | DDSD_PIXELFORMAT;
- lockFlags = DDLOCK_READONLY |
+ ddsd.dwSize = sizeof(ddsd);
+ ddsd.dwFlags = DDSD_HEIGHT | DDSD_WIDTH | DDSD_PITCH | DDSD_PIXELFORMAT;
+ lockFlags = DDLOCK_READONLY |
#if TDAV_UNDER_WINDOWS_CE
- // This flag has a slightly different name under Windows CE vs. Desktop, but it's the same behavior.
- DDLOCK_WAITNOTBUSY;
+ // This flag has a slightly different name under Windows CE vs. Desktop, but it's the same behavior.
+ DDLOCK_WAITNOTBUSY;
#else
- DDLOCK_WAIT;
+ DDLOCK_WAIT;
#endif
- DDRAW_CHECK_HR(hr = p_self->p_surf_primary->Lock(NULL, &ddsd, lockFlags, NULL));
- // make sure surface size and number of bits per pixel haven't changed
- if (TMEDIA_PRODUCER(p_self)->video.width != ddsd.dwWidth || TMEDIA_PRODUCER(p_self)->video.height != ddsd.dwHeight || p_self->n_buff_rgb_bitscount != ddsd.ddpfPixelFormat.dwRGBBitCount) {
- tsk_size_t n_buff_rgb_new;
- tmedia_chroma_t chroma_new;
- DDRAW_DEBUG_WARN("surface has changed: width %d<>%d or height %d<>%d or rgb_bits_count %d<>%d",
- p_base->video.width, ddsd.dwWidth,
- p_base->video.height, ddsd.dwHeight,
- p_self->n_buff_rgb_bitscount, ddsd.ddpfPixelFormat.dwRGBBitCount);
- if ((chroma_new = _tdav_producer_screencast_get_chroma(&ddsd.ddpfPixelFormat)) == tmedia_chroma_none) {
- DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
- }
- // allocate RGB buffer
- n_buff_rgb_new = (ddsd.dwWidth * ddsd.dwHeight * (ddsd.ddpfPixelFormat.dwRGBBitCount >> 3));
- if (p_self->n_buff_rgb < n_buff_rgb_new) {
- hr = _tdav_producer_screencast_alloc_rgb_buff(p_self, ddsd.dwWidth, ddsd.dwHeight, ddsd.ddpfPixelFormat.dwRGBBitCount);
- if (FAILED(hr)) {
- p_self->p_surf_primary->Unlock(NULL); // unlock before going to bail
- DDRAW_CHECK_HR(hr);
- }
- }
- p_base->video.width = ddsd.dwWidth;
- p_base->video.height = ddsd.dwHeight;
- p_base->video.chroma = chroma_new;
- p_self->n_buff_rgb_bitscount = ddsd.ddpfPixelFormat.dwRGBBitCount;
- // Check if we can use built-in chroma conversion
+ DDRAW_CHECK_HR(hr = p_self->p_surf_primary->Lock(NULL, &ddsd, lockFlags, NULL));
+ // make sure surface size and number of bits per pixel haven't changed
+ if (TMEDIA_PRODUCER(p_self)->video.width != ddsd.dwWidth || TMEDIA_PRODUCER(p_self)->video.height != ddsd.dwHeight || p_self->n_buff_rgb_bitscount != ddsd.ddpfPixelFormat.dwRGBBitCount) {
+ tsk_size_t n_buff_rgb_new;
+ tmedia_chroma_t chroma_new;
+ DDRAW_DEBUG_WARN("surface has changed: width %d<>%d or height %d<>%d or rgb_bits_count %d<>%d",
+ p_base->video.width, ddsd.dwWidth,
+ p_base->video.height, ddsd.dwHeight,
+ p_self->n_buff_rgb_bitscount, ddsd.ddpfPixelFormat.dwRGBBitCount);
+ if ((chroma_new = _tdav_producer_screencast_get_chroma(&ddsd.ddpfPixelFormat)) == tmedia_chroma_none) {
+ DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
+ }
+ // allocate RGB buffer
+ n_buff_rgb_new = (ddsd.dwWidth * ddsd.dwHeight * (ddsd.ddpfPixelFormat.dwRGBBitCount >> 3));
+ if (p_self->n_buff_rgb < n_buff_rgb_new) {
+ hr = _tdav_producer_screencast_alloc_rgb_buff(p_self, ddsd.dwWidth, ddsd.dwHeight, ddsd.ddpfPixelFormat.dwRGBBitCount);
+ if (FAILED(hr)) {
+ p_self->p_surf_primary->Unlock(NULL); // unlock before going to bail
+ DDRAW_CHECK_HR(hr);
+ }
+ }
+ p_base->video.width = ddsd.dwWidth;
+ p_base->video.height = ddsd.dwHeight;
+ p_base->video.chroma = chroma_new;
+ p_self->n_buff_rgb_bitscount = ddsd.ddpfPixelFormat.dwRGBBitCount;
+ // Check if we can use built-in chroma conversion
#if DDRAW_HAVE_RGB32_TO_I420_INTRIN || DDRAW_HAVE_RGB32_TO_I420_ASM
- p_self->b_have_rgb32_conv =
- _tdav_producer_screencast_have_ssse3() // SSSE3 supported
- && DDRAW_IS_ALIGNED(p_base->video.width, DDRAW_MEM_ALIGNMENT) // width multiple of 16
- /* && DDRAW_IS_ALIGNED(p_base->video.height, DDRAW_MEM_ALIGNMENT) // height multiple of 16 */
- && p_base->video.chroma == tmedia_chroma_rgb32; // Primary screen RGB32
- if (p_self->b_have_rgb32_conv) {
- p_base->video.chroma = tmedia_chroma_yuv420p;
- }
+ p_self->b_have_rgb32_conv =
+ _tdav_producer_screencast_have_ssse3() // SSSE3 supported
+ && DDRAW_IS_ALIGNED(p_base->video.width, DDRAW_MEM_ALIGNMENT) // width multiple of 16
+ /* && DDRAW_IS_ALIGNED(p_base->video.height, DDRAW_MEM_ALIGNMENT) // height multiple of 16 */
+ && p_base->video.chroma == tmedia_chroma_rgb32; // Primary screen RGB32
+ if (p_self->b_have_rgb32_conv) {
+ p_base->video.chroma = tmedia_chroma_yuv420p;
+ }
#endif
- DDRAW_DEBUG_INFO("RGB32 -> I420 conversion supported: %s", p_self->b_have_rgb32_conv ? "YES" : "NO");
- // allocate YUV buffer
- if (p_self->b_have_rgb32_conv) {
- hr = _tdav_producer_screencast_alloc_yuv_buff(p_self, (DWORD)p_base->video.width, (DWORD)p_base->video.height);
- if (FAILED(hr)) {
- p_self->p_surf_primary->Unlock(NULL); // unlock before going to bail
- DDRAW_CHECK_HR(hr);
- }
- }
- // preview
+ DDRAW_DEBUG_INFO("RGB32 -> I420 conversion supported: %s", p_self->b_have_rgb32_conv ? "YES" : "NO");
+ // allocate YUV buffer
+ if (p_self->b_have_rgb32_conv) {
+ hr = _tdav_producer_screencast_alloc_yuv_buff(p_self, (DWORD)p_base->video.width, (DWORD)p_base->video.height);
+ if (FAILED(hr)) {
+ p_self->p_surf_primary->Unlock(NULL); // unlock before going to bail
+ DDRAW_CHECK_HR(hr);
+ }
+ }
+ // preview
#if DDRAW_PREVIEW
- p_self->bi_preview.bmiHeader.biWidth = ddsd.dwWidth;
- p_self->bi_preview.bmiHeader.biHeight = ddsd.dwHeight;
- p_self->bi_preview.bmiHeader.biBitCount = (WORD)ddsd.ddpfPixelFormat.dwRGBBitCount;
- p_self->bi_preview.bmiHeader.biSizeImage = (p_self->bi_preview.bmiHeader.biWidth * p_self->bi_preview.bmiHeader.biHeight * (p_self->bi_preview.bmiHeader.biBitCount >> 3));
+ p_self->bi_preview.bmiHeader.biWidth = ddsd.dwWidth;
+ p_self->bi_preview.bmiHeader.biHeight = ddsd.dwHeight;
+ p_self->bi_preview.bmiHeader.biBitCount = (WORD)ddsd.ddpfPixelFormat.dwRGBBitCount;
+ p_self->bi_preview.bmiHeader.biSizeImage = (p_self->bi_preview.bmiHeader.biWidth * p_self->bi_preview.bmiHeader.biHeight * (p_self->bi_preview.bmiHeader.biBitCount >> 3));
#endif /* DDRAW_PREVIEW */
- }
- nRowLengthInBytes = ddsd.dwWidth * (ddsd.ddpfPixelFormat.dwRGBBitCount >> 3);
- nSizeWithoutPadding = ddsd.dwHeight * nRowLengthInBytes;
-
- // init lpBuffToSend
- if (DDRAW_MEM_SURFACE_DIRECT_ACCESS && ddsd.lPitch == nRowLengthInBytes && (!p_self->b_have_rgb32_conv || DDRAW_IS_ALIGNED(ddsd.lpSurface, DDRAW_MEM_ALIGNMENT))) {
- // no padding
- lpBuffToSend = ddsd.lpSurface;
- bDirectMemSurfAccess = TRUE;
- }
- else {
- // with padding or copy requested
- UINT8 *pSurfBuff = (UINT8 *)ddsd.lpSurface, *pNegBuff = (UINT8 *)p_self->p_buff_rgb_aligned;
- DWORD y;
- bDirectMemSurfAccess = FALSE;
- //--timeStart = tsk_time_now();
- if (ddsd.lPitch == nRowLengthInBytes) {
- // copy without padding padding
- const UINT8* src = pSurfBuff;
- UINT8* dst = (UINT8*)p_self->p_buff_rgb_aligned;
- if (DDRAW_IS_ALIGNED(src, 16) && (nSizeWithoutPadding & 15) == 0) {
+ }
+ nRowLengthInBytes = ddsd.dwWidth * (ddsd.ddpfPixelFormat.dwRGBBitCount >> 3);
+ nSizeWithoutPadding = ddsd.dwHeight * nRowLengthInBytes;
+
+ // init lpBuffToSend
+ if (DDRAW_MEM_SURFACE_DIRECT_ACCESS && ddsd.lPitch == nRowLengthInBytes && (!p_self->b_have_rgb32_conv || DDRAW_IS_ALIGNED(ddsd.lpSurface, DDRAW_MEM_ALIGNMENT))) {
+ // no padding
+ lpBuffToSend = ddsd.lpSurface;
+ bDirectMemSurfAccess = TRUE;
+ }
+ else {
+ // with padding or copy requested
+ UINT8 *pSurfBuff = (UINT8 *)ddsd.lpSurface, *pNegBuff = (UINT8 *)p_self->p_buff_rgb_aligned;
+ DWORD y;
+ bDirectMemSurfAccess = FALSE;
+ //--timeStart = tsk_time_now();
+ if (ddsd.lPitch == nRowLengthInBytes) {
+ // copy without padding (pitch equals row length, surface is contiguous)
+ const UINT8* src = pSurfBuff;
+ UINT8* dst = (UINT8*)p_self->p_buff_rgb_aligned;
+ if (DDRAW_IS_ALIGNED(src, 16) && (nSizeWithoutPadding & 15) == 0) {
#if DDRAW_HAVE_RGB32_TO_I420_INTRIN || DDRAW_HAVE_RGB32_TO_I420_ASM
- if ((nSizeWithoutPadding & 127) == 0) {
- for (DWORD i = 0; i < nSizeWithoutPadding; i += 128, src += 128, dst += 128) {
+ if ((nSizeWithoutPadding & 127) == 0) {
+ for (DWORD i = 0; i < nSizeWithoutPadding; i += 128, src += 128, dst += 128) {
#if defined(DDRAW_COPY128_ASM)
- DDRAW_COPY128_ASM(dst, src);
+ DDRAW_COPY128_ASM(dst, src);
#else
- DDRAW_COPY128_INTRIN(dst, src);
+ DDRAW_COPY128_INTRIN(dst, src);
#endif /* DDRAW_COPY128_ASM */
- }
- }
- else if((nSizeWithoutPadding & 63) == 0) {
- for (DWORD i = 0; i < nSizeWithoutPadding; i += 64, src += 64, dst += 64) {
+ }
+ }
+ else if((nSizeWithoutPadding & 63) == 0) {
+ for (DWORD i = 0; i < nSizeWithoutPadding; i += 64, src += 64, dst += 64) {
#if defined(DDRAW_COPY64_ASM)
- DDRAW_COPY64_ASM(dst, src);
+ DDRAW_COPY64_ASM(dst, src);
#else
- DDRAW_COPY64_INTRIN(dst, src);
+ DDRAW_COPY64_INTRIN(dst, src);
#endif /* DDRAW_COPY64_ASM */
- }
- }
- else { // (nSizeWithoutPadding & 15) == 0
- for (DWORD i = 0; i < nSizeWithoutPadding; i += 16, src += 16, dst += 16) {
+ }
+ }
+ else { // (nSizeWithoutPadding & 15) == 0
+ for (DWORD i = 0; i < nSizeWithoutPadding; i += 16, src += 16, dst += 16) {
#if defined(DDRAW_COPY16_ASM)
- DDRAW_COPY16_ASM(dst, src);
+ DDRAW_COPY16_ASM(dst, src);
#else
- DDRAW_COPY16_INTRIN(dst, src);
+ DDRAW_COPY16_INTRIN(dst, src);
#endif /* DDRAW_COPY16_ASM */
- }
- }
+ }
+ }
#else // neither ASM nor INTRINSIC support
- CopyMemory(dst, src, nSizeWithoutPadding);
+ CopyMemory(dst, src, nSizeWithoutPadding);
#endif /* DDRAW_HAVE_RGB32_TO_I420_INTRIN || DDRAW_HAVE_RGB32_TO_I420_ASM */
- }
- else { // not 16bytes aligned
- CopyMemory(dst, src, nSizeWithoutPadding);
- }
- }
- else {
- // copy with padding padding
- for (y = 0; y < ddsd.dwHeight; ++y) {
- CopyMemory(pNegBuff, pSurfBuff, nRowLengthInBytes);
- pSurfBuff += ddsd.lPitch;
- pNegBuff += nRowLengthInBytes;
- }
- }
- lpBuffToSend = p_self->p_buff_rgb_aligned;
- //--timeEnd = tsk_time_now();
- //--DDRAW_DEBUG_INFO("Mem copy: start=%llu, end=%llu, duration=%llu", timeStart, timeEnd, (timeEnd - timeStart));
- }
- if (!bDirectMemSurfAccess) {
- // surface buffer no longer needed, unlock
- DDRAW_CHECK_HR(hr = p_self->p_surf_primary->Unlock(NULL));
- }
- // display preview
+ }
+ else { // not 16bytes aligned
+ CopyMemory(dst, src, nSizeWithoutPadding);
+ }
+ }
+ else {
+ // copy row-by-row, stripping the surface pitch padding
+ for (y = 0; y < ddsd.dwHeight; ++y) {
+ CopyMemory(pNegBuff, pSurfBuff, nRowLengthInBytes);
+ pSurfBuff += ddsd.lPitch;
+ pNegBuff += nRowLengthInBytes;
+ }
+ }
+ lpBuffToSend = p_self->p_buff_rgb_aligned;
+ //--timeEnd = tsk_time_now();
+ //--DDRAW_DEBUG_INFO("Mem copy: start=%llu, end=%llu, duration=%llu", timeStart, timeEnd, (timeEnd - timeStart));
+ }
+ if (!bDirectMemSurfAccess) {
+ // surface buffer no longer needed, unlock
+ DDRAW_CHECK_HR(hr = p_self->p_surf_primary->Unlock(NULL));
+ }
+ // display preview
#if DDRAW_PREVIEW
- if (p_self->hwnd_preview) {
- HWND hWnd; // copy for thread-safeness
- HDC hDC = GetDC((hWnd = p_self->hwnd_preview));
- if (hDC) {
- RECT rcPreview;
- if (GetWindowRect(hWnd, &rcPreview)) {
- LONG nPreviewWidth = (rcPreview.right - rcPreview.left);
- LONG nPreviewHeight = (rcPreview.bottom - rcPreview.top);
- StretchDIBits(
- hDC,
- 0, 0, nPreviewWidth, nPreviewHeight,
- 0, 0, p_self->bi_preview.bmiHeader.biWidth, p_self->bi_preview.bmiHeader.biHeight,
- lpBuffToSend,
- &p_self->bi_preview,
- DIB_RGB_COLORS,
- SRCCOPY);
- }
- ReleaseDC(hWnd, hDC);
- }
- }
+ if (p_self->hwnd_preview) {
+ HWND hWnd; // copy for thread-safeness
+ HDC hDC = GetDC((hWnd = p_self->hwnd_preview));
+ if (hDC) {
+ RECT rcPreview;
+ if (GetWindowRect(hWnd, &rcPreview)) {
+ LONG nPreviewWidth = (rcPreview.right - rcPreview.left);
+ LONG nPreviewHeight = (rcPreview.bottom - rcPreview.top);
+ StretchDIBits(
+ hDC,
+ 0, 0, nPreviewWidth, nPreviewHeight,
+ 0, 0, p_self->bi_preview.bmiHeader.biWidth, p_self->bi_preview.bmiHeader.biHeight,
+ lpBuffToSend,
+ &p_self->bi_preview,
+ DIB_RGB_COLORS,
+ SRCCOPY);
+ }
+ ReleaseDC(hWnd, hDC);
+ }
+ }
#endif /* DDRAW_PREVIEW */
- // check we have a free buffer
+ // check we have a free buffer
#if DDRAW_MT
- {
- for (INT iIndex = 0; iIndex < DDRAW_MT_COUNT; ++iIndex) {
- if (p_self->mt.b_flags_array[iIndex] != TRUE) {
- iMtFreeBuffIndex = iIndex;
- lpBuffYUV = p_self->mt.p_buff_yuv_aligned_array[iIndex];
- break;
- }
- }
- if (iMtFreeBuffIndex < 0) {
- lpBuffToSend = NULL; // do not waste time converting or encoding
- lpBuffYUV = NULL;
- }
- }
+ {
+ for (INT iIndex = 0; iIndex < DDRAW_MT_COUNT; ++iIndex) {
+ if (p_self->mt.b_flags_array[iIndex] != TRUE) {
+ iMtFreeBuffIndex = iIndex;
+ lpBuffYUV = p_self->mt.p_buff_yuv_aligned_array[iIndex];
+ break;
+ }
+ }
+ if (iMtFreeBuffIndex < 0) {
+ lpBuffToSend = NULL; // do not waste time converting or encoding
+ lpBuffYUV = NULL;
+ }
+ }
#else
- lpBuffYUV = p_self->p_buff_yuv_aligned;
+ lpBuffYUV = p_self->p_buff_yuv_aligned;
#endif /* DDRAW_MT */
- //--timeStart = tsk_time_now();
- if (lpBuffToSend && (lpBuffYUV || !p_self->b_have_rgb32_conv)) {
- if (p_self->b_have_rgb32_conv) {
- // Convert from RGB32 to I420
+ //--timeStart = tsk_time_now();
+ if (lpBuffToSend && (lpBuffYUV || !p_self->b_have_rgb32_conv)) {
+ if (p_self->b_have_rgb32_conv) {
+ // Convert from RGB32 to I420
#if DDRAW_HAVE_RGB32_TO_I420_ASM
- _tdav_producer_screencast_rgb32_to_yuv420_asm_ssse3((uint8_t*)lpBuffYUV, (const uint8_t*)lpBuffToSend, (int)p_base->video.width, (int)p_base->video.height);
+ _tdav_producer_screencast_rgb32_to_yuv420_asm_ssse3((uint8_t*)lpBuffYUV, (const uint8_t*)lpBuffToSend, (int)p_base->video.width, (int)p_base->video.height);
#elif DDRAW_HAVE_RGB32_TO_I420_INTRIN
- _tdav_producer_screencast_rgb32_to_yuv420_intrin_ssse3((uint8_t*)lpBuffYUV, (const uint8_t*)lpBuffToSend, (int)p_base->video.width, (int)p_base->video.height);
+ _tdav_producer_screencast_rgb32_to_yuv420_intrin_ssse3((uint8_t*)lpBuffYUV, (const uint8_t*)lpBuffToSend, (int)p_base->video.width, (int)p_base->video.height);
#else
- DDRAW_CHECK_HR(hr = E_NOTIMPL); // never called
+ DDRAW_CHECK_HR(hr = E_NOTIMPL); // never called
#endif
#if DDRAW_MT
- p_self->mt.b_flags_array[iMtFreeBuffIndex] = TRUE;
- if (!SetEvent(p_self->mt.h_events[iMtFreeBuffIndex])) {
- DDRAW_CHECK_HR(hr = E_FAIL);
- }
+ p_self->mt.b_flags_array[iMtFreeBuffIndex] = TRUE;
+ if (!SetEvent(p_self->mt.h_events[iMtFreeBuffIndex])) {
+ DDRAW_CHECK_HR(hr = E_FAIL);
+ }
#else
- p_base->enc_cb.callback(p_base->enc_cb.callback_data, lpBuffYUV, p_self->n_buff_yuv);
+ p_base->enc_cb.callback(p_base->enc_cb.callback_data, lpBuffYUV, p_self->n_buff_yuv);
#endif
- }
- else {
- // Send RGB32 buffer to the encode callback and let conversion be done by libyuv
- // do not multi-thread as we cannot perform chroma conversion and encoding in parallel
- p_base->enc_cb.callback(p_base->enc_cb.callback_data, lpBuffToSend, nSizeWithoutPadding);
- }
- }
- //--timeEnd = tsk_time_now();
- //--DDRAW_DEBUG_INFO("Encode callback: start=%llu, end=%llu, duration=%llu", timeStart, timeEnd, (timeEnd - timeStart));
-
- if (bDirectMemSurfAccess) {
- // surface buffer was used in preview and encode callback, unlock now
- DDRAW_CHECK_HR(hr = p_self->p_surf_primary->Unlock(NULL));
- }
+ }
+ else {
+ // Send RGB32 buffer to the encode callback and let conversion be done by libyuv
+ // do not multi-thread as we cannot perform chroma conversion and encoding in parallel
+ p_base->enc_cb.callback(p_base->enc_cb.callback_data, lpBuffToSend, nSizeWithoutPadding);
+ }
+ }
+ //--timeEnd = tsk_time_now();
+ //--DDRAW_DEBUG_INFO("Encode callback: start=%llu, end=%llu, duration=%llu", timeStart, timeEnd, (timeEnd - timeStart));
-bail:
- if (hr == DDERR_SURFACELOST) {
- /*hr = */p_self->p_surf_primary->Restore();
- hr = S_OK;
- }
+ if (bDirectMemSurfAccess) {
+ // surface buffer was used in preview and encode callback, unlock now
+ DDRAW_CHECK_HR(hr = p_self->p_surf_primary->Unlock(NULL));
+ }
- //--timeEnd = tsk_time_now();
- //--DDRAW_DEBUG_INFO("Grab and encode duration=%llu", (timeEnd - timeStart));
+ bail:
+ if (hr == DDERR_SURFACELOST) {
+ /*hr = */p_self->p_surf_primary->Restore();
+ hr = S_OK;
+ }
+
+ //--timeEnd = tsk_time_now();
+ //--DDRAW_DEBUG_INFO("Grab and encode duration=%llu", (timeEnd - timeStart));
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static tmedia_chroma_t _tdav_producer_screencast_get_chroma(const DDPIXELFORMAT* pixelFormat)
{
- HRESULT hr = DD_OK;
- if (pixelFormat->dwFlags != DDPF_RGB) {
- DDRAW_DEBUG_ERROR("dwFlags(%d) != DDPF_RGB", pixelFormat->dwFlags);
- DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
- }
- switch (pixelFormat->dwRGBBitCount) {
- case 32: // RGB32
- case 24: // RGB24
- // pixels must be aligned for fast copy
- if (pixelFormat->dwRBitMask != 0xff0000 || pixelFormat->dwGBitMask != 0xff00 || pixelFormat->dwBBitMask != 0xff || pixelFormat->dwRGBAlphaBitMask != 0) {
- DDRAW_DEBUG_ERROR("Pixels not aligned");
- }
- return pixelFormat->dwRGBBitCount == 24 ? tmedia_chroma_bgr24 : tmedia_chroma_rgb32;
- case 16: // RGB565
- // pixels must be aligned for fast copy
- if (pixelFormat->dwRBitMask != 0xF800 || pixelFormat->dwGBitMask != 0x7E0 || pixelFormat->dwBBitMask != 0x1F) {
- DDRAW_DEBUG_ERROR("Pixels not aligned");
- }
- return tmedia_chroma_rgb565le;
- default:
- DDRAW_DEBUG_ERROR("dwRGBBitCount(%d) != 24 and 32", pixelFormat->dwRGBBitCount);
- DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
- break;
- }
+ HRESULT hr = DD_OK;
+ if (pixelFormat->dwFlags != DDPF_RGB) {
+ DDRAW_DEBUG_ERROR("dwFlags(%d) != DDPF_RGB", pixelFormat->dwFlags);
+ DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
+ }
+ switch (pixelFormat->dwRGBBitCount) {
+ case 32: // RGB32
+ case 24: // RGB24
+ // pixels must be aligned for fast copy
+ if (pixelFormat->dwRBitMask != 0xff0000 || pixelFormat->dwGBitMask != 0xff00 || pixelFormat->dwBBitMask != 0xff || pixelFormat->dwRGBAlphaBitMask != 0) {
+ DDRAW_DEBUG_ERROR("Pixels not aligned");
+ }
+ return pixelFormat->dwRGBBitCount == 24 ? tmedia_chroma_bgr24 : tmedia_chroma_rgb32;
+ case 16: // RGB565
+ // pixels must be aligned for fast copy
+ if (pixelFormat->dwRBitMask != 0xF800 || pixelFormat->dwGBitMask != 0x7E0 || pixelFormat->dwBBitMask != 0x1F) {
+ DDRAW_DEBUG_ERROR("Pixels not aligned");
+ }
+ return tmedia_chroma_rgb565le;
+ default:
+ DDRAW_DEBUG_ERROR("dwRGBBitCount(%d) != 24 and 32", pixelFormat->dwRGBBitCount);
+ DDRAW_CHECK_HR(hr = DDERR_INVALIDCAPS);
+ break;
+ }
-bail:
- return tmedia_chroma_none;
+ bail:
+ return tmedia_chroma_none;
}
static HRESULT _tdav_producer_screencast_create_module(LPDDrawModule lpModule)
{
- typedef HRESULT (WINAPI *pDirectDrawCreateFunc)(_In_ GUID FAR *lpGUID,
- _Out_ LPDIRECTDRAW FAR *lplpDD,
- _In_ IUnknown FAR *pUnkOuter);
- HRESULT hr = S_OK;
- pDirectDrawCreateFunc DirectDrawCreate_ = NULL;
-
- if (!lpModule) {
- DDRAW_CHECK_HR(hr = E_INVALIDARG);
- }
+ typedef HRESULT (WINAPI *pDirectDrawCreateFunc)(_In_ GUID FAR *lpGUID,
+ _Out_ LPDIRECTDRAW FAR *lplpDD,
+ _In_ IUnknown FAR *pUnkOuter);
+ HRESULT hr = S_OK;
+ pDirectDrawCreateFunc DirectDrawCreate_ = NULL;
+
+ if (!lpModule) {
+ DDRAW_CHECK_HR(hr = E_INVALIDARG);
+ }
- if (!lpModule->hDLL && !(lpModule->hDLL = LoadLibrary(TEXT("ddraw.dll")))) {
- DDRAW_DEBUG_ERROR("Failed to load ddraw.dll: %d", GetLastError());
- DDRAW_CHECK_HR(hr = E_FAIL);
- }
- if (!lpModule->lpDD) {
- // Hum, "GetProcAddressA" is missing but ""GetProcAddressW" exists on CE
+ if (!lpModule->hDLL && !(lpModule->hDLL = LoadLibrary(TEXT("ddraw.dll")))) {
+ DDRAW_DEBUG_ERROR("Failed to load ddraw.dll: %d", GetLastError());
+ DDRAW_CHECK_HR(hr = E_FAIL);
+ }
+ if (!lpModule->lpDD) {
+ // Hum, "GetProcAddressA" is missing but ""GetProcAddressW" exists on CE
#if TDAV_UNDER_WINDOWS_CE
# define DirectDrawCreateName TEXT("DirectDrawCreate")
#else
# define DirectDrawCreateName "DirectDrawCreate"
#endif
- if (!(DirectDrawCreate_ = (pDirectDrawCreateFunc)GetProcAddress(lpModule->hDLL, DirectDrawCreateName))) {
- DDRAW_DEBUG_ERROR("Failed to find DirectDrawCreate in ddraw.dll: %d", GetLastError());
- DDRAW_CHECK_HR(hr = E_FAIL);
- }
- DDRAW_CHECK_HR(hr = DirectDrawCreate_(NULL, &lpModule->lpDD, NULL));
- }
+ if (!(DirectDrawCreate_ = (pDirectDrawCreateFunc)GetProcAddress(lpModule->hDLL, DirectDrawCreateName))) {
+ DDRAW_DEBUG_ERROR("Failed to find DirectDrawCreate in ddraw.dll: %d", GetLastError());
+ DDRAW_CHECK_HR(hr = E_FAIL);
+ }
+ DDRAW_CHECK_HR(hr = DirectDrawCreate_(NULL, &lpModule->lpDD, NULL));
+ }
-bail:
- return hr;
+ bail:
+ return hr;
}
static HRESULT _tdav_producer_screencast_alloc_rgb_buff(tdav_producer_screencast_ddraw_t* p_ddraw, DWORD w, DWORD h, DWORD bitsCount)
{
- HRESULT hr = S_OK;
- DWORD n_buff_rgb_new = (w * h * (bitsCount >> 3));
-
- if (p_ddraw->n_buff_rgb < n_buff_rgb_new) {
- p_ddraw->p_buff_rgb_aligned = tsk_realloc_aligned(p_ddraw->p_buff_rgb_aligned, n_buff_rgb_new, DDRAW_MEM_ALIGNMENT);
- if (!p_ddraw->p_buff_rgb_aligned) {
- p_ddraw->n_buff_rgb = 0;
- DDRAW_CHECK_HR(hr = DDERR_OUTOFMEMORY);
- }
- p_ddraw->n_buff_rgb = n_buff_rgb_new;
- }
+ HRESULT hr = S_OK;
+ DWORD n_buff_rgb_new = (w * h * (bitsCount >> 3));
+
+ if (p_ddraw->n_buff_rgb < n_buff_rgb_new) {
+ p_ddraw->p_buff_rgb_aligned = tsk_realloc_aligned(p_ddraw->p_buff_rgb_aligned, n_buff_rgb_new, DDRAW_MEM_ALIGNMENT);
+ if (!p_ddraw->p_buff_rgb_aligned) {
+ p_ddraw->n_buff_rgb = 0;
+ DDRAW_CHECK_HR(hr = DDERR_OUTOFMEMORY);
+ }
+ p_ddraw->n_buff_rgb = n_buff_rgb_new;
+ }
-bail:
- return hr;
+ bail:
+ return hr;
}
static HRESULT _tdav_producer_screencast_alloc_yuv_buff(tdav_producer_screencast_ddraw_t* p_ddraw, DWORD w, DWORD h)
{
- HRESULT hr = S_OK;
- void** pp_buff_yuv_aligned;
- int n_buff_yuv_aligned_count;
+ HRESULT hr = S_OK;
+ void** pp_buff_yuv_aligned;
+ int n_buff_yuv_aligned_count;
#if DDRAW_MT
- pp_buff_yuv_aligned = p_ddraw->mt.p_buff_yuv_aligned_array;
- n_buff_yuv_aligned_count = sizeof(p_ddraw->mt.p_buff_yuv_aligned_array)/sizeof(p_ddraw->mt.p_buff_yuv_aligned_array[0]);
+ pp_buff_yuv_aligned = p_ddraw->mt.p_buff_yuv_aligned_array;
+ n_buff_yuv_aligned_count = sizeof(p_ddraw->mt.p_buff_yuv_aligned_array)/sizeof(p_ddraw->mt.p_buff_yuv_aligned_array[0]);
#else
- pp_buff_yuv_aligned = &p_ddraw->p_buff_yuv_aligned;
- n_buff_yuv_aligned_count = 1;
+ pp_buff_yuv_aligned = &p_ddraw->p_buff_yuv_aligned;
+ n_buff_yuv_aligned_count = 1;
#endif /* DDRAW_MT */
- p_ddraw->n_buff_yuv = (w * h * 3) >> 1;
- for (int i = 0; i < n_buff_yuv_aligned_count; ++i) {
- pp_buff_yuv_aligned[i] = tsk_realloc_aligned(pp_buff_yuv_aligned[i], p_ddraw->n_buff_yuv, DDRAW_MEM_ALIGNMENT);
- if (!pp_buff_yuv_aligned[i]) {
- p_ddraw->n_buff_yuv = 0;
- DDRAW_CHECK_HR(hr = DDERR_OUTOFMEMORY);
- }
- }
+ p_ddraw->n_buff_yuv = (w * h * 3) >> 1;
+ for (int i = 0; i < n_buff_yuv_aligned_count; ++i) {
+ pp_buff_yuv_aligned[i] = tsk_realloc_aligned(pp_buff_yuv_aligned[i], p_ddraw->n_buff_yuv, DDRAW_MEM_ALIGNMENT);
+ if (!pp_buff_yuv_aligned[i]) {
+ p_ddraw->n_buff_yuv = 0;
+ DDRAW_CHECK_HR(hr = DDERR_OUTOFMEMORY);
+ }
+ }
-bail:
- return hr;
+ bail:
+ return hr;
}
static void* TSK_STDCALL _tdav_producer_screencast_grap_thread(void *arg)
{
- tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)arg;
- tmedia_producer_t* p_base = TMEDIA_PRODUCER(arg);
- int ret = 0;
+ tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)arg;
+ tmedia_producer_t* p_base = TMEDIA_PRODUCER(arg);
+ int ret = 0;
- // FPS manager
- uint64_t TimeNow, TimeLastFrame = 0;
- uint64_t TimeFrameDuration = (1000 / p_base->video.fps);
+ // FPS manager
+ uint64_t TimeNow, TimeLastFrame = 0;
+ uint64_t TimeFrameDuration = (1000 / p_base->video.fps);
- DDRAW_DEBUG_INFO("Grab thread -- START");
+ DDRAW_DEBUG_INFO("Grab thread -- START");
- while (ret == 0 && p_ddraw->b_started) {
+ while (ret == 0 && p_ddraw->b_started) {
#if DDRAW_CPU_THROTTLING
- TimeFrameDuration = (1000 / p_ddraw->cpu.fps_target);
+ TimeFrameDuration = (1000 / p_ddraw->cpu.fps_target);
#endif /* DDRAW_CPU_THROTTLING */
- TimeNow = tsk_time_now();
- if ((TimeNow - TimeLastFrame) > TimeFrameDuration) {
- if (!p_ddraw->b_muted && !p_ddraw->b_paused) {
- if (ret = _tdav_producer_screencast_grab(p_ddraw)) {
- goto next;
- }
- }
- TimeLastFrame = TimeNow;
- }
- else {
- tsk_thread_sleep(1);
+ TimeNow = tsk_time_now();
+ if ((TimeNow - TimeLastFrame) > TimeFrameDuration) {
+ if (!p_ddraw->b_muted && !p_ddraw->b_paused) {
+ if (ret = _tdav_producer_screencast_grab(p_ddraw)) {
+ goto next;
+ }
+ }
+ TimeLastFrame = TimeNow;
+ }
+ else {
+ tsk_thread_sleep(1);
#if 0
- DDRAW_DEBUG_INFO("Skip frame");
+ DDRAW_DEBUG_INFO("Skip frame");
#endif
- }
- next:
- ;
- }
- DDRAW_DEBUG_INFO("Grab thread -- STOP");
- return tsk_null;
+ }
+ next:
+ ;
+ }
+ DDRAW_DEBUG_INFO("Grab thread -- STOP");
+ return tsk_null;
}
#if DDRAW_MT
static void* TSK_STDCALL _tdav_producer_screencast_mt_encode_thread(void *arg)
{
- tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)arg;
- tmedia_producer_t* p_base = TMEDIA_PRODUCER(arg);
- DWORD dwEvent, dwIndex;
- int ret = 0;
- DWORD events_count = sizeof(p_ddraw->mt.h_events) / sizeof(p_ddraw->mt.h_events[0]);
-
- DDRAW_DEBUG_INFO("Encode MT thread -- START");
-
- while (ret == 0 && p_ddraw->b_started) {
- dwEvent = WaitForMultipleObjects(events_count, p_ddraw->mt.h_events, FALSE, INFINITE);
- if (!p_ddraw->b_started) {
- break;
- }
- if (dwEvent < WAIT_OBJECT_0 || dwEvent >(WAIT_OBJECT_0 + events_count)) {
- DDRAW_DEBUG_ERROR("Invalid dwEvent(%d)", dwEvent);
- break;
- }
- dwIndex = (dwEvent - WAIT_OBJECT_0);
- if (p_ddraw->mt.b_flags_array[dwIndex] != TRUE) {
- // must never happen
- DDRAW_DEBUG_ERROR("Invalid b_flags_array(%d)", dwIndex);
- break;
- }
-
- p_base->enc_cb.callback(p_base->enc_cb.callback_data, p_ddraw->mt.p_buff_yuv_aligned_array[dwIndex], p_ddraw->n_buff_yuv);
- p_ddraw->mt.b_flags_array[dwIndex] = FALSE;
- }
- DDRAW_DEBUG_INFO("Encode MT -- STOP");
- return tsk_null;
+ tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)arg;
+ tmedia_producer_t* p_base = TMEDIA_PRODUCER(arg);
+ DWORD dwEvent, dwIndex;
+ int ret = 0;
+ DWORD events_count = sizeof(p_ddraw->mt.h_events) / sizeof(p_ddraw->mt.h_events[0]);
+
+ DDRAW_DEBUG_INFO("Encode MT thread -- START");
+
+ while (ret == 0 && p_ddraw->b_started) {
+ dwEvent = WaitForMultipleObjects(events_count, p_ddraw->mt.h_events, FALSE, INFINITE);
+ if (!p_ddraw->b_started) {
+ break;
+ }
+ if (dwEvent < WAIT_OBJECT_0 || dwEvent >(WAIT_OBJECT_0 + events_count)) {
+ DDRAW_DEBUG_ERROR("Invalid dwEvent(%d)", dwEvent);
+ break;
+ }
+ dwIndex = (dwEvent - WAIT_OBJECT_0);
+ if (p_ddraw->mt.b_flags_array[dwIndex] != TRUE) {
+ // must never happen
+ DDRAW_DEBUG_ERROR("Invalid b_flags_array(%d)", dwIndex);
+ break;
+ }
+
+ p_base->enc_cb.callback(p_base->enc_cb.callback_data, p_ddraw->mt.p_buff_yuv_aligned_array[dwIndex], p_ddraw->n_buff_yuv);
+ p_ddraw->mt.b_flags_array[dwIndex] = FALSE;
+ }
+ DDRAW_DEBUG_INFO("Encode MT -- STOP");
+ return tsk_null;
}
#endif /* DDRAW_MT */
#if DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING
static unsigned long long FileTimeToInt64(const FILETIME & ft)
{
- return (((unsigned long long)(ft.dwHighDateTime))<<32) | ((unsigned long long)ft.dwLowDateTime);
+ return (((unsigned long long)(ft.dwHighDateTime))<<32) | ((unsigned long long)ft.dwLowDateTime);
}
static BOOL GetCpuPercents(unsigned long long* PercentIdle, unsigned long long* PercentUsage)
{
- static unsigned long long _prevTicks = 0;
- static unsigned long long _prevIdleTime = 0;
- unsigned long long ticks, idleTime;
- BOOL bSaveValues = FALSE, bSet = FALSE;
+ static unsigned long long _prevTicks = 0;
+ static unsigned long long _prevIdleTime = 0;
+ unsigned long long ticks, idleTime;
+ BOOL bSaveValues = FALSE, bSet = FALSE;
#if TDAV_UNDER_WINDOWS_CE
- bSaveValues = TRUE;
- ticks = GetTickCount();
- idleTime = GetIdleTime();
+ bSaveValues = TRUE;
+ ticks = GetTickCount();
+ idleTime = GetIdleTime();
#else
- {
- FILETIME _idleTime, _kernelTime, _userTime;
- if (GetSystemTimes(&_idleTime, &_kernelTime, &_userTime)) {
- idleTime = FileTimeToInt64(_idleTime);
- ticks = FileTimeToInt64(_kernelTime) + FileTimeToInt64(_userTime);
- bSaveValues = TRUE;
- }
- }
+ {
+ FILETIME _idleTime, _kernelTime, _userTime;
+ if (GetSystemTimes(&_idleTime, &_kernelTime, &_userTime)) {
+ idleTime = FileTimeToInt64(_idleTime);
+ ticks = FileTimeToInt64(_kernelTime) + FileTimeToInt64(_userTime);
+ bSaveValues = TRUE;
+ }
+ }
#endif
- if (_prevTicks > 0) {
- *PercentIdle = ((100 * (idleTime - _prevIdleTime)) / (ticks - _prevTicks));
- *PercentUsage = 100 - *PercentIdle;
- bSet = TRUE;
- }
- if (bSaveValues) {
- _prevTicks = ticks;
- _prevIdleTime = idleTime;
- }
+ if (_prevTicks > 0) {
+ *PercentIdle = ((100 * (idleTime - _prevIdleTime)) / (ticks - _prevTicks));
+ *PercentUsage = 100 - *PercentIdle;
+ bSet = TRUE;
+ }
+ if (bSaveValues) {
+ _prevTicks = ticks;
+ _prevIdleTime = idleTime;
+ }
- return bSet;
+ return bSet;
}
static int _tdav_producer_screencast_timer_cb(const void* arg, tsk_timer_id_t timer_id)
@@ -1420,36 +1416,36 @@ static int _tdav_producer_screencast_timer_cb(const void* arg, tsk_timer_id_t ti
tdav_producer_screencast_ddraw_t* p_ddraw = (tdav_producer_screencast_ddraw_t*)arg;
int ret = 0;
- if (!p_ddraw->b_started) {
- return 0;
- }
+ if (!p_ddraw->b_started) {
+ return 0;
+ }
if (p_ddraw->cpu.id_timer == timer_id) {
- unsigned long long PercentIdle, PercentUsage;
- if (GetCpuPercents(&PercentIdle, &PercentUsage) == TRUE) {
- TSK_DEBUG_INFO("\n\n****\n\nCPU Usage = %lld\n\n***", PercentUsage);
+ unsigned long long PercentIdle, PercentUsage;
+ if (GetCpuPercents(&PercentIdle, &PercentUsage) == TRUE) {
+ TSK_DEBUG_INFO("\n\n****\n\nCPU Usage = %lld\n\n***", PercentUsage);
#if DDRAW_CPU_THROTTLING
- {
- if ((PercentUsage + DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN) > DDRAW_CPU_THROTTLING_THRESHOLD) {
- unsigned long long NewTargetPercentUsage = TSK_CLAMP(DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN, DDRAW_CPU_THROTTLING_THRESHOLD - DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN, INT_MAX);
- int NewTargetFps = (int)((NewTargetPercentUsage * p_ddraw->cpu.fps_target) / PercentUsage);
- NewTargetFps = TSK_CLAMP(DDRAW_CPU_THROTTLING_FPS_MIN, NewTargetFps, TMEDIA_PRODUCER(p_ddraw)->video.fps);
- TSK_DEBUG_INFO("\n\n****\n\nCPU throttling = (%lld+%d)>%d, NewTargetPercentUsage=%lld, NewTargetFps=%d\n\n***",
- PercentUsage, DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN, DDRAW_CPU_THROTTLING_THRESHOLD, NewTargetPercentUsage, NewTargetFps);
- p_ddraw->cpu.fps_target = NewTargetFps;
- }
- else if (PercentUsage < DDRAW_CPU_THROTTLING_THRESHOLD) {
- if ((p_ddraw->cpu.fps_target + DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN) < TMEDIA_PRODUCER(p_ddraw)->video.fps) { // not honoring the negotiated fps yet?
- p_ddraw->cpu.fps_target += 1; // TODO: this is ok only if the timer timeout is set to 1s or less
- }
- }
- }
+ {
+ if ((PercentUsage + DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN) > DDRAW_CPU_THROTTLING_THRESHOLD) {
+ unsigned long long NewTargetPercentUsage = TSK_CLAMP(DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN, DDRAW_CPU_THROTTLING_THRESHOLD - DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN, INT_MAX);
+ int NewTargetFps = (int)((NewTargetPercentUsage * p_ddraw->cpu.fps_target) / PercentUsage);
+ NewTargetFps = TSK_CLAMP(DDRAW_CPU_THROTTLING_FPS_MIN, NewTargetFps, TMEDIA_PRODUCER(p_ddraw)->video.fps);
+ TSK_DEBUG_INFO("\n\n****\n\nCPU throttling = (%lld+%d)>%d, NewTargetPercentUsage=%lld, NewTargetFps=%d\n\n***",
+ PercentUsage, DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN, DDRAW_CPU_THROTTLING_THRESHOLD, NewTargetPercentUsage, NewTargetFps);
+ p_ddraw->cpu.fps_target = NewTargetFps;
+ }
+ else if (PercentUsage < DDRAW_CPU_THROTTLING_THRESHOLD) {
+ if ((p_ddraw->cpu.fps_target + DDRAW_CPU_THROTTLING_THRESHOLD_MARGIN) < TMEDIA_PRODUCER(p_ddraw)->video.fps) { // not honoring the negotiated fps yet?
+ p_ddraw->cpu.fps_target += 1; // TODO: this is ok only if the timer timeout is set to 1s or less
+ }
+ }
+ }
#endif /* DDRAW_CPU_THROTTLING */
- }
+ }
- if (p_ddraw->b_started) {
- p_ddraw->cpu.id_timer = tsk_timer_manager_schedule(p_ddraw->p_timer_mgr, DDRAW_CPU_SCHEDULE_TIMEOUT, _tdav_producer_screencast_timer_cb, p_ddraw);
- }
+ if (p_ddraw->b_started) {
+ p_ddraw->cpu.id_timer = tsk_timer_manager_schedule(p_ddraw->p_timer_mgr, DDRAW_CPU_SCHEDULE_TIMEOUT, _tdav_producer_screencast_timer_cb, p_ddraw);
+ }
}
return 0;
}
@@ -1462,80 +1458,78 @@ static int _tdav_producer_screencast_timer_cb(const void* arg, tsk_timer_id_t ti
/* constructor */
static tsk_object_t* _tdav_producer_screencast_ddraw_ctor(tsk_object_t *self, va_list * app)
{
- tdav_producer_screencast_ddraw_t *p_ddraw = (tdav_producer_screencast_ddraw_t *)self;
- if (p_ddraw) {
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(p_ddraw));
- TMEDIA_PRODUCER(p_ddraw)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
- /* init self with default values*/
- TMEDIA_PRODUCER(p_ddraw)->video.fps = 15;
- TMEDIA_PRODUCER(p_ddraw)->video.width = 352;
- TMEDIA_PRODUCER(p_ddraw)->video.height = 288;
-
- tsk_safeobj_init(p_ddraw);
- }
- return self;
+ tdav_producer_screencast_ddraw_t *p_ddraw = (tdav_producer_screencast_ddraw_t *)self;
+ if (p_ddraw) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(p_ddraw));
+ TMEDIA_PRODUCER(p_ddraw)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
+ /* init self with default values*/
+ TMEDIA_PRODUCER(p_ddraw)->video.fps = 15;
+ TMEDIA_PRODUCER(p_ddraw)->video.width = 352;
+ TMEDIA_PRODUCER(p_ddraw)->video.height = 288;
+
+ tsk_safeobj_init(p_ddraw);
+ }
+ return self;
}
/* destructor */
static tsk_object_t* _tdav_producer_screencast_ddraw_dtor(tsk_object_t * self)
{
- tdav_producer_screencast_ddraw_t *p_ddraw = (tdav_producer_screencast_ddraw_t *)self;
- if (p_ddraw) {
- /* stop */
- if (p_ddraw->b_started) {
- _tdav_producer_screencast_ddraw_stop((tmedia_producer_t*)p_ddraw);
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(p_ddraw));
- /* deinit self */
+ tdav_producer_screencast_ddraw_t *p_ddraw = (tdav_producer_screencast_ddraw_t *)self;
+ if (p_ddraw) {
+ /* stop */
+ if (p_ddraw->b_started) {
+ _tdav_producer_screencast_ddraw_stop((tmedia_producer_t*)p_ddraw);
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(p_ddraw));
+ /* deinit self */
#if DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING
- if (p_ddraw->p_timer_mgr) {
- tsk_timer_manager_destroy(&p_ddraw->p_timer_mgr);
- }
+ if (p_ddraw->p_timer_mgr) {
+ tsk_timer_manager_destroy(&p_ddraw->p_timer_mgr);
+ }
#endif /* DDRAW_CPU_MONITOR || DDRAW_CPU_THROTTLING */
#if DDRAW_MT
- for (int i = 0; i < sizeof(p_ddraw->mt.p_buff_yuv_aligned_array) / sizeof(p_ddraw->mt.p_buff_yuv_aligned_array[0]); ++i) {
- TSK_FREE_ALIGNED(p_ddraw->mt.p_buff_yuv_aligned_array[i]);
- }
- for (int i = 0; i < sizeof(p_ddraw->mt.h_events) / sizeof(p_ddraw->mt.h_events[0]); ++i) {
- if (p_ddraw->mt.h_events[i]) {
- CloseHandle(p_ddraw->mt.h_events[i]);
- p_ddraw->mt.h_events[i] = NULL;
- }
- }
+ for (int i = 0; i < sizeof(p_ddraw->mt.p_buff_yuv_aligned_array) / sizeof(p_ddraw->mt.p_buff_yuv_aligned_array[0]); ++i) {
+ TSK_FREE_ALIGNED(p_ddraw->mt.p_buff_yuv_aligned_array[i]);
+ }
+ for (int i = 0; i < sizeof(p_ddraw->mt.h_events) / sizeof(p_ddraw->mt.h_events[0]); ++i) {
+ if (p_ddraw->mt.h_events[i]) {
+ CloseHandle(p_ddraw->mt.h_events[i]);
+ p_ddraw->mt.h_events[i] = NULL;
+ }
+ }
#endif /* DDRAW_MT */
- TSK_FREE_ALIGNED(p_ddraw->p_buff_rgb_aligned);
- TSK_FREE_ALIGNED(p_ddraw->p_buff_yuv_aligned);
- DDRAW_SAFE_RELEASE(&p_ddraw->p_surf_primary);
- DDrawModuleSafeFree(p_ddraw->ddrawModule);
- tsk_safeobj_deinit(p_ddraw);
+ TSK_FREE_ALIGNED(p_ddraw->p_buff_rgb_aligned);
+ TSK_FREE_ALIGNED(p_ddraw->p_buff_yuv_aligned);
+ DDRAW_SAFE_RELEASE(&p_ddraw->p_surf_primary);
+ DDrawModuleSafeFree(p_ddraw->ddrawModule);
+ tsk_safeobj_deinit(p_ddraw);
- DDRAW_DEBUG_INFO("*** destroyed ***");
- }
+ DDRAW_DEBUG_INFO("*** destroyed ***");
+ }
- return self;
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_producer_screencast_ddraw_def_s =
-{
- sizeof(tdav_producer_screencast_ddraw_t),
- _tdav_producer_screencast_ddraw_ctor,
- _tdav_producer_screencast_ddraw_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_producer_screencast_ddraw_def_s = {
+ sizeof(tdav_producer_screencast_ddraw_t),
+ _tdav_producer_screencast_ddraw_ctor,
+ _tdav_producer_screencast_ddraw_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t tdav_producer_screencast_ddraw_plugin_def_s =
-{
- &tdav_producer_screencast_ddraw_def_s,
- tmedia_bfcp_video,
- "Microsoft DirectDraw screencast producer",
-
- _tdav_producer_screencast_ddraw_set,
- _tdav_producer_screencast_ddraw_prepare,
- _tdav_producer_screencast_ddraw_start,
- _tdav_producer_screencast_ddraw_pause,
- _tdav_producer_screencast_ddraw_stop
+static const tmedia_producer_plugin_def_t tdav_producer_screencast_ddraw_plugin_def_s = {
+ &tdav_producer_screencast_ddraw_def_s,
+ tmedia_bfcp_video,
+ "Microsoft DirectDraw screencast producer",
+
+ _tdav_producer_screencast_ddraw_set,
+ _tdav_producer_screencast_ddraw_prepare,
+ _tdav_producer_screencast_ddraw_start,
+ _tdav_producer_screencast_ddraw_pause,
+ _tdav_producer_screencast_ddraw_stop
};
const tmedia_producer_plugin_def_t *tdav_producer_screencast_ddraw_plugin_def_t = &tdav_producer_screencast_ddraw_plugin_def_s;
diff --git a/tinyDAV/src/video/gdi/tdav_consumer_video_gdi.c b/tinyDAV/src/video/gdi/tdav_consumer_video_gdi.c
index 8a81b66..bbfa2a1 100755
--- a/tinyDAV/src/video/gdi/tdav_consumer_video_gdi.c
+++ b/tinyDAV/src/video/gdi/tdav_consumer_video_gdi.c
@@ -1,17 +1,17 @@
/* Copyright (C) 2014 Mamadou DIOP
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -34,19 +34,18 @@ static HRESULT SetFullscreen(struct tdav_consumer_video_gdi_s *p_gdi, BOOL bFull
static HWND CreateFullScreenWindow(struct tdav_consumer_video_gdi_s *p_gdi);
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
-typedef struct tdav_consumer_video_gdi_s
-{
- TMEDIA_DECLARE_CONSUMER;
+typedef struct tdav_consumer_video_gdi_s {
+ TMEDIA_DECLARE_CONSUMER;
- BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked, bWindowHookedFullScreen;
- HWND hWindow;
- WNDPROC wndProc;
- HWND hWindowFullScreen;
- WNDPROC wndProcFullScreen;
- BITMAPINFO bitmapInfo;
- void* pBuffer;
+ BOOL bStarted, bPrepared, bPaused, bFullScreen, bWindowHooked, bWindowHookedFullScreen;
+ HWND hWindow;
+ WNDPROC wndProc;
+ HWND hWindowFullScreen;
+ WNDPROC wndProcFullScreen;
+ BITMAPINFO bitmapInfo;
+ void* pBuffer;
- TSK_DECLARE_SAFEOBJ;
+ TSK_DECLARE_SAFEOBJ;
}
tdav_consumer_video_gdi_t;
@@ -55,426 +54,421 @@ tdav_consumer_video_gdi_t;
/* ============ Media Consumer Interface ================= */
static int tdav_consumer_video_gdi_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
- HRESULT hr = S_OK;
-
- if (!self || !param) {
- TSK_DEBUG_ERROR("Invalid parameter");
- CHECK_HR(hr = E_POINTER);
- }
-
- if (param->value_type == tmedia_pvt_int64) {
- if (tsk_striequals(param->key, "remote-hwnd")) {
- HWND hWnd = ((HWND)*((int64_t*)param->value));
- if (hWnd != p_gdi->hWindow) {
- tsk_safeobj_lock(p_gdi); // block consumer thread
- UnhookWindow(p_gdi, FALSE);
- p_gdi->hWindow = hWnd;
- tsk_safeobj_unlock(p_gdi); // unblock consumer thread
- }
- }
- }
- else if(param->value_type == tmedia_pvt_int32) {
- if(tsk_striequals(param->key, "fullscreen")) {
- BOOL bFullScreen = !!*((int32_t*)param->value);
- TSK_DEBUG_INFO("[GDI video consumer] Full Screen = %d", bFullScreen);
- CHECK_HR(hr = SetFullscreen(p_gdi, bFullScreen));
- }
- }
-
- CHECK_HR(hr);
+ int ret = 0;
+ tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
+ HRESULT hr = S_OK;
+
+ if (!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if (param->value_type == tmedia_pvt_int64) {
+ if (tsk_striequals(param->key, "remote-hwnd")) {
+ HWND hWnd = ((HWND)*((int64_t*)param->value));
+ if (hWnd != p_gdi->hWindow) {
+ tsk_safeobj_lock(p_gdi); // block consumer thread
+ UnhookWindow(p_gdi, FALSE);
+ p_gdi->hWindow = hWnd;
+ tsk_safeobj_unlock(p_gdi); // unblock consumer thread
+ }
+ }
+ }
+ else if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "fullscreen")) {
+ BOOL bFullScreen = !!*((int32_t*)param->value);
+ TSK_DEBUG_INFO("[GDI video consumer] Full Screen = %d", bFullScreen);
+ CHECK_HR(hr = SetFullscreen(p_gdi, bFullScreen));
+ }
+ }
+
+ CHECK_HR(hr);
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int tdav_consumer_video_gdi_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
-
- if (!p_gdi || !codec && codec->plugin) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- TMEDIA_CONSUMER(p_gdi)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(p_gdi)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(p_gdi)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
-
- if (!TMEDIA_CONSUMER(p_gdi)->video.display.width) {
- TMEDIA_CONSUMER(p_gdi)->video.display.width = TMEDIA_CONSUMER(p_gdi)->video.in.width;
- }
- if (!TMEDIA_CONSUMER(p_gdi)->video.display.height) {
- TMEDIA_CONSUMER(p_gdi)->video.display.height = TMEDIA_CONSUMER(p_gdi)->video.in.height;
- }
-
- ZeroMemory(&p_gdi->bitmapInfo, sizeof(p_gdi->bitmapInfo));
- p_gdi->bitmapInfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
- p_gdi->bitmapInfo.bmiHeader.biPlanes = 1;
- p_gdi->bitmapInfo.bmiHeader.biBitCount = 24; // RGB24
- p_gdi->bitmapInfo.bmiHeader.biCompression = BI_RGB;
- p_gdi->bitmapInfo.bmiHeader.biWidth = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.width;
- p_gdi->bitmapInfo.bmiHeader.biHeight = (LONG)(TMEDIA_CONSUMER(p_gdi)->video.in.height * -1);
- p_gdi->bitmapInfo.bmiHeader.biSizeImage = (DWORD)(TMEDIA_CONSUMER(p_gdi)->video.in.width * abs((int)TMEDIA_CONSUMER(p_gdi)->video.in.height) *
- (p_gdi->bitmapInfo.bmiHeader.biBitCount >> 3));
-
- return 0;
+ tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
+
+ if (!p_gdi || !codec && codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ TMEDIA_CONSUMER(p_gdi)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(p_gdi)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(p_gdi)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if (!TMEDIA_CONSUMER(p_gdi)->video.display.width) {
+ TMEDIA_CONSUMER(p_gdi)->video.display.width = TMEDIA_CONSUMER(p_gdi)->video.in.width;
+ }
+ if (!TMEDIA_CONSUMER(p_gdi)->video.display.height) {
+ TMEDIA_CONSUMER(p_gdi)->video.display.height = TMEDIA_CONSUMER(p_gdi)->video.in.height;
+ }
+
+ ZeroMemory(&p_gdi->bitmapInfo, sizeof(p_gdi->bitmapInfo));
+ p_gdi->bitmapInfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+ p_gdi->bitmapInfo.bmiHeader.biPlanes = 1;
+ p_gdi->bitmapInfo.bmiHeader.biBitCount = 24; // RGB24
+ p_gdi->bitmapInfo.bmiHeader.biCompression = BI_RGB;
+ p_gdi->bitmapInfo.bmiHeader.biWidth = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.width;
+ p_gdi->bitmapInfo.bmiHeader.biHeight = (LONG)(TMEDIA_CONSUMER(p_gdi)->video.in.height * -1);
+ p_gdi->bitmapInfo.bmiHeader.biSizeImage = (DWORD)(TMEDIA_CONSUMER(p_gdi)->video.in.width * abs((int)TMEDIA_CONSUMER(p_gdi)->video.in.height) *
+ (p_gdi->bitmapInfo.bmiHeader.biBitCount >> 3));
+
+ return 0;
}
static int tdav_consumer_video_gdi_start(tmedia_consumer_t* self)
{
- tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
+ tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
- if (!p_gdi) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_gdi);
+ if (!p_gdi) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- p_gdi->bPaused = FALSE;
- p_gdi->bStarted = TRUE;
+ tsk_safeobj_lock(p_gdi);
- tsk_safeobj_unlock(p_gdi);
+ p_gdi->bPaused = FALSE;
+ p_gdi->bStarted = TRUE;
- return 0;
+ tsk_safeobj_unlock(p_gdi);
+
+ return 0;
}
static int tdav_consumer_video_gdi_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
- int ret = 0;
- HWND* p_Window;
- BOOL *p_bWindowHooked, bImputSizeChanged;
-
- if (!p_gdi) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_gdi);
-
- if (!p_gdi->bStarted || p_gdi->bPaused) {
- TSK_DEBUG_INFO("GDI consumer stopped or paused");
- goto bail;
- }
-
- bImputSizeChanged = (size != p_gdi->bitmapInfo.bmiHeader.biSizeImage)
- || (TMEDIA_CONSUMER(p_gdi)->video.in.width != p_gdi->bitmapInfo.bmiHeader.biWidth)
- || (TMEDIA_CONSUMER(p_gdi)->video.in.height != TSK_ABS(p_gdi->bitmapInfo.bmiHeader.biHeight));
-
- if (bImputSizeChanged) {
- tsk_size_t xNewSize = TMEDIA_CONSUMER(p_gdi)->video.in.width * TMEDIA_CONSUMER(p_gdi)->video.in.height * (p_gdi->bitmapInfo.bmiHeader.biBitCount >> 3);
- TSK_DEBUG_INFO("GDI input size changed: %u->%u", p_gdi->bitmapInfo.bmiHeader.biSizeImage, size);
- if (xNewSize != size) {
- TSK_DEBUG_ERROR("GDI consumer: chroma issue?");
- ret = -1;
- goto bail;
- }
- p_gdi->bitmapInfo.bmiHeader.biWidth = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.width;
- p_gdi->bitmapInfo.bmiHeader.biHeight = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.height * -1;
- p_gdi->bitmapInfo.bmiHeader.biSizeImage = (DWORD)xNewSize;
- p_gdi->pBuffer = tsk_realloc(p_gdi->pBuffer, p_gdi->bitmapInfo.bmiHeader.biSizeImage);
- }
-
- p_Window = p_gdi->bFullScreen ? &p_gdi->hWindowFullScreen : &p_gdi->hWindow;
- p_bWindowHooked = p_gdi->bFullScreen ? &p_gdi->bWindowHookedFullScreen : &p_gdi->bWindowHooked;
-
- if (*p_Window) {
- if (!*p_bWindowHooked) {
- // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
- CHECK_HR(HookWindow(p_gdi, *p_Window, p_gdi->bFullScreen));
- }
- if (!p_gdi->pBuffer) {
- p_gdi->pBuffer = tsk_realloc(p_gdi->pBuffer, p_gdi->bitmapInfo.bmiHeader.biSizeImage);
- }
- if (p_gdi->pBuffer) {
- memcpy(p_gdi->pBuffer, buffer, size);
- InvalidateRect(*p_Window, NULL, TRUE);
- }
- }
+ tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
+ int ret = 0;
+ HWND* p_Window;
+ BOOL *p_bWindowHooked, bImputSizeChanged;
+
+ if (!p_gdi) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(p_gdi);
+
+ if (!p_gdi->bStarted || p_gdi->bPaused) {
+ TSK_DEBUG_INFO("GDI consumer stopped or paused");
+ goto bail;
+ }
+
+ bImputSizeChanged = (size != p_gdi->bitmapInfo.bmiHeader.biSizeImage)
+ || (TMEDIA_CONSUMER(p_gdi)->video.in.width != p_gdi->bitmapInfo.bmiHeader.biWidth)
+ || (TMEDIA_CONSUMER(p_gdi)->video.in.height != TSK_ABS(p_gdi->bitmapInfo.bmiHeader.biHeight));
+
+ if (bImputSizeChanged) {
+ tsk_size_t xNewSize = TMEDIA_CONSUMER(p_gdi)->video.in.width * TMEDIA_CONSUMER(p_gdi)->video.in.height * (p_gdi->bitmapInfo.bmiHeader.biBitCount >> 3);
+ TSK_DEBUG_INFO("GDI input size changed: %u->%u", p_gdi->bitmapInfo.bmiHeader.biSizeImage, size);
+ if (xNewSize != size) {
+ TSK_DEBUG_ERROR("GDI consumer: chroma issue?");
+ ret = -1;
+ goto bail;
+ }
+ p_gdi->bitmapInfo.bmiHeader.biWidth = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.width;
+ p_gdi->bitmapInfo.bmiHeader.biHeight = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.height * -1;
+ p_gdi->bitmapInfo.bmiHeader.biSizeImage = (DWORD)xNewSize;
+ p_gdi->pBuffer = tsk_realloc(p_gdi->pBuffer, p_gdi->bitmapInfo.bmiHeader.biSizeImage);
+ }
+
+ p_Window = p_gdi->bFullScreen ? &p_gdi->hWindowFullScreen : &p_gdi->hWindow;
+ p_bWindowHooked = p_gdi->bFullScreen ? &p_gdi->bWindowHookedFullScreen : &p_gdi->bWindowHooked;
+
+ if (*p_Window) {
+ if (!*p_bWindowHooked) {
+ // Do not hook "hWnd" as it could be the fullscreen handle which is always hooked.
+ CHECK_HR(HookWindow(p_gdi, *p_Window, p_gdi->bFullScreen));
+ }
+ if (!p_gdi->pBuffer) {
+ p_gdi->pBuffer = tsk_realloc(p_gdi->pBuffer, p_gdi->bitmapInfo.bmiHeader.biSizeImage);
+ }
+ if (p_gdi->pBuffer) {
+ memcpy(p_gdi->pBuffer, buffer, size);
+ InvalidateRect(*p_Window, NULL, TRUE);
+ }
+ }
bail:
- tsk_safeobj_unlock(p_gdi);
- return ret;
+ tsk_safeobj_unlock(p_gdi);
+ return ret;
}
static int tdav_consumer_video_gdi_pause(tmedia_consumer_t* self)
{
- tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
+ tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
- if (!p_gdi) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!p_gdi) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- tsk_safeobj_lock(p_gdi);
+ tsk_safeobj_lock(p_gdi);
- p_gdi->bPaused = TRUE;
+ p_gdi->bPaused = TRUE;
- tsk_safeobj_unlock(p_gdi);
+ tsk_safeobj_unlock(p_gdi);
- return 0;
+ return 0;
}
static int tdav_consumer_video_gdi_stop(tmedia_consumer_t* self)
{
- tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
-
- if (!p_gdi) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_gdi);
-
- p_gdi->bStarted = FALSE;
- p_gdi->bPaused = FALSE;
- SetFullscreen(p_gdi, FALSE);
- UnhookWindow(p_gdi, TRUE);
- UnhookWindow(p_gdi, FALSE);
-
- tsk_safeobj_unlock(p_gdi);
-
- return 0;
+ tdav_consumer_video_gdi_t* p_gdi = (tdav_consumer_video_gdi_t*)self;
+
+ if (!p_gdi) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(p_gdi);
+
+ p_gdi->bStarted = FALSE;
+ p_gdi->bPaused = FALSE;
+ SetFullscreen(p_gdi, FALSE);
+ UnhookWindow(p_gdi, TRUE);
+ UnhookWindow(p_gdi, FALSE);
+
+ tsk_safeobj_unlock(p_gdi);
+
+ return 0;
}
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
- switch(uMsg)
- {
- case WM_CREATE:
- case WM_SIZE:
- case WM_MOVE:
- {
- struct tdav_consumer_video_gdi_s* p_gdi = ((struct tdav_consumer_video_gdi_s*)GetProp(hWnd, TEXT("Self")));
- if (p_gdi) {
-
- }
- break;
- }
-
- case WM_PAINT:
- {
- struct tdav_consumer_video_gdi_s* p_gdi = ((struct tdav_consumer_video_gdi_s*)GetProp(hWnd, TEXT("Self")));
- if (p_gdi) {
- tsk_safeobj_lock(p_gdi);
-
- if (p_gdi->bStarted && !p_gdi->bPaused && p_gdi->pBuffer) {
- PAINTSTRUCT ps;
- HDC hdc;
- RECT rc, logical_rect;
- int height, width, i, x, y;
- HDC dc_mem, all_dc[2];
- HBITMAP bmp_mem;
- HGDIOBJ bmp_old;
- POINT logical_area;
- HBRUSH brush;
-
- if (!(hdc = BeginPaint(hWnd, &ps))) {
- goto paint_done;
- }
-
- if (!GetClientRect(hWnd, &rc)) {
- EndPaint(hWnd, &ps);
- goto paint_done;
- }
-
- height = abs(p_gdi->bitmapInfo.bmiHeader.biHeight);
- width = p_gdi->bitmapInfo.bmiHeader.biWidth;
-
- dc_mem = CreateCompatibleDC(ps.hdc);
- SetStretchBltMode(dc_mem, HALFTONE);
-
- // Set the map mode so that the ratio will be maintained for us.
- all_dc[0] = ps.hdc, all_dc[1] = dc_mem;
- for (i = 0; i < sizeof(all_dc)/sizeof(all_dc[0]); ++i) {
+ switch(uMsg) {
+ case WM_CREATE:
+ case WM_SIZE:
+ case WM_MOVE: {
+ struct tdav_consumer_video_gdi_s* p_gdi = ((struct tdav_consumer_video_gdi_s*)GetProp(hWnd, TEXT("Self")));
+ if (p_gdi) {
+
+ }
+ break;
+ }
+
+ case WM_PAINT: {
+ struct tdav_consumer_video_gdi_s* p_gdi = ((struct tdav_consumer_video_gdi_s*)GetProp(hWnd, TEXT("Self")));
+ if (p_gdi) {
+ tsk_safeobj_lock(p_gdi);
+
+ if (p_gdi->bStarted && !p_gdi->bPaused && p_gdi->pBuffer) {
+ PAINTSTRUCT ps;
+ HDC hdc;
+ RECT rc, logical_rect;
+ int height, width, i, x, y;
+ HDC dc_mem, all_dc[2];
+ HBITMAP bmp_mem;
+ HGDIOBJ bmp_old;
+ POINT logical_area;
+ HBRUSH brush;
+
+ if (!(hdc = BeginPaint(hWnd, &ps))) {
+ goto paint_done;
+ }
+
+ if (!GetClientRect(hWnd, &rc)) {
+ EndPaint(hWnd, &ps);
+ goto paint_done;
+ }
+
+ height = abs(p_gdi->bitmapInfo.bmiHeader.biHeight);
+ width = p_gdi->bitmapInfo.bmiHeader.biWidth;
+
+ dc_mem = CreateCompatibleDC(ps.hdc);
+ SetStretchBltMode(dc_mem, HALFTONE);
+
+ // Set the map mode so that the ratio will be maintained for us.
+ all_dc[0] = ps.hdc, all_dc[1] = dc_mem;
+ for (i = 0; i < sizeof(all_dc)/sizeof(all_dc[0]); ++i) {
#if !TDAV_UNDER_WINDOWS_CE
- SetMapMode(all_dc[i], MM_ISOTROPIC);
- SetWindowExtEx(all_dc[i], width, height, NULL);
- SetViewportExtEx(all_dc[i], rc.right, rc.bottom, NULL);
+ SetMapMode(all_dc[i], MM_ISOTROPIC);
+ SetWindowExtEx(all_dc[i], width, height, NULL);
+ SetViewportExtEx(all_dc[i], rc.right, rc.bottom, NULL);
#endif
- }
+ }
- bmp_mem = CreateCompatibleBitmap(ps.hdc, rc.right, rc.bottom);
- bmp_old = SelectObject(dc_mem, bmp_mem);
+ bmp_mem = CreateCompatibleBitmap(ps.hdc, rc.right, rc.bottom);
+ bmp_old = SelectObject(dc_mem, bmp_mem);
- logical_area.x = rc.right, logical_area.y = rc.bottom;
+ logical_area.x = rc.right, logical_area.y = rc.bottom;
#if !TDAV_UNDER_WINDOWS_CE
- DPtoLP(ps.hdc, &logical_area, 1);
+ DPtoLP(ps.hdc, &logical_area, 1);
#endif
- brush = CreateSolidBrush(RGB(0, 0, 0));
- logical_rect.left = 0, logical_rect.top = 0, logical_rect.right = logical_area.x, logical_rect.bottom = logical_area.y;
- FillRect(dc_mem, &logical_rect, brush);
- DeleteObject(brush);
+ brush = CreateSolidBrush(RGB(0, 0, 0));
+ logical_rect.left = 0, logical_rect.top = 0, logical_rect.right = logical_area.x, logical_rect.bottom = logical_area.y;
+ FillRect(dc_mem, &logical_rect, brush);
+ DeleteObject(brush);
- x = (logical_area.x / 2) - (width / 2);
- y = (logical_area.y / 2) - (height / 2);
+ x = (logical_area.x / 2) - (width / 2);
+ y = (logical_area.y / 2) - (height / 2);
- StretchDIBits(dc_mem, x, y, width, height,
- 0, 0, width, height, p_gdi->pBuffer, &p_gdi->bitmapInfo, DIB_RGB_COLORS, SRCCOPY);
+ StretchDIBits(dc_mem, x, y, width, height,
+ 0, 0, width, height, p_gdi->pBuffer, &p_gdi->bitmapInfo, DIB_RGB_COLORS, SRCCOPY);
- BitBlt(ps.hdc, 0, 0, logical_area.x, logical_area.y,
- dc_mem, 0, 0, SRCCOPY);
+ BitBlt(ps.hdc, 0, 0, logical_area.x, logical_area.y,
+ dc_mem, 0, 0, SRCCOPY);
- // Cleanup.
- SelectObject(dc_mem, bmp_old);
- DeleteObject(bmp_mem);
- DeleteDC(dc_mem);
+ // Cleanup.
+ SelectObject(dc_mem, bmp_old);
+ DeleteObject(bmp_mem);
+ DeleteDC(dc_mem);
- EndPaint(hWnd, &ps);
- }
+ EndPaint(hWnd, &ps);
+ }
paint_done:
- tsk_safeobj_unlock(p_gdi);
- }
- break;
- }
-
- case WM_ERASEBKGND:
- {
- return TRUE; // avoid background erasing.
- }
-
- case WM_CHAR:
- case WM_KEYUP:
- {
- struct tdav_consumer_video_gdi_s* p_gdi = ((struct tdav_consumer_video_gdi_s*)GetProp(hWnd, TEXT("Self")));
- if (p_gdi) {
- SetFullscreen(p_gdi, FALSE);
- }
-
- break;
- }
- }
-
- return DefWindowProc(hWnd, uMsg, wParam, lParam);
+ tsk_safeobj_unlock(p_gdi);
+ }
+ break;
+ }
+
+ case WM_ERASEBKGND: {
+ return TRUE; // avoid background erasing.
+ }
+
+ case WM_CHAR:
+ case WM_KEYUP: {
+ struct tdav_consumer_video_gdi_s* p_gdi = ((struct tdav_consumer_video_gdi_s*)GetProp(hWnd, TEXT("Self")));
+ if (p_gdi) {
+ SetFullscreen(p_gdi, FALSE);
+ }
+
+ break;
+ }
+ }
+
+ return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
static HRESULT HookWindow(struct tdav_consumer_video_gdi_s *p_gdi, HWND hWnd, BOOL bFullScreenWindow)
{
- HRESULT hr = S_OK;
- HWND* p_Window = bFullScreenWindow ? &p_gdi->hWindowFullScreen : &p_gdi->hWindow;
- WNDPROC* p_wndProc = bFullScreenWindow ? &p_gdi->wndProcFullScreen : &p_gdi->wndProc;
- BOOL* p_bWindowHooked = bFullScreenWindow ? &p_gdi->bWindowHookedFullScreen : &p_gdi->bWindowHooked;
+ HRESULT hr = S_OK;
+ HWND* p_Window = bFullScreenWindow ? &p_gdi->hWindowFullScreen : &p_gdi->hWindow;
+ WNDPROC* p_wndProc = bFullScreenWindow ? &p_gdi->wndProcFullScreen : &p_gdi->wndProc;
+ BOOL* p_bWindowHooked = bFullScreenWindow ? &p_gdi->bWindowHookedFullScreen : &p_gdi->bWindowHooked;
- tsk_safeobj_lock(p_gdi);
+ tsk_safeobj_lock(p_gdi);
- CHECK_HR(hr = UnhookWindow(p_gdi, bFullScreenWindow));
+ CHECK_HR(hr = UnhookWindow(p_gdi, bFullScreenWindow));
- if ((*p_Window = hWnd)) {
+ if ((*p_Window = hWnd)) {
#if TDAV_UNDER_WINDOWS_CE
- *p_wndProc = (WNDPROC)SetWindowLong(hWnd, GWL_WNDPROC, (LONG)WndProc);
+ *p_wndProc = (WNDPROC)SetWindowLong(hWnd, GWL_WNDPROC, (LONG)WndProc);
#else
- *p_wndProc = (WNDPROC)SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)WndProc);
+ *p_wndProc = (WNDPROC)SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)WndProc);
#endif
- if (!*p_wndProc) {
- TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
- CHECK_HR(hr = E_FAIL);
- }
- *p_bWindowHooked = TRUE;
- SetProp(*p_Window, TEXT("Self"), p_gdi);
- }
+ if (!*p_wndProc) {
+ TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
+ CHECK_HR(hr = E_FAIL);
+ }
+ *p_bWindowHooked = TRUE;
+ SetProp(*p_Window, TEXT("Self"), p_gdi);
+ }
bail:
- tsk_safeobj_unlock(p_gdi);
- return S_OK;
+ tsk_safeobj_unlock(p_gdi);
+ return S_OK;
}
static HRESULT UnhookWindow(struct tdav_consumer_video_gdi_s *p_gdi, BOOL bFullScreenWindow)
{
- HWND* p_Window = bFullScreenWindow ? &p_gdi->hWindowFullScreen : &p_gdi->hWindow;
- WNDPROC* p_wndProc = bFullScreenWindow ? &p_gdi->wndProcFullScreen : &p_gdi->wndProc;
- BOOL* p_bWindowHooked = bFullScreenWindow ? &p_gdi->bWindowHookedFullScreen : &p_gdi->bWindowHooked;
+ HWND* p_Window = bFullScreenWindow ? &p_gdi->hWindowFullScreen : &p_gdi->hWindow;
+ WNDPROC* p_wndProc = bFullScreenWindow ? &p_gdi->wndProcFullScreen : &p_gdi->wndProc;
+ BOOL* p_bWindowHooked = bFullScreenWindow ? &p_gdi->bWindowHookedFullScreen : &p_gdi->bWindowHooked;
- tsk_safeobj_lock(p_gdi);
- if (*p_Window && *p_wndProc) {
+ tsk_safeobj_lock(p_gdi);
+ if (*p_Window && *p_wndProc) {
#if TDAV_UNDER_WINDOWS_CE
- SetWindowLong(*p_Window, GWL_WNDPROC, (LONG)*p_wndProc);
+ SetWindowLong(*p_Window, GWL_WNDPROC, (LONG)*p_wndProc);
#else
- SetWindowLongPtr(*p_Window, GWLP_WNDPROC, (LONG_PTR)*p_wndProc);
+ SetWindowLongPtr(*p_Window, GWLP_WNDPROC, (LONG_PTR)*p_wndProc);
#endif
- *p_wndProc = NULL;
- }
- if (*p_Window) {
- if (p_gdi->pBuffer) {
- memset(p_gdi->pBuffer, 0, p_gdi->bitmapInfo.bmiHeader.biSizeImage);
- }
- InvalidateRect(*p_Window, NULL, FALSE);
- }
- *p_bWindowHooked = FALSE;
- tsk_safeobj_unlock(p_gdi);
- return S_OK;
+ *p_wndProc = NULL;
+ }
+ if (*p_Window) {
+ if (p_gdi->pBuffer) {
+ memset(p_gdi->pBuffer, 0, p_gdi->bitmapInfo.bmiHeader.biSizeImage);
+ }
+ InvalidateRect(*p_Window, NULL, FALSE);
+ }
+ *p_bWindowHooked = FALSE;
+ tsk_safeobj_unlock(p_gdi);
+ return S_OK;
}
static HRESULT SetFullscreen(struct tdav_consumer_video_gdi_s *p_gdi, BOOL bFullScreen)
{
- HRESULT hr = S_OK;
- if (!p_gdi) {
- CHECK_HR(hr = E_POINTER);
- }
-
- if (p_gdi->bFullScreen != bFullScreen) {
- tsk_safeobj_lock(p_gdi);
- if (bFullScreen) {
- HWND hWnd = CreateFullScreenWindow(p_gdi);
- if (hWnd) {
+ HRESULT hr = S_OK;
+ if (!p_gdi) {
+ CHECK_HR(hr = E_POINTER);
+ }
+
+ if (p_gdi->bFullScreen != bFullScreen) {
+ tsk_safeobj_lock(p_gdi);
+ if (bFullScreen) {
+ HWND hWnd = CreateFullScreenWindow(p_gdi);
+ if (hWnd) {
#if TDAV_UNDER_WINDOWS_CE
- ShowWindow(hWnd, SW_SHOWNORMAL);
+ ShowWindow(hWnd, SW_SHOWNORMAL);
#else
- ShowWindow(hWnd, SW_SHOWDEFAULT);
+ ShowWindow(hWnd, SW_SHOWDEFAULT);
#endif
- UpdateWindow(hWnd);
- HookWindow(p_gdi, hWnd, TRUE);
- }
- }
- else if(p_gdi->hWindowFullScreen) {
- ShowWindow(p_gdi->hWindowFullScreen, SW_HIDE);
- UnhookWindow(p_gdi, TRUE);
- }
- p_gdi->bFullScreen = bFullScreen;
- tsk_safeobj_unlock(p_gdi);
-
- CHECK_HR(hr);
- }
+ UpdateWindow(hWnd);
+ HookWindow(p_gdi, hWnd, TRUE);
+ }
+ }
+ else if(p_gdi->hWindowFullScreen) {
+ ShowWindow(p_gdi->hWindowFullScreen, SW_HIDE);
+ UnhookWindow(p_gdi, TRUE);
+ }
+ p_gdi->bFullScreen = bFullScreen;
+ tsk_safeobj_unlock(p_gdi);
+
+ CHECK_HR(hr);
+ }
bail:
- return hr;
+ return hr;
}
static HWND CreateFullScreenWindow(struct tdav_consumer_video_gdi_s *p_gdi)
{
- HRESULT hr = S_OK;
-
- if(!p_gdi) {
- return NULL;
- }
-
- if (!p_gdi->hWindowFullScreen) {
- WNDCLASS wc = {0};
-
- wc.lpfnWndProc = WndProc;
- wc.hInstance = GetModuleHandle(NULL);
- wc.hCursor = LoadCursor(NULL, IDC_ARROW);
- wc.lpszClassName = L"WindowClass";
- RegisterClass(&wc);
- p_gdi->hWindowFullScreen = CreateWindowEx(
- 0,
- wc.lpszClassName,
- L"Doubango's Video Consumer Fullscreen",
- WS_EX_TOPMOST | WS_POPUP,
- 0, 0,
- GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
- NULL,
- NULL,
- GetModuleHandle(NULL),
- NULL);
-
- SetProp(p_gdi->hWindowFullScreen, TEXT("Self"), p_gdi);
- }
- return p_gdi->hWindowFullScreen;
+ HRESULT hr = S_OK;
+
+ if(!p_gdi) {
+ return NULL;
+ }
+
+ if (!p_gdi->hWindowFullScreen) {
+ WNDCLASS wc = {0};
+
+ wc.lpfnWndProc = WndProc;
+ wc.hInstance = GetModuleHandle(NULL);
+ wc.hCursor = LoadCursor(NULL, IDC_ARROW);
+ wc.lpszClassName = L"WindowClass";
+ RegisterClass(&wc);
+ p_gdi->hWindowFullScreen = CreateWindowEx(
+ 0,
+ wc.lpszClassName,
+ L"Doubango's Video Consumer Fullscreen",
+ WS_EX_TOPMOST | WS_POPUP,
+ 0, 0,
+ GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN),
+ NULL,
+ NULL,
+ GetModuleHandle(NULL),
+ NULL);
+
+ SetProp(p_gdi->hWindowFullScreen, TEXT("Self"), p_gdi);
+ }
+ return p_gdi->hWindowFullScreen;
}
//
@@ -483,60 +477,58 @@ static HWND CreateFullScreenWindow(struct tdav_consumer_video_gdi_s *p_gdi)
/* constructor */
static tsk_object_t* tdav_consumer_video_gdi_ctor(tsk_object_t * self, va_list * app)
{
- tdav_consumer_video_gdi_t *p_gdi = (tdav_consumer_video_gdi_t *)self;
- if (p_gdi) {
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(p_gdi));
- TMEDIA_CONSUMER(p_gdi)->video.display.chroma = tmedia_chroma_bgr24;
-
- /* init self */
- TMEDIA_CONSUMER(p_gdi)->video.fps = 15;
- TMEDIA_CONSUMER(p_gdi)->video.display.width = 352;
- TMEDIA_CONSUMER(p_gdi)->video.display.height = 288;
- TMEDIA_CONSUMER(p_gdi)->video.display.auto_resize = tsk_true;
- tsk_safeobj_init(p_gdi);
- }
- return self;
+ tdav_consumer_video_gdi_t *p_gdi = (tdav_consumer_video_gdi_t *)self;
+ if (p_gdi) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(p_gdi));
+ TMEDIA_CONSUMER(p_gdi)->video.display.chroma = tmedia_chroma_bgr24;
+
+ /* init self */
+ TMEDIA_CONSUMER(p_gdi)->video.fps = 15;
+ TMEDIA_CONSUMER(p_gdi)->video.display.width = 352;
+ TMEDIA_CONSUMER(p_gdi)->video.display.height = 288;
+ TMEDIA_CONSUMER(p_gdi)->video.display.auto_resize = tsk_true;
+ tsk_safeobj_init(p_gdi);
+ }
+ return self;
}
/* destructor */
static tsk_object_t* tdav_consumer_video_gdi_dtor(tsk_object_t * self)
-{
- tdav_consumer_video_gdi_t *p_gdi = (tdav_consumer_video_gdi_t *)self;
- if (p_gdi) {
- /* stop */
- tdav_consumer_video_gdi_stop((tmedia_consumer_t*)self);
-
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(p_gdi));
- /* deinit self */
- TSK_FREE(p_gdi->pBuffer);
- tsk_safeobj_deinit(p_gdi);
- }
-
- return self;
+{
+ tdav_consumer_video_gdi_t *p_gdi = (tdav_consumer_video_gdi_t *)self;
+ if (p_gdi) {
+ /* stop */
+ tdav_consumer_video_gdi_stop((tmedia_consumer_t*)self);
+
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(p_gdi));
+ /* deinit self */
+ TSK_FREE(p_gdi->pBuffer);
+ tsk_safeobj_deinit(p_gdi);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_consumer_video_gdi_def_s =
-{
- sizeof(tdav_consumer_video_gdi_t),
- tdav_consumer_video_gdi_ctor,
- tdav_consumer_video_gdi_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_consumer_video_gdi_def_s = {
+ sizeof(tdav_consumer_video_gdi_t),
+ tdav_consumer_video_gdi_ctor,
+ tdav_consumer_video_gdi_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t tdav_consumer_video_gdi_plugin_def_s =
-{
- &tdav_consumer_video_gdi_def_s,
-
- tmedia_video,
- "Microsoft GDI consumer (using custom source)",
-
- tdav_consumer_video_gdi_set,
- tdav_consumer_video_gdi_prepare,
- tdav_consumer_video_gdi_start,
- tdav_consumer_video_gdi_consume,
- tdav_consumer_video_gdi_pause,
- tdav_consumer_video_gdi_stop
+static const tmedia_consumer_plugin_def_t tdav_consumer_video_gdi_plugin_def_s = {
+ &tdav_consumer_video_gdi_def_s,
+
+ tmedia_video,
+ "Microsoft GDI consumer (using custom source)",
+
+ tdav_consumer_video_gdi_set,
+ tdav_consumer_video_gdi_prepare,
+ tdav_consumer_video_gdi_start,
+ tdav_consumer_video_gdi_consume,
+ tdav_consumer_video_gdi_pause,
+ tdav_consumer_video_gdi_stop
};
const tmedia_consumer_plugin_def_t *tdav_consumer_video_gdi_plugin_def_t = &tdav_consumer_video_gdi_plugin_def_s;
diff --git a/tinyDAV/src/video/gdi/tdav_producer_screencast_gdi.c b/tinyDAV/src/video/gdi/tdav_producer_screencast_gdi.c
index 799aafc..12ea4cd 100755
--- a/tinyDAV/src/video/gdi/tdav_producer_screencast_gdi.c
+++ b/tinyDAV/src/video/gdi/tdav_producer_screencast_gdi.c
@@ -1,17 +1,17 @@
/* Copyright (C) 2014-2015 Mamadou DIOP.
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -47,30 +47,29 @@ static const BOOL bitmapBuffSrcOwnMemory = TRUE;
# define HIGH_PRIO_BITBLIT 0
#endif /* HIGH_PRIO_BITBLIT */
-typedef struct tdav_producer_screencast_gdi_s
-{
- TMEDIA_DECLARE_PRODUCER;
-
- HWND hwnd_preview;
- HWND hwnd_src;
+typedef struct tdav_producer_screencast_gdi_s {
+ TMEDIA_DECLARE_PRODUCER;
- BITMAPINFO bitmapInfoSrc;
- BITMAPINFO bitmapInfoNeg;
+ HWND hwnd_preview;
+ HWND hwnd_src;
- tsk_thread_handle_t* tid[1];
+ BITMAPINFO bitmapInfoSrc;
+ BITMAPINFO bitmapInfoNeg;
- void* p_buff_src; // must use VirtualAlloc()
- tsk_size_t n_buff_src;
- void* p_buff_neg; // must use VirtualAlloc()
- tsk_size_t n_buff_neg;
-
- tsk_bool_t b_started;
- tsk_bool_t b_paused;
- tsk_bool_t b_muted;
+ tsk_thread_handle_t* tid[1];
- RECT rcScreen;
+ void* p_buff_src; // must use VirtualAlloc()
+ tsk_size_t n_buff_src;
+ void* p_buff_neg; // must use VirtualAlloc()
+ tsk_size_t n_buff_neg;
- TSK_DECLARE_SAFEOBJ;
+ tsk_bool_t b_started;
+ tsk_bool_t b_paused;
+ tsk_bool_t b_muted;
+
+ RECT rcScreen;
+
+ TSK_DECLARE_SAFEOBJ;
}
tdav_producer_screencast_gdi_t;
@@ -81,264 +80,268 @@ static int _tdav_producer_screencast_grab(tdav_producer_screencast_gdi_t* p_self
/* ============ Media Producer Interface ================= */
static int _tdav_producer_screencast_gdi_set(tmedia_producer_t *p_self, const tmedia_param_t* pc_param)
{
- int ret = 0;
- tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
-
- if (!p_gdi || !pc_param) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pc_param->value_type == tmedia_pvt_int64) {
- if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd")) {
- p_gdi->hwnd_preview = (HWND)*((int64_t*)pc_param->value);
- }
- else if (tsk_striequals(pc_param->key, "src-hwnd")) {
- p_gdi->hwnd_src = (HWND)*((int64_t*)pc_param->value);
- }
- }
- else if (pc_param->value_type == tmedia_pvt_int32) {
- if (tsk_striequals(pc_param->key, "mute")) {
- p_gdi->b_muted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0);
- }
- }
-
- return ret;
+ int ret = 0;
+ tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
+
+ if (!p_gdi || !pc_param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pc_param->value_type == tmedia_pvt_int64) {
+ if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd")) {
+ p_gdi->hwnd_preview = (HWND)*((int64_t*)pc_param->value);
+ }
+ else if (tsk_striequals(pc_param->key, "src-hwnd")) {
+ p_gdi->hwnd_src = (HWND)*((int64_t*)pc_param->value);
+ }
+ }
+ else if (pc_param->value_type == tmedia_pvt_int32) {
+ if (tsk_striequals(pc_param->key, "mute")) {
+ p_gdi->b_muted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0);
+ }
+ }
+
+ return ret;
}
static int _tdav_producer_screencast_gdi_prepare(tmedia_producer_t* p_self, const tmedia_codec_t* pc_codec)
{
- tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
- int ret = 0;
+ tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
+ int ret = 0;
+
+ if (!p_gdi || !pc_codec) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if (!p_gdi || !pc_codec) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ tsk_safeobj_lock(p_gdi);
- tsk_safeobj_lock(p_gdi);
-
#if METROPOLIS /*= G2J.COM */
- TMEDIA_PRODUCER(p_gdi)->video.fps = TSK_MIN(TMEDIA_CODEC_VIDEO(pc_codec)->out.fps, kMaxFrameRate);
+ TMEDIA_PRODUCER(p_gdi)->video.fps = TSK_MIN(TMEDIA_CODEC_VIDEO(pc_codec)->out.fps, kMaxFrameRate);
#else
- TMEDIA_PRODUCER(p_gdi)->video.fps = TMEDIA_CODEC_VIDEO(pc_codec)->out.fps;
+ TMEDIA_PRODUCER(p_gdi)->video.fps = TMEDIA_CODEC_VIDEO(pc_codec)->out.fps;
#endif
- TMEDIA_PRODUCER(p_gdi)->video.width = TMEDIA_CODEC_VIDEO(pc_codec)->out.width;
- TMEDIA_PRODUCER(p_gdi)->video.height = TMEDIA_CODEC_VIDEO(pc_codec)->out.height;
-
- TSK_DEBUG_INFO("[GDI screencast] fps:%d, width:%d; height:%d", TMEDIA_PRODUCER(p_gdi)->video.fps, TMEDIA_PRODUCER(p_gdi)->video.width, TMEDIA_PRODUCER(p_gdi)->video.height);
-
- p_gdi->bitmapInfoNeg.bmiHeader.biSize = p_gdi->bitmapInfoSrc.bmiHeader.biSize = (DWORD)sizeof(BITMAPINFOHEADER);
- p_gdi->bitmapInfoNeg.bmiHeader.biWidth = p_gdi->bitmapInfoSrc.bmiHeader.biWidth = (LONG)TMEDIA_PRODUCER(p_gdi)->video.width;
- p_gdi->bitmapInfoNeg.bmiHeader.biHeight = p_gdi->bitmapInfoSrc.bmiHeader.biHeight = (LONG)TMEDIA_PRODUCER(p_gdi)->video.height;
- p_gdi->bitmapInfoNeg.bmiHeader.biPlanes = p_gdi->bitmapInfoSrc.bmiHeader.biPlanes = 1;
- p_gdi->bitmapInfoNeg.bmiHeader.biBitCount = p_gdi->bitmapInfoSrc.bmiHeader.biBitCount = 24;
- p_gdi->bitmapInfoNeg.bmiHeader.biCompression = p_gdi->bitmapInfoSrc.bmiHeader.biCompression = BI_RGB;
- p_gdi->bitmapInfoNeg.bmiHeader.biSizeImage = (p_gdi->bitmapInfoNeg.bmiHeader.biWidth * p_gdi->bitmapInfoNeg.bmiHeader.biHeight * (p_gdi->bitmapInfoNeg.bmiHeader.biBitCount >> 3));
-
- if (p_gdi->n_buff_neg < p_gdi->bitmapInfoNeg.bmiHeader.biSizeImage) {
- if (p_gdi->p_buff_neg) VirtualFree(p_gdi->p_buff_neg, 0, MEM_RELEASE);
- if (!(p_gdi->p_buff_neg = VirtualAlloc(NULL, p_gdi->bitmapInfoNeg.bmiHeader.biSizeImage, MEM_RESERVE|MEM_COMMIT, PAGE_READWRITE))) {
- p_gdi->n_buff_neg = 0;
- ret = -3;
- goto bail;
- }
- p_gdi->n_buff_neg = p_gdi->bitmapInfoNeg.bmiHeader.biSizeImage;
- }
-
- /* Get screen size */ {
- HDC hDC;
- hDC = CreateDC(TEXT("DISPLAY"), NULL, NULL, NULL);
- if (!hDC) {
- TSK_DEBUG_ERROR("CreateDC failed");
- ret = -4;
- goto bail;
- }
-
- // Get the dimensions of the main desktop window
- p_gdi->rcScreen.left = p_gdi->rcScreen.top = 0;
- p_gdi->rcScreen.right = GetDeviceCaps(hDC, HORZRES);
- p_gdi->rcScreen.bottom = GetDeviceCaps(hDC, VERTRES);
-
- // Release the device context
- DeleteDC(hDC);
- }
-
+ TMEDIA_PRODUCER(p_gdi)->video.width = TMEDIA_CODEC_VIDEO(pc_codec)->out.width;
+ TMEDIA_PRODUCER(p_gdi)->video.height = TMEDIA_CODEC_VIDEO(pc_codec)->out.height;
+
+ TSK_DEBUG_INFO("[GDI screencast] fps:%d, width:%d; height:%d", TMEDIA_PRODUCER(p_gdi)->video.fps, TMEDIA_PRODUCER(p_gdi)->video.width, TMEDIA_PRODUCER(p_gdi)->video.height);
+
+ p_gdi->bitmapInfoNeg.bmiHeader.biSize = p_gdi->bitmapInfoSrc.bmiHeader.biSize = (DWORD)sizeof(BITMAPINFOHEADER);
+ p_gdi->bitmapInfoNeg.bmiHeader.biWidth = p_gdi->bitmapInfoSrc.bmiHeader.biWidth = (LONG)TMEDIA_PRODUCER(p_gdi)->video.width;
+ p_gdi->bitmapInfoNeg.bmiHeader.biHeight = p_gdi->bitmapInfoSrc.bmiHeader.biHeight = (LONG)TMEDIA_PRODUCER(p_gdi)->video.height;
+ p_gdi->bitmapInfoNeg.bmiHeader.biPlanes = p_gdi->bitmapInfoSrc.bmiHeader.biPlanes = 1;
+ p_gdi->bitmapInfoNeg.bmiHeader.biBitCount = p_gdi->bitmapInfoSrc.bmiHeader.biBitCount = 24;
+ p_gdi->bitmapInfoNeg.bmiHeader.biCompression = p_gdi->bitmapInfoSrc.bmiHeader.biCompression = BI_RGB;
+ p_gdi->bitmapInfoNeg.bmiHeader.biSizeImage = (p_gdi->bitmapInfoNeg.bmiHeader.biWidth * p_gdi->bitmapInfoNeg.bmiHeader.biHeight * (p_gdi->bitmapInfoNeg.bmiHeader.biBitCount >> 3));
+
+ if (p_gdi->n_buff_neg < p_gdi->bitmapInfoNeg.bmiHeader.biSizeImage) {
+ if (p_gdi->p_buff_neg) {
+ VirtualFree(p_gdi->p_buff_neg, 0, MEM_RELEASE);
+ }
+ if (!(p_gdi->p_buff_neg = VirtualAlloc(NULL, p_gdi->bitmapInfoNeg.bmiHeader.biSizeImage, MEM_RESERVE|MEM_COMMIT, PAGE_READWRITE))) {
+ p_gdi->n_buff_neg = 0;
+ ret = -3;
+ goto bail;
+ }
+ p_gdi->n_buff_neg = p_gdi->bitmapInfoNeg.bmiHeader.biSizeImage;
+ }
+
+ /* Get screen size */ {
+ HDC hDC;
+ hDC = CreateDC(TEXT("DISPLAY"), NULL, NULL, NULL);
+ if (!hDC) {
+ TSK_DEBUG_ERROR("CreateDC failed");
+ ret = -4;
+ goto bail;
+ }
+
+ // Get the dimensions of the main desktop window
+ p_gdi->rcScreen.left = p_gdi->rcScreen.top = 0;
+ p_gdi->rcScreen.right = GetDeviceCaps(hDC, HORZRES);
+ p_gdi->rcScreen.bottom = GetDeviceCaps(hDC, VERTRES);
+
+ // Release the device context
+ DeleteDC(hDC);
+ }
+
bail:
- tsk_safeobj_unlock(p_gdi);
- return ret;
+ tsk_safeobj_unlock(p_gdi);
+ return ret;
}
static int _tdav_producer_screencast_gdi_start(tmedia_producer_t* p_self)
{
- tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
- int ret = 0;
-
- if (!p_gdi) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_gdi);
-
- p_gdi->b_paused = tsk_false;
-
- if (p_gdi->b_started) {
- TSK_DEBUG_INFO("GDI screencast producer already started");
- goto bail;
- }
-
- p_gdi->b_started = tsk_true;
-
- tsk_thread_create(&p_gdi->tid[0], _tdav_producer_screencast_record_thread, p_gdi);
+ tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
+ int ret = 0;
+
+ if (!p_gdi) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(p_gdi);
+
+ p_gdi->b_paused = tsk_false;
+
+ if (p_gdi->b_started) {
+ TSK_DEBUG_INFO("GDI screencast producer already started");
+ goto bail;
+ }
+
+ p_gdi->b_started = tsk_true;
+
+ tsk_thread_create(&p_gdi->tid[0], _tdav_producer_screencast_record_thread, p_gdi);
#if HIGH_PRIO_BITBLIT
- if (p_gdi->tid[0]) {
- tsk_thread_set_priority(p_gdi->tid[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
- }
+ if (p_gdi->tid[0]) {
+ tsk_thread_set_priority(p_gdi->tid[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
+ }
#endif
bail:
- if (ret) {
- p_gdi->b_started = tsk_false;
- }
- tsk_safeobj_unlock(p_gdi);
+ if (ret) {
+ p_gdi->b_started = tsk_false;
+ }
+ tsk_safeobj_unlock(p_gdi);
- return ret;
+ return ret;
}
static int _tdav_producer_screencast_gdi_pause(tmedia_producer_t* p_self)
{
- tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
+ tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
- if (!p_gdi) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!p_gdi) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- tsk_safeobj_lock(p_gdi);
+ tsk_safeobj_lock(p_gdi);
- p_gdi->b_paused = tsk_true;
- goto bail;
+ p_gdi->b_paused = tsk_true;
+ goto bail;
bail:
- tsk_safeobj_unlock(p_gdi);
+ tsk_safeobj_unlock(p_gdi);
- return 0;
+ return 0;
}
static int _tdav_producer_screencast_gdi_stop(tmedia_producer_t* p_self)
{
- tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
-
- if (!p_gdi) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_gdi);
-
- if (!p_gdi->b_started) {
- TSK_DEBUG_INFO("GDI screencast producer already stopped");
- goto bail;
- }
-
- p_gdi->b_started = tsk_false;
- p_gdi->b_paused = tsk_false;
-
- // stop thread
- if (p_gdi->tid[0]) {
- tsk_thread_join(&(p_gdi->tid[0]));
- }
+ tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)p_self;
+
+ if (!p_gdi) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(p_gdi);
+
+ if (!p_gdi->b_started) {
+ TSK_DEBUG_INFO("GDI screencast producer already stopped");
+ goto bail;
+ }
+
+ p_gdi->b_started = tsk_false;
+ p_gdi->b_paused = tsk_false;
+
+ // stop thread
+ if (p_gdi->tid[0]) {
+ tsk_thread_join(&(p_gdi->tid[0]));
+ }
bail:
- tsk_safeobj_unlock(p_gdi);
+ tsk_safeobj_unlock(p_gdi);
- return 0;
+ return 0;
}
static int _tdav_producer_screencast_grab(tdav_producer_screencast_gdi_t* p_self)
{
- int ret = 0;
- HDC hSrcDC = NULL, hMemDC = NULL;
- HBITMAP hBitmap, hOldBitmap;
- int nWidth, nHeight;
- RECT rcSrc;
-
- if (!p_self) {
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- //--tsk_safeobj_lock(p_self);
-
- if (!p_self->b_started) {
- TSK_DEBUG_ERROR("producer not started yet");
- ret = -2;
- goto bail;
- }
-
- if (!TMEDIA_PRODUCER(p_self)->enc_cb.callback) {
- goto bail;
- }
-
- hSrcDC = GetDC(p_self->hwnd_src);
- if (!hSrcDC) {
- TSK_DEBUG_ERROR("GetDC(%x) failed", (int64_t)p_self->hwnd_src);
- ret = -5;
- goto bail;
- }
+ int ret = 0;
+ HDC hSrcDC = NULL, hMemDC = NULL;
+ HBITMAP hBitmap, hOldBitmap;
+ int nWidth, nHeight;
+ RECT rcSrc;
+
+ if (!p_self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ //--tsk_safeobj_lock(p_self);
+
+ if (!p_self->b_started) {
+ TSK_DEBUG_ERROR("producer not started yet");
+ ret = -2;
+ goto bail;
+ }
+
+ if (!TMEDIA_PRODUCER(p_self)->enc_cb.callback) {
+ goto bail;
+ }
+
+ hSrcDC = GetDC(p_self->hwnd_src);
+ if (!hSrcDC) {
+ TSK_DEBUG_ERROR("GetDC(%x) failed", (int64_t)p_self->hwnd_src);
+ ret = -5;
+ goto bail;
+ }
hMemDC = CreateCompatibleDC(hSrcDC);
- if (!hMemDC) {
- TSK_DEBUG_ERROR("CreateCompatibleDC(%x) failed", (int64_t)hSrcDC);
- ret = -6;
- goto bail;
- }
-
- // get points of rectangle to grab
- if (p_self->hwnd_src) {
- GetWindowRect(p_self->hwnd_src, &rcSrc);
- }
- else {
- rcSrc.left = rcSrc.top = 0;
- rcSrc.right = GetDeviceCaps(hSrcDC, HORZRES);
- rcSrc.bottom = GetDeviceCaps(hSrcDC, VERTRES);
- }
+ if (!hMemDC) {
+ TSK_DEBUG_ERROR("CreateCompatibleDC(%x) failed", (int64_t)hSrcDC);
+ ret = -6;
+ goto bail;
+ }
+
+ // get points of rectangle to grab
+ if (p_self->hwnd_src) {
+ GetWindowRect(p_self->hwnd_src, &rcSrc);
+ }
+ else {
+ rcSrc.left = rcSrc.top = 0;
+ rcSrc.right = GetDeviceCaps(hSrcDC, HORZRES);
+ rcSrc.bottom = GetDeviceCaps(hSrcDC, VERTRES);
+ }
nWidth = rcSrc.right - rcSrc.left;
- nHeight = rcSrc.bottom - rcSrc.top;
+ nHeight = rcSrc.bottom - rcSrc.top;
- p_self->bitmapInfoSrc.bmiHeader.biWidth = nWidth;
- p_self->bitmapInfoSrc.bmiHeader.biHeight = nHeight;
- p_self->bitmapInfoSrc.bmiHeader.biSizeImage = nWidth * nHeight * (p_self->bitmapInfoSrc.bmiHeader.biBitCount >> 3);
+ p_self->bitmapInfoSrc.bmiHeader.biWidth = nWidth;
+ p_self->bitmapInfoSrc.bmiHeader.biHeight = nHeight;
+ p_self->bitmapInfoSrc.bmiHeader.biSizeImage = nWidth * nHeight * (p_self->bitmapInfoSrc.bmiHeader.biBitCount >> 3);
- // create a bitmap compatible with the screen DC
+ // create a bitmap compatible with the screen DC
#if TDAV_UNDER_WINDOWS_CE
- {
- void* pvBits = NULL;
- hBitmap = CreateDIBSection(hSrcDC, &p_self->bitmapInfoSrc, DIB_RGB_COLORS, &pvBits, NULL, 0);
- if (!hBitmap || !pvBits) {
- TSK_DEBUG_ERROR("Failed to create bitmap(%dx%d)", nWidth, nHeight);
- goto bail;
- }
- p_self->p_buff_src = pvBits;
- p_self->n_buff_src = p_self->bitmapInfoSrc.bmiHeader.biSizeImage;
- }
+ {
+ void* pvBits = NULL;
+ hBitmap = CreateDIBSection(hSrcDC, &p_self->bitmapInfoSrc, DIB_RGB_COLORS, &pvBits, NULL, 0);
+ if (!hBitmap || !pvBits) {
+ TSK_DEBUG_ERROR("Failed to create bitmap(%dx%d)", nWidth, nHeight);
+ goto bail;
+ }
+ p_self->p_buff_src = pvBits;
+ p_self->n_buff_src = p_self->bitmapInfoSrc.bmiHeader.biSizeImage;
+ }
#else
hBitmap = CreateCompatibleBitmap(hSrcDC, nWidth, nHeight);
- if (!hBitmap) {
- TSK_DEBUG_ERROR("Failed to create bitmap(%dx%d)", nWidth, nHeight);
- goto bail;
- }
-
- if (p_self->n_buff_src < p_self->bitmapInfoSrc.bmiHeader.biSizeImage) {
- if (p_self->p_buff_src) VirtualFree(p_self->p_buff_src, 0, MEM_RELEASE);
- if (!(p_self->p_buff_src = VirtualAlloc(NULL, p_self->bitmapInfoSrc.bmiHeader.biSizeImage, MEM_RESERVE|MEM_COMMIT, PAGE_READWRITE))) {
- p_self->n_buff_src = 0;
- ret = -3;
- goto bail;
- }
- p_self->n_buff_src = p_self->bitmapInfoSrc.bmiHeader.biSizeImage;
- }
+ if (!hBitmap) {
+ TSK_DEBUG_ERROR("Failed to create bitmap(%dx%d)", nWidth, nHeight);
+ goto bail;
+ }
+
+ if (p_self->n_buff_src < p_self->bitmapInfoSrc.bmiHeader.biSizeImage) {
+ if (p_self->p_buff_src) {
+ VirtualFree(p_self->p_buff_src, 0, MEM_RELEASE);
+ }
+ if (!(p_self->p_buff_src = VirtualAlloc(NULL, p_self->bitmapInfoSrc.bmiHeader.biSizeImage, MEM_RESERVE|MEM_COMMIT, PAGE_READWRITE))) {
+ p_self->n_buff_src = 0;
+ ret = -3;
+ goto bail;
+ }
+ p_self->n_buff_src = p_self->bitmapInfoSrc.bmiHeader.biSizeImage;
+ }
#endif /* TDAV_UNDER_WINDOWS_CE */
// select new bitmap into memory DC
@@ -348,114 +351,114 @@ static int _tdav_producer_screencast_grab(tdav_producer_screencast_gdi_t* p_self
BitBlt(hMemDC, 0, 0, nWidth, nHeight, hSrcDC, 0, 0, SRCCOPY);
// select old bitmap back into memory DC and get handle to
- // bitmap of the screen
+ // bitmap of the screen
hBitmap = (HBITMAP) SelectObject(hMemDC, hOldBitmap);
- // Copy the bitmap data into the provided BYTE buffer
+ // Copy the bitmap data into the provided BYTE buffer
#if TDAV_UNDER_WINDOWS_CE
- // memory already retrieved using "CreateDIBSection"
+ // memory already retrieved using "CreateDIBSection"
#else
GetDIBits(hSrcDC, hBitmap, 0, nHeight, p_self->p_buff_src, &p_self->bitmapInfoSrc, DIB_RGB_COLORS);
#endif
-
- // resize
- ResizeRGB(&p_self->bitmapInfoSrc.bmiHeader,
- (const unsigned char *) p_self->p_buff_src,
- &p_self->bitmapInfoNeg.bmiHeader,
- (unsigned char *) p_self->p_buff_neg,
- p_self->bitmapInfoNeg.bmiHeader.biWidth,
- p_self->bitmapInfoNeg.bmiHeader.biHeight);
-
- // preview
- if (p_self->hwnd_preview) {
- HDC hDC = GetDC(p_self->hwnd_preview);
- if (hDC) {
- RECT rcPreview = {0};
- if (GetWindowRect(p_self->hwnd_preview, &rcPreview)) {
- LONG nPreviewWidth = (rcPreview.right - rcPreview.left);
- LONG nPreviewHeight = (rcPreview.bottom - rcPreview.top);
-
- SetStretchBltMode(hDC, COLORONCOLOR);
+
+ // resize
+ ResizeRGB(&p_self->bitmapInfoSrc.bmiHeader,
+ (const unsigned char *) p_self->p_buff_src,
+ &p_self->bitmapInfoNeg.bmiHeader,
+ (unsigned char *) p_self->p_buff_neg,
+ p_self->bitmapInfoNeg.bmiHeader.biWidth,
+ p_self->bitmapInfoNeg.bmiHeader.biHeight);
+
+ // preview
+ if (p_self->hwnd_preview) {
+ HDC hDC = GetDC(p_self->hwnd_preview);
+ if (hDC) {
+ RECT rcPreview = {0};
+ if (GetWindowRect(p_self->hwnd_preview, &rcPreview)) {
+ LONG nPreviewWidth = (rcPreview.right - rcPreview.left);
+ LONG nPreviewHeight = (rcPreview.bottom - rcPreview.top);
+
+ SetStretchBltMode(hDC, COLORONCOLOR);
#if 0 // preview(neg)
- StretchDIBits(
- hDC,
- 0, 0, nPreviewWidth, nPreviewHeight,
- 0, 0, p_self->bitmapInfoNeg.bmiHeader.biWidth, p_self->bitmapInfoNeg.bmiHeader.biHeight,
- p_self->p_buff_neg,
- &p_self->bitmapInfoNeg,
- DIB_RGB_COLORS,
- SRCCOPY);
+ StretchDIBits(
+ hDC,
+ 0, 0, nPreviewWidth, nPreviewHeight,
+ 0, 0, p_self->bitmapInfoNeg.bmiHeader.biWidth, p_self->bitmapInfoNeg.bmiHeader.biHeight,
+ p_self->p_buff_neg,
+ &p_self->bitmapInfoNeg,
+ DIB_RGB_COLORS,
+ SRCCOPY);
#else // preview(src)
- StretchDIBits(
- hDC,
- 0, 0, nPreviewWidth, nPreviewHeight,
- 0, 0, p_self->bitmapInfoSrc.bmiHeader.biWidth, p_self->bitmapInfoSrc.bmiHeader.biHeight,
- p_self->p_buff_src,
- &p_self->bitmapInfoSrc,
- DIB_RGB_COLORS,
- SRCCOPY);
-#endif
- }
- ReleaseDC(p_self->hwnd_preview, hDC);
- }
- }
-
- // encode and send data
- TMEDIA_PRODUCER(p_self)->enc_cb.callback(TMEDIA_PRODUCER(p_self)->enc_cb.callback_data, p_self->p_buff_neg, p_self->bitmapInfoNeg.bmiHeader.biSizeImage);
+ StretchDIBits(
+ hDC,
+ 0, 0, nPreviewWidth, nPreviewHeight,
+ 0, 0, p_self->bitmapInfoSrc.bmiHeader.biWidth, p_self->bitmapInfoSrc.bmiHeader.biHeight,
+ p_self->p_buff_src,
+ &p_self->bitmapInfoSrc,
+ DIB_RGB_COLORS,
+ SRCCOPY);
+#endif
+ }
+ ReleaseDC(p_self->hwnd_preview, hDC);
+ }
+ }
+
+ // encode and send data
+ TMEDIA_PRODUCER(p_self)->enc_cb.callback(TMEDIA_PRODUCER(p_self)->enc_cb.callback_data, p_self->p_buff_neg, p_self->bitmapInfoNeg.bmiHeader.biSizeImage);
bail:
- //--tsk_safeobj_unlock(p_self);
-
- if (hSrcDC) {
- ReleaseDC(p_self->hwnd_src, hSrcDC);
- }
- if (hMemDC) {
- DeleteDC(hMemDC);
- }
-
- if (hBitmap) {
+ //--tsk_safeobj_unlock(p_self);
+
+ if (hSrcDC) {
+ ReleaseDC(p_self->hwnd_src, hSrcDC);
+ }
+ if (hMemDC) {
+ DeleteDC(hMemDC);
+ }
+
+ if (hBitmap) {
DeleteObject(hBitmap);
- if (!bitmapBuffSrcOwnMemory) {
- p_self->p_buff_src = NULL;
- p_self->n_buff_src = 0;
- }
- }
+ if (!bitmapBuffSrcOwnMemory) {
+ p_self->p_buff_src = NULL;
+ p_self->n_buff_src = 0;
+ }
+ }
- return ret;
+ return ret;
}
static void* TSK_STDCALL _tdav_producer_screencast_record_thread(void *arg)
{
- tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)arg;
- int ret = 0;
-
- // FPS manager
- uint64_t TimeNow, TimeLastFrame = 0;
- const uint64_t TimeFrameDuration = (1000 / TMEDIA_PRODUCER(p_gdi)->video.fps);
-
- TSK_DEBUG_INFO("_tdav_producer_screencast_record_thread -- START");
-
- while (ret == 0 && p_gdi->b_started) {
- TimeNow = tsk_time_now();
- if ((TimeNow - TimeLastFrame) >= TimeFrameDuration) {
- if (!p_gdi->b_muted && !p_gdi->b_paused) {
- if (ret = _tdav_producer_screencast_grab(p_gdi)) {
- goto next;
- }
- }
- TimeLastFrame = TimeNow;
- }
- else {
- tsk_thread_sleep(1);
+ tdav_producer_screencast_gdi_t* p_gdi = (tdav_producer_screencast_gdi_t*)arg;
+ int ret = 0;
+
+ // FPS manager
+ uint64_t TimeNow, TimeLastFrame = 0;
+ const uint64_t TimeFrameDuration = (1000 / TMEDIA_PRODUCER(p_gdi)->video.fps);
+
+ TSK_DEBUG_INFO("_tdav_producer_screencast_record_thread -- START");
+
+ while (ret == 0 && p_gdi->b_started) {
+ TimeNow = tsk_time_now();
+ if ((TimeNow - TimeLastFrame) >= TimeFrameDuration) {
+ if (!p_gdi->b_muted && !p_gdi->b_paused) {
+ if (ret = _tdav_producer_screencast_grab(p_gdi)) {
+ goto next;
+ }
+ }
+ TimeLastFrame = TimeNow;
+ }
+ else {
+ tsk_thread_sleep(1);
#if 0
- TSK_DEBUG_INFO("[GDI screencast] Skip frame");
+ TSK_DEBUG_INFO("[GDI screencast] Skip frame");
#endif
- }
- next:
- ;
- }
- TSK_DEBUG_INFO("_tdav_producer_screencast_record_thread -- STOP");
- return tsk_null;
+ }
+next:
+ ;
+ }
+ TSK_DEBUG_INFO("_tdav_producer_screencast_record_thread -- STOP");
+ return tsk_null;
}
//
@@ -464,70 +467,68 @@ static void* TSK_STDCALL _tdav_producer_screencast_record_thread(void *arg)
/* constructor */
static tsk_object_t* _tdav_producer_screencast_gdi_ctor(tsk_object_t *self, va_list * app)
{
- tdav_producer_screencast_gdi_t *p_gdi = (tdav_producer_screencast_gdi_t *)self;
- if (p_gdi) {
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(p_gdi));
- TMEDIA_PRODUCER(p_gdi)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
- /* init self with default values*/
- TMEDIA_PRODUCER(p_gdi)->video.fps = 15;
- TMEDIA_PRODUCER(p_gdi)->video.width = 352;
- TMEDIA_PRODUCER(p_gdi)->video.height = 288;
-
- tsk_safeobj_init(p_gdi);
- }
- return self;
+ tdav_producer_screencast_gdi_t *p_gdi = (tdav_producer_screencast_gdi_t *)self;
+ if (p_gdi) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(p_gdi));
+ TMEDIA_PRODUCER(p_gdi)->video.chroma = tmedia_chroma_bgr24; // RGB24 on x86 (little endians) stored as BGR24
+ /* init self with default values*/
+ TMEDIA_PRODUCER(p_gdi)->video.fps = 15;
+ TMEDIA_PRODUCER(p_gdi)->video.width = 352;
+ TMEDIA_PRODUCER(p_gdi)->video.height = 288;
+
+ tsk_safeobj_init(p_gdi);
+ }
+ return self;
}
/* destructor */
static tsk_object_t* _tdav_producer_screencast_gdi_dtor(tsk_object_t * self)
-{
- tdav_producer_screencast_gdi_t *p_gdi = (tdav_producer_screencast_gdi_t *)self;
- if (p_gdi) {
- /* stop */
- if (p_gdi->b_started) {
- _tdav_producer_screencast_gdi_stop((tmedia_producer_t*)p_gdi);
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(p_gdi));
- /* deinit self */
- if (p_gdi->p_buff_neg) {
- VirtualFree(p_gdi->p_buff_neg, 0, MEM_RELEASE);
- p_gdi->p_buff_neg = NULL;
- }
- if (p_gdi->p_buff_src) {
- if (bitmapBuffSrcOwnMemory) {
- VirtualFree(p_gdi->p_buff_src, 0, MEM_RELEASE);
- }
- p_gdi->p_buff_src = NULL;
- }
- tsk_safeobj_deinit(p_gdi);
-
- TSK_DEBUG_INFO("*** GDI Screencast producer destroyed ***");
- }
-
- return self;
+{
+ tdav_producer_screencast_gdi_t *p_gdi = (tdav_producer_screencast_gdi_t *)self;
+ if (p_gdi) {
+ /* stop */
+ if (p_gdi->b_started) {
+ _tdav_producer_screencast_gdi_stop((tmedia_producer_t*)p_gdi);
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(p_gdi));
+ /* deinit self */
+ if (p_gdi->p_buff_neg) {
+ VirtualFree(p_gdi->p_buff_neg, 0, MEM_RELEASE);
+ p_gdi->p_buff_neg = NULL;
+ }
+ if (p_gdi->p_buff_src) {
+ if (bitmapBuffSrcOwnMemory) {
+ VirtualFree(p_gdi->p_buff_src, 0, MEM_RELEASE);
+ }
+ p_gdi->p_buff_src = NULL;
+ }
+ tsk_safeobj_deinit(p_gdi);
+
+ TSK_DEBUG_INFO("*** GDI Screencast producer destroyed ***");
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_producer_screencast_gdi_def_s =
-{
- sizeof(tdav_producer_screencast_gdi_t),
- _tdav_producer_screencast_gdi_ctor,
- _tdav_producer_screencast_gdi_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_producer_screencast_gdi_def_s = {
+ sizeof(tdav_producer_screencast_gdi_t),
+ _tdav_producer_screencast_gdi_ctor,
+ _tdav_producer_screencast_gdi_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t tdav_producer_screencast_gdi_plugin_def_s =
-{
- &tdav_producer_screencast_gdi_def_s,
- tmedia_bfcp_video,
- "Microsoft GDI screencast producer",
-
- _tdav_producer_screencast_gdi_set,
- _tdav_producer_screencast_gdi_prepare,
- _tdav_producer_screencast_gdi_start,
- _tdav_producer_screencast_gdi_pause,
- _tdav_producer_screencast_gdi_stop
+static const tmedia_producer_plugin_def_t tdav_producer_screencast_gdi_plugin_def_s = {
+ &tdav_producer_screencast_gdi_def_s,
+ tmedia_bfcp_video,
+ "Microsoft GDI screencast producer",
+
+ _tdav_producer_screencast_gdi_set,
+ _tdav_producer_screencast_gdi_prepare,
+ _tdav_producer_screencast_gdi_start,
+ _tdav_producer_screencast_gdi_pause,
+ _tdav_producer_screencast_gdi_stop
};
const tmedia_producer_plugin_def_t *tdav_producer_screencast_gdi_plugin_def_t = &tdav_producer_screencast_gdi_plugin_def_s;
diff --git a/tinyDAV/src/video/jb/tdav_video_frame.c b/tinyDAV/src/video/jb/tdav_video_frame.c
index fc7cbc3..2abab76 100755
--- a/tinyDAV/src/video/jb/tdav_video_frame.c
+++ b/tinyDAV/src/video/jb/tdav_video_frame.c
@@ -36,8 +36,8 @@
static tsk_object_t* tdav_video_frame_ctor(tsk_object_t * self, va_list * app)
{
tdav_video_frame_t *frame = self;
- if(frame){
- if(!(frame->pkts = tsk_list_create())){
+ if(frame) {
+ if(!(frame->pkts = tsk_list_create())) {
TSK_DEBUG_ERROR("Faile to list");
return tsk_null;
}
@@ -48,27 +48,30 @@ static tsk_object_t* tdav_video_frame_ctor(tsk_object_t * self, va_list * app)
static tsk_object_t* tdav_video_frame_dtor(tsk_object_t * self)
{
tdav_video_frame_t *frame = self;
- if(frame){
+ if(frame) {
TSK_OBJECT_SAFE_FREE(frame->pkts);
-
+
tsk_safeobj_deinit(frame);
}
-
+
return self;
}
static int tdav_video_frame_cmp(const tsk_object_t *_p1, const tsk_object_t *_p2)
{
const tdav_video_frame_t *p1 = _p1;
const tdav_video_frame_t *p2 = _p2;
-
- if(p1 && p2){
+
+ if(p1 && p2) {
return (int)(p1->timestamp - p2->timestamp);
}
- else if(!p1 && !p2) return 0;
- else return -1;
+ else if(!p1 && !p2) {
+ return 0;
+ }
+ else {
+ return -1;
+ }
}
-static const tsk_object_def_t tdav_video_frame_def_s =
-{
+static const tsk_object_def_t tdav_video_frame_def_s = {
sizeof(tdav_video_frame_t),
tdav_video_frame_ctor,
tdav_video_frame_dtor,
@@ -80,12 +83,12 @@ const tsk_object_def_t *tdav_video_frame_def_t = &tdav_video_frame_def_s;
tdav_video_frame_t* tdav_video_frame_create(trtp_rtp_packet_t* rtp_pkt)
{
tdav_video_frame_t* frame;
- if(!rtp_pkt || !rtp_pkt->header){
+ if(!rtp_pkt || !rtp_pkt->header) {
TSK_DEBUG_ERROR("Invalid parameter");
return tsk_null;
}
-
- if((frame = tsk_object_new(tdav_video_frame_def_t))){
+
+ if((frame = tsk_object_new(tdav_video_frame_def_t))) {
rtp_pkt = tsk_object_ref(rtp_pkt);
frame->payload_type = rtp_pkt->header->payload_type;
frame->timestamp = rtp_pkt->header->timestamp;
@@ -98,25 +101,25 @@ tdav_video_frame_t* tdav_video_frame_create(trtp_rtp_packet_t* rtp_pkt)
int tdav_video_frame_put(tdav_video_frame_t* self, trtp_rtp_packet_t* rtp_pkt)
{
- if(!self || !rtp_pkt || !rtp_pkt->header){
+ if(!self || !rtp_pkt || !rtp_pkt->header) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
- if(self->timestamp != rtp_pkt->header->timestamp){
+ if(self->timestamp != rtp_pkt->header->timestamp) {
TSK_DEBUG_ERROR("Timestamp mismatch");
return -2;
}
- if(self->payload_type != rtp_pkt->header->payload_type){
+ if(self->payload_type != rtp_pkt->header->payload_type) {
TSK_DEBUG_ERROR("Payload Type mismatch");
return -2;
}
#if 0
- if(self->ssrc != rtp_pkt->header->ssrc){
+ if(self->ssrc != rtp_pkt->header->ssrc) {
TSK_DEBUG_ERROR("SSRC mismatch");
return -2;
}
#endif
-
+
rtp_pkt = tsk_object_ref(rtp_pkt);
self->highest_seq_num = TSK_MAX(self->highest_seq_num, rtp_pkt->header->seq_num);
tsk_list_lock(self->pkts);
@@ -127,7 +130,7 @@ int tdav_video_frame_put(tdav_video_frame_t* self, trtp_rtp_packet_t* rtp_pkt)
tsk_list_push_ascending_data(self->pkts, (void**)&rtp_pkt);
}
tsk_list_unlock(self->pkts);
-
+
return 0;
}
@@ -136,26 +139,26 @@ const trtp_rtp_packet_t* tdav_video_frame_find_by_seq_num(const tdav_video_frame
const tsk_list_item_t *item;
const trtp_rtp_packet_t* pkt;
const trtp_rtp_packet_t* ret;
-
- if(!self){
+
+ if(!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return tsk_null;
}
-
+
ret = tsk_null;
-
+
tsk_list_lock(self->pkts);
- tsk_list_foreach(item, self->pkts){
- if(!(pkt = item->data) || !pkt->header){
+ tsk_list_foreach(item, self->pkts) {
+ if(!(pkt = item->data) || !pkt->header) {
continue;
}
- if(pkt->header->seq_num == seq_num){
+ if(pkt->header->seq_num == seq_num) {
ret = pkt;
break;
}
}
tsk_list_unlock(self->pkts);
-
+
return ret;
}
@@ -168,19 +171,19 @@ tsk_size_t tdav_video_frame_write(struct tdav_video_frame_s* self, void** buffer
const trtp_rtp_packet_t* pkt;
tsk_size_t ret_size = 0;
int32_t last_seq_num = -1; // guard against duplicated packets
-
- if(!self || !buffer_ptr || !buffer_size){
+
+ if(!self || !buffer_ptr || !buffer_size) {
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
-
+
tsk_list_lock(self->pkts);
- tsk_list_foreach(item, self->pkts){
- if(!(pkt = item->data) || !pkt->payload.size || !pkt->header || pkt->header->seq_num == last_seq_num){
+ tsk_list_foreach(item, self->pkts) {
+ if(!(pkt = item->data) || !pkt->payload.size || !pkt->header || pkt->header->seq_num == last_seq_num) {
continue;
}
- if((ret_size + pkt->payload.size) > *buffer_size){
- if(!(*buffer_ptr = tsk_realloc(*buffer_ptr, (ret_size + pkt->payload.size)))){
+ if((ret_size + pkt->payload.size) > *buffer_size) {
+ if(!(*buffer_ptr = tsk_realloc(*buffer_ptr, (ret_size + pkt->payload.size)))) {
TSK_DEBUG_ERROR("Failed to resize the buffer");
*buffer_size = 0;
goto bail;
@@ -191,10 +194,10 @@ tsk_size_t tdav_video_frame_write(struct tdav_video_frame_s* self, void** buffer
ret_size += pkt->payload.size;
last_seq_num = pkt->header->seq_num;
}
-
+
bail:
tsk_list_unlock(self->pkts);
-
+
return ret_size;
}
@@ -213,31 +216,39 @@ tsk_bool_t tdav_video_frame_is_complete(const tdav_video_frame_t* self, int32_t
const tsk_list_item_t *item;
uint16_t i;
tsk_bool_t is_complete = tsk_false;
-
- if (!self){
+
+ if (!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return tsk_false;
}
-
+
i = 0;
tsk_list_lock(self->pkts);
tsk_list_foreach (item, self->pkts) {
- if (!(pkt = item->data)){
+ if (!(pkt = item->data)) {
continue;
}
if (last_seq_num_with_mark >= 0 && pkt->header->seq_num != (last_seq_num_with_mark + ++i)) {
- if (missing_seq_num_start) *missing_seq_num_start = (last_seq_num_with_mark + i);
- if (missing_seq_num_count) *missing_seq_num_count = pkt->header->seq_num - (*missing_seq_num_start);
+ if (missing_seq_num_start) {
+ *missing_seq_num_start = (last_seq_num_with_mark + i);
+ }
+ if (missing_seq_num_count) {
+ *missing_seq_num_count = pkt->header->seq_num - (*missing_seq_num_start);
+ }
break;
}
if (item == self->pkts->tail) {
- if(!(is_complete = (pkt->header->marker))){
- if (missing_seq_num_start) *missing_seq_num_start = (pkt->header->seq_num + 1);
- if (missing_seq_num_count) *missing_seq_num_count = 1;
+ if(!(is_complete = (pkt->header->marker))) {
+ if (missing_seq_num_start) {
+ *missing_seq_num_start = (pkt->header->seq_num + 1);
+ }
+ if (missing_seq_num_count) {
+ *missing_seq_num_count = 1;
+ }
}
}
}
tsk_list_unlock(self->pkts);
-
+
return is_complete;
}
diff --git a/tinyDAV/src/video/jb/tdav_video_jb.c b/tinyDAV/src/video/jb/tdav_video_jb.c
index ec9b53b..ae24a2a 100755
--- a/tinyDAV/src/video/jb/tdav_video_jb.c
+++ b/tinyDAV/src/video/jb/tdav_video_jb.c
@@ -65,10 +65,9 @@ static int _tdav_video_jb_set_defaults(struct tdav_video_jb_s* self);
static const tdav_video_frame_t* _tdav_video_jb_get_frame(struct tdav_video_jb_s* self, uint32_t timestamp, uint8_t pt, tsk_bool_t *pt_matched);
static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg);
-typedef struct tdav_video_jb_s
-{
+typedef struct tdav_video_jb_s {
TSK_DECLARE_OBJECT;
-
+
tsk_bool_t started;
int32_t fps;
int32_t fps_prob;
@@ -79,30 +78,30 @@ typedef struct tdav_video_jb_s
int32_t tail_max;
tdav_video_frames_L_t *frames;
int64_t frames_count;
-
+
tsk_size_t latency_min;
tsk_size_t latency_max;
-
+
uint32_t decode_last_timestamp;
int32_t decode_last_seq_num_with_mark; // -1 = unset
uint64_t decode_last_time;
tsk_thread_handle_t* decode_thread[1];
tsk_condwait_handle_t* decode_thread_cond;
-
+
uint16_t seq_nums[0xFF];
tdav_video_jb_cb_f callback;
const void* callback_data;
-
+
// to avoid locking use different cb_data
tdav_video_jb_cb_data_xt cb_data_rtp;
tdav_video_jb_cb_data_xt cb_data_fdd;
tdav_video_jb_cb_data_xt cb_data_any;
-
- struct{
+
+ struct {
void* ptr;
tsk_size_t size;
} buffer;
-
+
TSK_DECLARE_SAFEOBJ;
}
tdav_video_jb_t;
@@ -111,18 +110,18 @@ tdav_video_jb_t;
static tsk_object_t* tdav_video_jb_ctor(tsk_object_t * self, va_list * app)
{
tdav_video_jb_t *jb = self;
- if(jb){
- if(!(jb->frames = tsk_list_create())){
+ if(jb) {
+ if(!(jb->frames = tsk_list_create())) {
TSK_DEBUG_ERROR("Failed to create list");
return tsk_null;
}
- if(!(jb->decode_thread_cond = tsk_condwait_create())){
+ if(!(jb->decode_thread_cond = tsk_condwait_create())) {
TSK_DEBUG_ERROR("Failed to create condition var");
return tsk_null;
}
jb->cb_data_fdd.type = tdav_video_jb_cb_data_type_fdd;
jb->cb_data_rtp.type = tdav_video_jb_cb_data_type_rtp;
-
+
tsk_safeobj_init(jb);
}
return self;
@@ -130,22 +129,21 @@ static tsk_object_t* tdav_video_jb_ctor(tsk_object_t * self, va_list * app)
static tsk_object_t* tdav_video_jb_dtor(tsk_object_t * self)
{
tdav_video_jb_t *jb = self;
- if(jb){
- if(jb->started){
+ if(jb) {
+ if(jb->started) {
tdav_video_jb_stop(jb);
}
TSK_OBJECT_SAFE_FREE(jb->frames);
- if(jb->decode_thread_cond){
+ if(jb->decode_thread_cond) {
tsk_condwait_destroy(&jb->decode_thread_cond);
}
TSK_SAFE_FREE(jb->buffer.ptr);
tsk_safeobj_deinit(jb);
}
-
+
return self;
}
-static const tsk_object_def_t tdav_video_jb_def_s =
-{
+static const tsk_object_def_t tdav_video_jb_def_s = {
sizeof(tdav_video_jb_t),
tdav_video_jb_ctor,
tdav_video_jb_dtor,
@@ -155,7 +153,7 @@ static const tsk_object_def_t tdav_video_jb_def_s =
tdav_video_jb_t* tdav_video_jb_create()
{
tdav_video_jb_t* jb;
-
+
if ((jb = tsk_object_new(&tdav_video_jb_def_s))) {
if (_tdav_video_jb_set_defaults(jb) != 0) {
TSK_OBJECT_SAFE_FREE(jb);
@@ -176,7 +174,7 @@ tdav_video_jb_t* tdav_video_jb_create()
int tdav_video_jb_set_callback(tdav_video_jb_t* self, tdav_video_jb_cb_f callback, const void* usr_data)
{
- if(!self){
+ if(!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
@@ -187,27 +185,49 @@ int tdav_video_jb_set_callback(tdav_video_jb_t* self, tdav_video_jb_cb_f callbac
return 0;
}
+// Congestion quality metrics based
+int tdav_video_jb_get_qcong(tdav_video_jb_t* self, float* q)
+{
+ float lm;
+ if (!self || !q) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ lm = (float)self->latency_max;
+ if (lm <= 0.f) { // must never happen...but used as a guard against div(0)
+ *q = 1.f;
+ }
+ else {
+ // when "frames_count" is > "latency_max" q is < 0 but it'll be clipped to 0.f
+ *q = 1.f - (self->frames_count / lm);
+ }
+ // 0.0001f instead of zero which could be interpreted as "no data available"
+ // 0.0001f encoded to 1-byte in RTCP-RR-JCNG will be coded as (0.0001f * 255.f) = zero
+ *q = TSK_CLAMP(0.0001f, *q, 1.f);
+ return 0;
+}
+
int tdav_video_jb_start(tdav_video_jb_t* self)
{
int ret = 0;
- if(!self){
+ if(!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
- if(self->started){
+ if(self->started) {
return 0;
}
-
+
self->started = tsk_true;
-
- if(!self->decode_thread[0]){
+
+ if(!self->decode_thread[0]) {
ret = tsk_thread_create(&self->decode_thread[0], _tdav_video_jb_decode_thread_func, self);
- if(ret != 0 || !self->decode_thread[0]){
+ if(ret != 0 || !self->decode_thread[0]) {
TSK_DEBUG_ERROR("Failed to create new thread");
}
ret = tsk_thread_set_priority(self->decode_thread[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
}
-
+
return ret;
}
@@ -220,43 +240,43 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
const tdav_video_frame_t* old_frame;
tsk_bool_t pt_matched = tsk_false, is_frame_late_or_dup = tsk_false, is_restarted = tsk_false;
uint16_t* seq_num;
-
- if(!self || !rtp_pkt || !rtp_pkt->header){
+
+ if(!self || !rtp_pkt || !rtp_pkt->header) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
- if(!self->started){
+
+ if(!self->started) {
TSK_DEBUG_INFO("Video jitter buffer not started");
return 0;
}
-
+
seq_num = &self->seq_nums[rtp_pkt->header->payload_type];
-
+
tsk_safeobj_lock(self);
-
+
//TSK_DEBUG_INFO("receive seqnum=%u", rtp_pkt->header->seq_num);
-
- if(self->decode_last_timestamp && (self->decode_last_timestamp > rtp_pkt->header->timestamp)){
- if((self->decode_last_timestamp - rtp_pkt->header->timestamp) < TDAV_VIDEO_JB_MAX_DROPOUT){
+
+ if(self->decode_last_timestamp && (self->decode_last_timestamp > rtp_pkt->header->timestamp)) {
+ if((self->decode_last_timestamp - rtp_pkt->header->timestamp) < TDAV_VIDEO_JB_MAX_DROPOUT) {
TSK_DEBUG_INFO("--------Frame already Decoded [seqnum=%u]------------", rtp_pkt->header->seq_num);
tsk_safeobj_unlock(self);
return 0;
}
}
-
+
old_frame = _tdav_video_jb_get_frame(self, rtp_pkt->header->timestamp, rtp_pkt->header->payload_type, &pt_matched);
-
- if((*seq_num && *seq_num != 0xFFFF) && (*seq_num + 1) != rtp_pkt->header->seq_num){
+
+ if((*seq_num && *seq_num != 0xFFFF) && (*seq_num + 1) != rtp_pkt->header->seq_num) {
int32_t diff = ((int32_t)rtp_pkt->header->seq_num - (int32_t)*seq_num);
tsk_bool_t is_frame_loss = (diff > 0);
is_restarted = (TSK_ABS(diff) > TDAV_VIDEO_JB_MAX_DROPOUT);
is_frame_late_or_dup = !is_frame_loss;
tdav_video_jb_reset_fps_prob(self);
TSK_DEBUG_INFO("Packet %s (from JB) [%hu - %hu]", is_frame_loss ? "loss" : "late/duplicated/nack", *seq_num, rtp_pkt->header->seq_num);
-
- if(is_frame_loss && !is_restarted){
- if(self->callback){
+
+ if(is_frame_loss && !is_restarted) {
+ if(self->callback) {
self->cb_data_any.type = tdav_video_jb_cb_data_type_fl;
self->cb_data_any.ssrc = rtp_pkt->header->ssrc;
self->cb_data_any.fl.seq_num = (*seq_num + 1);
@@ -265,48 +285,48 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
}
}
}
-
- if(!old_frame){
+
+ if(!old_frame) {
tdav_video_frame_t* new_frame;
- if(pt_matched){
+ if(pt_matched) {
// if we have a frame with the same payload type but without this timestamp this means that we moved to a new frame
// this happens if the frame is waiting to be decoded or the marker is lost
}
- if((new_frame = tdav_video_frame_create(rtp_pkt))){
+ if((new_frame = tdav_video_frame_create(rtp_pkt))) {
// compute avg frame duration
- if(self->last_timestamp && self->last_timestamp < rtp_pkt->header->timestamp){
+ if(self->last_timestamp && self->last_timestamp < rtp_pkt->header->timestamp) {
uint32_t duration = (rtp_pkt->header->timestamp - self->last_timestamp)/self->rate;
self->avg_duration = self->avg_duration ? ((self->avg_duration + duration) >> 1) : duration;
--self->fps_prob;
}
self->last_timestamp = rtp_pkt->header->timestamp;
-
+
tsk_list_lock(self->frames);
- if(self->frames_count >= self->tail_max){
- if(++self->conseq_frame_drop >= self->tail_max){
+ if(self->frames_count >= self->tail_max) {
+ if(++self->conseq_frame_drop >= self->tail_max) {
TSK_DEBUG_ERROR("Too many frames dropped and fps=%d", self->fps);
tsk_list_clear_items(self->frames);
self->conseq_frame_drop = 0;
self->frames_count = 1;
- if(self->callback){
+ if(self->callback) {
self->cb_data_any.type = tdav_video_jb_cb_data_type_tmfr;
self->cb_data_any.ssrc = rtp_pkt->header->ssrc;
self->callback(&self->cb_data_any);
}
}
- else{
+ else {
TSK_DEBUG_INFO("Dropping video frame because frames_count(%lld)>=tail_max(%d)", self->frames_count, self->tail_max);
tsk_list_remove_first_item(self->frames);
}
tdav_video_jb_reset_fps_prob(self);
}
- else{
+ else {
++self->frames_count;
}
tsk_list_push_ascending_data(self->frames, (void**)&new_frame);
tsk_list_unlock(self->frames);
}
- if(self->fps_prob <= 0 && self->avg_duration){
+ if(self->fps_prob <= 0 && self->avg_duration) {
// compute FPS using timestamp values
int32_t fps_new = (1000 / self->avg_duration);
int32_t fps_old = self->fps;
@@ -315,7 +335,7 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
self->latency_max = self->fps; // maximum = 1 second
TSK_DEBUG_INFO("According to rtp-timestamps ...FPS = %d (clipped to %d) tail_max=%d, latency_max=%u", fps_new, self->fps, self->tail_max, (unsigned)self->latency_max);
tdav_video_jb_reset_fps_prob(self);
- if(self->callback && (fps_old != self->fps)){
+ if(self->callback && (fps_old != self->fps)) {
self->cb_data_any.type = tdav_video_jb_cb_data_type_fps_changed;
self->cb_data_any.ssrc = rtp_pkt->header->ssrc;
self->cb_data_any.fps.new = self->fps; // clipped value
@@ -324,51 +344,51 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
}
}
}
- else{
+ else {
tdav_video_frame_put((tdav_video_frame_t*)old_frame, rtp_pkt);
}
-
+
tsk_safeobj_unlock(self);
-
- if(!is_frame_late_or_dup || is_restarted){
+
+ if(!is_frame_late_or_dup || is_restarted) {
*seq_num = rtp_pkt->header->seq_num;
}
#endif
-
+
return 0;
}
int tdav_video_jb_stop(tdav_video_jb_t* self)
{
int ret;
- if(!self){
+ if(!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
- if(!self->started){
+ if(!self->started) {
return 0;
}
-
+
TSK_DEBUG_INFO("tdav_video_jb_stop()");
-
+
self->started = tsk_false;
-
+
ret = tsk_condwait_broadcast(self->decode_thread_cond);
-
+
if (self->decode_thread[0]) {
ret = tsk_thread_join(&self->decode_thread[0]);
}
-
+
// clear pending frames
tsk_list_lock(self->frames);
tsk_list_clear_items(self->frames);
self->frames_count = 0;
tsk_list_unlock(self->frames);
-
+
// reset default values to make sure next start will be called with right defaults
// do not call this function in start to avoid overriding values defined between prepare() and start()
_tdav_video_jb_set_defaults(self);
-
+
return ret;
}
@@ -378,7 +398,7 @@ static int _tdav_video_jb_set_defaults(struct tdav_video_jb_s* self)
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
self->fps = TDAV_VIDEO_JB_FPS;
self->fps_prob = TDAV_VIDEO_JB_FPS_PROB;
self->tail_max = TDAV_VIDEO_JB_TAIL_MAX;
@@ -389,10 +409,10 @@ static int _tdav_video_jb_set_defaults(struct tdav_video_jb_s* self)
self->decode_last_timestamp = 0;
self->decode_last_seq_num_with_mark = -1;
self->decode_last_time = 0;
-
+
self->latency_min = TDAV_VIDEO_JB_LATENCY_MIN;
self->latency_max = TDAV_VIDEO_JB_LATENCY_MAX;
-
+
return 0;
}
@@ -400,22 +420,24 @@ static const tdav_video_frame_t* _tdav_video_jb_get_frame(tdav_video_jb_t* self,
{
const tdav_video_frame_t* ret = tsk_null;
const tsk_list_item_t *item;
-
+
*pt_matched =tsk_false;
-
+
tsk_list_lock(self->frames);
- tsk_list_foreach(item, self->frames){
- if(TDAV_VIDEO_FRAME(item->data)->payload_type == pt){
- if(!(*pt_matched)) *pt_matched = tsk_true;
- if(TDAV_VIDEO_FRAME(item->data)->timestamp == timestamp){
+ tsk_list_foreach(item, self->frames) {
+ if(TDAV_VIDEO_FRAME(item->data)->payload_type == pt) {
+ if(!(*pt_matched)) {
+ *pt_matched = tsk_true;
+ }
+ if(TDAV_VIDEO_FRAME(item->data)->timestamp == timestamp) {
ret = item->data;
break;
}
}
-
+
}
tsk_list_unlock(self->frames);
-
+
return ret;
}
@@ -433,36 +455,36 @@ static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
#if 0
static const uint64_t __toomuch_delay_to_be_valid = 10000; // guard against systems with buggy "tsk_time_now()" -Won't say Windows ...but :)-
#endif
-
+
jb->decode_last_seq_num_with_mark = -1; // -1 -> unset
jb->decode_last_time = tsk_time_now();
-
+
(void)(now);
//(void)(delay);
-
+
TSK_DEBUG_INFO("Video jitter buffer thread - ENTER");
-
- while(jb->started){
+
+ while(jb->started) {
now = tsk_time_now();
if (next_decode_duration > 0) {
tsk_condwait_timedwait(jb->decode_thread_cond, next_decode_duration);
}
-
- if(!jb->started){
+
+ if(!jb->started) {
break;
}
-
+
// TSK_DEBUG_INFO("Frames count = %d", jb->frames_count);
-
+
// the second condition (jb->frames_count > 0 && latency >= jb->latency_max) is required to make sure we'll process the pending pkts even if the remote party stops sending frames. GE issue: device stops sending frames when it enters in "frame freeze" mode which means #"latency_min" frames won't be displayed.
if (jb->frames_count >= (int64_t)jb->latency_min || (jb->frames_count > 0 && latency >= jb->latency_max)) {
- tsk_list_item_t *item = tsk_null;
+ tsk_list_item_t *item = tsk_null;
postpone = tsk_false;
latency = 0;
-
+
tsk_safeobj_lock(jb); // against get_frame()
tsk_list_lock(jb->frames); // against put()
-
+
// is it still acceptable to wait for missing packets?
if (jb->frames_count < (int64_t)jb->latency_max) {
frame = (const tdav_video_frame_t*)jb->frames->head->data;
@@ -471,7 +493,7 @@ static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
// signal to the session that a sequence number is missing (will send a NACK)
// the missing seqnum has been already requested in jb_put() and here we request it again only ONE time
if (jb->callback && frame) {
- if(prev_missing_seq_num_start != missing_seq_num_start || prev_lasted_missing_seq_num_count != missing_seq_num_count){ // guard to request it only once
+ if(prev_missing_seq_num_start != missing_seq_num_start || prev_lasted_missing_seq_num_count != missing_seq_num_count) { // guard to request it only once
jb->cb_data_any.type = tdav_video_jb_cb_data_type_fl;
jb->cb_data_any.ssrc = frame->ssrc;
jb->cb_data_any.fl.seq_num = prev_missing_seq_num_start = missing_seq_num_start;
@@ -488,34 +510,34 @@ static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
// postpone is equal to "tsk_false" which means the pending frame will be displayed in all cases
}
if (!postpone) {
- if ((item = tsk_list_pop_first_item(jb->frames))) { // always true (jb->frames_count > 0)
- --jb->frames_count;
- // Update the latest decoded timestamp here while we have the lock on the frames
- jb->decode_last_timestamp = ((const tdav_video_frame_t*)item->data)->timestamp;
- }
+ if ((item = tsk_list_pop_first_item(jb->frames))) { // always true (jb->frames_count > 0)
+ --jb->frames_count;
+ // Update the latest decoded timestamp here while we have the lock on the frames
+ jb->decode_last_timestamp = ((const tdav_video_frame_t*)item->data)->timestamp;
+ }
}
tsk_list_unlock(jb->frames);
tsk_safeobj_unlock(jb);
-
+
if (item) {
- if(jb->callback){
+ if(jb->callback) {
trtp_rtp_packet_t* pkt;
const tsk_list_item_t* _item = item; // save memory address as "tsk_list_foreach() will change it for each loop"
int32_t last_seq_num = -1; // guard against duplicated packets
frame = _item->data;
- tsk_list_foreach(_item, frame->pkts){
- if(!(pkt = _item->data) || !pkt->payload.size || !pkt->header || pkt->header->seq_num == last_seq_num || !jb->started){
+ tsk_list_foreach(_item, frame->pkts) {
+ if(!(pkt = _item->data) || !pkt->payload.size || !pkt->header || pkt->header->seq_num == last_seq_num || !jb->started) {
TSK_DEBUG_ERROR("Skipping invalid rtp packet (do not decode!)");
continue;
}
jb->cb_data_rtp.rtp.pkt = pkt;
jb->callback(&jb->cb_data_rtp);
- if(pkt->header->marker){
+ if(pkt->header->marker) {
jb->decode_last_seq_num_with_mark = pkt->header->seq_num;
}
}
}
-
+
TSK_OBJECT_SAFE_FREE(item);
}
}
@@ -524,18 +546,18 @@ static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
latency++;
}
}
-
+
#if 1
- if (cleaning_delay || jb->frames_count > (int64_t)jb->latency_max){
+ if (cleaning_delay || jb->frames_count > (int64_t)jb->latency_max) {
//x_decode_time = now;
next_decode_duration = 0;
cleaning_delay = ((jb->frames_count << 1) > (int64_t)jb->latency_max); // cleanup up2 half
}
- else{
+ else {
next_decode_duration = (1000 / jb->fps);
_now = tsk_time_now();
if (_now > now) {
- if ((_now - now) > next_decode_duration){
+ if ((_now - now) > next_decode_duration) {
next_decode_duration = 0;
}
else {
@@ -556,19 +578,19 @@ static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
next_decode_duration = 0;
}
else*/{
- //next_decode_duration = (delay > x_decode_duration) ? 0 : (x_decode_duration - delay);
- //x_decode_duration = (1000 / jb->fps);
- //x_decode_time += x_decode_duration;
- }
-
-
+ //next_decode_duration = (delay > x_decode_duration) ? 0 : (x_decode_duration - delay);
+ //x_decode_duration = (1000 / jb->fps);
+ //x_decode_time += x_decode_duration;
+ }
+
+
//TSK_DEBUG_INFO("next_decode_timeout=%llu, delay = %llu", next_decode_duration, delay);
#else
next_decode_duration = (1000 / jb->fps);
#endif
}
-
+
TSK_DEBUG_INFO("Video jitter buffer thread - EXIT");
-
+
return tsk_null;
}
diff --git a/tinyDAV/src/video/mf/tdav_consumer_video_mf.cxx b/tinyDAV/src/video/mf/tdav_consumer_video_mf.cxx
index b5048b2..28cf332 100755
--- a/tinyDAV/src/video/mf/tdav_consumer_video_mf.cxx
+++ b/tinyDAV/src/video/mf/tdav_consumer_video_mf.cxx
@@ -1,17 +1,17 @@
/*Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -26,96 +26,95 @@
#include "tsk_string.h"
#include "tsk_debug.h"
-typedef struct tdav_consumer_video_mf_s
-{
- TMEDIA_DECLARE_CONSUMER;
+typedef struct tdav_consumer_video_mf_s {
+ TMEDIA_DECLARE_CONSUMER;
}
tdav_consumer_video_mf_t;
/* ============ Media Producer Interface ================= */
int tdav_consumer_video_mf_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
+ int ret = 0;
- if(!self || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return ret;
+ return ret;
}
int tdav_consumer_video_mf_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
+ tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
+
+ if(!consumer || !codec || !codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!consumer || !codec || !codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
#if 0
- TMEDIA_CONSUMER(consumer)->decoder.codec_id = (tmedia_codec_id_t)(tmedia_codec_id_h264_bp | tmedia_codec_id_h264_mp);
+ TMEDIA_CONSUMER(consumer)->decoder.codec_id = (tmedia_codec_id_t)(tmedia_codec_id_h264_bp | tmedia_codec_id_h264_mp);
#endif
- TMEDIA_CONSUMER(consumer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(consumer)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(consumer)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+ TMEDIA_CONSUMER(consumer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(consumer)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(consumer)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
- if(!TMEDIA_CONSUMER(consumer)->video.display.width){
- TMEDIA_CONSUMER(consumer)->video.display.width = TMEDIA_CONSUMER(consumer)->video.in.width;
- }
- if(!TMEDIA_CONSUMER(consumer)->video.display.height){
- TMEDIA_CONSUMER(consumer)->video.display.height = TMEDIA_CONSUMER(consumer)->video.in.height;
- }
+ if(!TMEDIA_CONSUMER(consumer)->video.display.width) {
+ TMEDIA_CONSUMER(consumer)->video.display.width = TMEDIA_CONSUMER(consumer)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(consumer)->video.display.height) {
+ TMEDIA_CONSUMER(consumer)->video.display.height = TMEDIA_CONSUMER(consumer)->video.in.height;
+ }
- return 0;
+ return 0;
}
int tdav_consumer_video_mf_start(tmedia_consumer_t* self)
{
- tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
+ tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
- if(!consumer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!consumer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return 0;
+ return 0;
}
int tdav_consumer_video_mf_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
-
- return 0;
+ tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
+
+ return 0;
}
int tdav_consumer_video_mf_pause(tmedia_consumer_t* self)
{
- tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
+ tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
- if(!consumer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!consumer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return 0;
+ return 0;
}
int tdav_consumer_video_mf_stop(tmedia_consumer_t* self)
{
- tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
+ tdav_consumer_video_mf_t* consumer = (tdav_consumer_video_mf_t*)self;
- TSK_DEBUG_INFO("tdav_consumer_video_mf_stop");
+ TSK_DEBUG_INFO("tdav_consumer_video_mf_stop");
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return 0;
+ return 0;
}
@@ -125,60 +124,58 @@ int tdav_consumer_video_mf_stop(tmedia_consumer_t* self)
/* constructor */
static tsk_object_t* tdav_consumer_video_mf_ctor(tsk_object_t * self, va_list * app)
{
- tdav_consumer_video_mf_t *consumer = (tdav_consumer_video_mf_t *)self;
- if(consumer){
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(consumer));
- TMEDIA_CONSUMER(consumer)->video.display.chroma = tmedia_chroma_yuv420p; // To avoid chroma conversion
-
- /* init self */
- TMEDIA_CONSUMER(consumer)->video.fps = 15;
- TMEDIA_CONSUMER(consumer)->video.display.width = 352;
- TMEDIA_CONSUMER(consumer)->video.display.height = 288;
- TMEDIA_CONSUMER(consumer)->video.display.auto_resize = tsk_true;
- }
- return self;
+ tdav_consumer_video_mf_t *consumer = (tdav_consumer_video_mf_t *)self;
+ if(consumer) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(consumer));
+ TMEDIA_CONSUMER(consumer)->video.display.chroma = tmedia_chroma_yuv420p; // To avoid chroma conversion
+
+ /* init self */
+ TMEDIA_CONSUMER(consumer)->video.fps = 15;
+ TMEDIA_CONSUMER(consumer)->video.display.width = 352;
+ TMEDIA_CONSUMER(consumer)->video.display.height = 288;
+ TMEDIA_CONSUMER(consumer)->video.display.auto_resize = tsk_true;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* tdav_consumer_video_mf_dtor(tsk_object_t * self)
-{
- tdav_consumer_video_mf_t *consumer = (tdav_consumer_video_mf_t *)self;
- if(consumer){
+{
+ tdav_consumer_video_mf_t *consumer = (tdav_consumer_video_mf_t *)self;
+ if(consumer) {
- /* stop */
- //if(consumer->started){
- tdav_consumer_video_mf_stop((tmedia_consumer_t*)self);
- //}
+ /* stop */
+ //if(consumer->started){
+ tdav_consumer_video_mf_stop((tmedia_consumer_t*)self);
+ //}
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(consumer));
- /* deinit self */
- }
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(consumer));
+ /* deinit self */
+ }
- return self;
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_consumer_video_mf_def_s =
-{
- sizeof(tdav_consumer_video_mf_t),
- tdav_consumer_video_mf_ctor,
- tdav_consumer_video_mf_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_consumer_video_mf_def_s = {
+ sizeof(tdav_consumer_video_mf_t),
+ tdav_consumer_video_mf_ctor,
+ tdav_consumer_video_mf_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t tdav_consumer_video_mf_plugin_def_s =
-{
- &tdav_consumer_video_mf_def_s,
-
- tmedia_video,
- "Microsoft Windows Media Foundation consumer (Video)",
-
- tdav_consumer_video_mf_set,
- tdav_consumer_video_mf_prepare,
- tdav_consumer_video_mf_start,
- tdav_consumer_video_mf_consume,
- tdav_consumer_video_mf_pause,
- tdav_consumer_video_mf_stop
+static const tmedia_consumer_plugin_def_t tdav_consumer_video_mf_plugin_def_s = {
+ &tdav_consumer_video_mf_def_s,
+
+ tmedia_video,
+ "Microsoft Windows Media Foundation consumer (Video)",
+
+ tdav_consumer_video_mf_set,
+ tdav_consumer_video_mf_prepare,
+ tdav_consumer_video_mf_start,
+ tdav_consumer_video_mf_consume,
+ tdav_consumer_video_mf_pause,
+ tdav_consumer_video_mf_stop
};
const tmedia_consumer_plugin_def_t *tdav_consumer_video_mf_plugin_def_t = &tdav_consumer_video_mf_plugin_def_s;
diff --git a/tinyDAV/src/video/mf/tdav_producer_video_mf.cxx b/tinyDAV/src/video/mf/tdav_producer_video_mf.cxx
index be899a0..d4930c4 100755
--- a/tinyDAV/src/video/mf/tdav_producer_video_mf.cxx
+++ b/tinyDAV/src/video/mf/tdav_producer_video_mf.cxx
@@ -1,18 +1,18 @@
/*Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
* Copyright (C) Microsoft Corporation. All rights reserved.
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -53,25 +53,23 @@
(*ppT)->Release(); \
*ppT = NULL; \
} \
-}
+}
#undef CHECK_HR
#define CHECK_HR(x) if (FAILED(x)) { TSK_DEBUG_ERROR("Operation Failed"); goto bail; }
-typedef struct VideoSubTypeGuidPair
-{
- tmedia_chroma_t chroma;
- const GUID fourcc;
+typedef struct VideoSubTypeGuidPair {
+ tmedia_chroma_t chroma;
+ const GUID fourcc;
}
VideoSubTypeGuidPair;
-static const VideoSubTypeGuidPair g_VideoSubTypeGuidPairs[] =
-{
-
- { tmedia_chroma_nv12, MFVideoFormat_NV12 }, // 0
- { tmedia_chroma_rgb24, MFVideoFormat_RGB24 },
- { tmedia_chroma_rgb32, MFVideoFormat_RGB32 },
- // to be continued
+static const VideoSubTypeGuidPair g_VideoSubTypeGuidPairs[] = {
+
+ { tmedia_chroma_nv12, MFVideoFormat_NV12 }, // 0
+ { tmedia_chroma_rgb24, MFVideoFormat_RGB24 },
+ { tmedia_chroma_rgb32, MFVideoFormat_RGB32 },
+ // to be continued
};
// MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY,
@@ -88,21 +86,21 @@ class DeviceList
IMFActivate **m_ppDevices;
public:
- DeviceList() : m_ppDevices(NULL), m_cDevices(0)
- {
+ DeviceList() : m_ppDevices(NULL), m_cDevices(0) {
}
- ~DeviceList()
- {
+ ~DeviceList() {
Clear();
}
- UINT32 Count() const { return m_cDevices; }
+ UINT32 Count() const {
+ return m_cDevices;
+ }
void Clear();
HRESULT EnumerateDevices();
HRESULT GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate);
- HRESULT GetDeviceBest(IMFActivate **ppActivate);
+ HRESULT GetDeviceBest(IMFActivate **ppActivate);
HRESULT GetDeviceName(UINT32 index, WCHAR **ppszName);
};
@@ -111,10 +109,10 @@ public:
// SampleGrabberCB [Declaration]
// http://msdn.microsoft.com/en-us/library/windows/desktop/hh184779(v=vs.85).aspx
//
-class SampleGrabberCB : public IMFSampleGrabberSinkCallback
+class SampleGrabberCB : public IMFSampleGrabberSinkCallback
{
long m_cRef;
- const struct tdav_producer_video_mf_s* m_pWrappedProducer;
+ const struct tdav_producer_video_mf_s* m_pWrappedProducer;
SampleGrabberCB(const struct tdav_producer_video_mf_s* pcWrappedProducer) : m_cRef(1), m_pWrappedProducer(pcWrappedProducer) {}
@@ -136,8 +134,8 @@ public:
// IMFSampleGrabberSinkCallback methods
STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock);
STDMETHODIMP OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
- LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
- DWORD dwSampleSize);
+ LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+ DWORD dwSampleSize);
STDMETHODIMP OnShutdown();
};
@@ -145,16 +143,15 @@ public:
//
// tdav_producer_video_mf_t
//
-typedef struct tdav_producer_video_mf_s
-{
- TMEDIA_DECLARE_PRODUCER;
+typedef struct tdav_producer_video_mf_s {
+ TMEDIA_DECLARE_PRODUCER;
- bool bStarted;
- tsk_thread_handle_t* ppTread[1];
+ bool bStarted;
+ tsk_thread_handle_t* ppTread[1];
- DeviceList* pDeviceList;
+ DeviceList* pDeviceList;
- IMFMediaSession *pSession;
+ IMFMediaSession *pSession;
IMFMediaSource *pSource;
SampleGrabberCB *pCallback;
IMFActivate *pSinkActivate;
@@ -167,9 +164,9 @@ tdav_producer_video_mf_t;
// Forward declarations for glovbal functions
//
static HRESULT CreateTopology(
- IMFMediaSource *pSource,
- IMFActivate *pSinkActivate,
- IMFTopology **ppTopo);
+ IMFMediaSource *pSource,
+ IMFActivate *pSinkActivate,
+ IMFTopology **ppTopo);
static HRESULT AddSourceNode(
IMFTopology *pTopology,
IMFMediaSource *pSource,
@@ -182,199 +179,199 @@ static HRESULT AddOutputNode(
DWORD dwId,
IMFTopologyNode **ppNode);
static HRESULT RunSession(
- IMFMediaSession *pSession,
- IMFTopology *pTopology);
+ IMFMediaSession *pSession,
+ IMFTopology *pTopology);
static HRESULT StopSession(
- IMFMediaSession *pSession,
- IMFMediaSource *pSource);
+ IMFMediaSession *pSession,
+ IMFMediaSource *pSource);
static HRESULT PauseSession(
- IMFMediaSession *pSession);
+ IMFMediaSession *pSession);
static void* TSK_STDCALL RunSessionThread(void *pArg);
/* ============ Media Producer Interface ================= */
static int tdav_producer_video_mf_set(tmedia_producer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
+ int ret = 0;
+ tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
- if(!pSelf || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return ret;
+ return ret;
}
static int tdav_producer_video_mf_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
+ tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
+
+ if(!pSelf || !codec || !codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!pSelf || !codec || !codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
#if 0
- TMEDIA_PRODUCER(producer)->encoder.codec_id = (tmedia_codec_id_t)(tmedia_codec_id_h264_bp | tmedia_codec_id_h264_mp);
+ TMEDIA_PRODUCER(producer)->encoder.codec_id = (tmedia_codec_id_t)(tmedia_codec_id_h264_bp | tmedia_codec_id_h264_mp);
#else
- TMEDIA_PRODUCER(pSelf)->video.chroma = g_VideoSubTypeGuidPairs[DEFAULT_SUBTYPE_INDEX].chroma;
+ TMEDIA_PRODUCER(pSelf)->video.chroma = g_VideoSubTypeGuidPairs[DEFAULT_SUBTYPE_INDEX].chroma;
#endif
- TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
- TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
- TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+ TMEDIA_PRODUCER(pSelf)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+ TMEDIA_PRODUCER(pSelf)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+ TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
- // MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY,
+ // MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY,
// MFVideoFormat_RGB32, MFVideoFormat_RGB24, MFVideoFormat_IYUV
#undef DEFAULT_SUBTYPE
- HRESULT hr = S_OK;
-
- // create device list object
- if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceList())){
- TSK_DEBUG_ERROR("Failed to create device list");
- hr = E_OUTOFMEMORY;
- goto bail;
- }
- // enumerate devices
- hr = pSelf->pDeviceList->EnumerateDevices();
- if(!SUCCEEDED(hr)){
- goto bail;
- }
-
- // check if we have at least one MF video source connected to the PC
- if(pSelf->pDeviceList->Count() == 0){
- TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
- // do not break the negotiation as one-way video connection is a valid use-case
- }
- else{
- IMFActivate* pActivate = NULL;
- // Get best MF video source
- hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
- if(!SUCCEEDED(hr) || !pActivate){
- TSK_DEBUG_ERROR("Failed to get best MF video source");
- if(!pActivate){
- hr = E_OUTOFMEMORY;
- }
- goto bail;
- }
-
- // Create the media source for the device.
- hr = pActivate->ActivateObject(
- __uuidof(IMFMediaSource),
- (void**)&pSelf->pSource
- );
- SafeRelease(&pActivate);
- if(!SUCCEEDED(hr)){
- TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
- goto bail;
- }
-
- // Configure the media type that the Sample Grabber will receive.
- // Setting the major and subtype is usually enough for the topology loader
- // to resolve the topology.
-
- CHECK_HR(hr = MFCreateMediaType(&pSelf->pType));
- CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
-#if 0
- CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
-#else
- CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, g_VideoSubTypeGuidPairs[DEFAULT_SUBTYPE_INDEX].fourcc));
-#endif
- CHECK_HR(hr = MFSetAttributeSize(pSelf->pType, MF_MT_FRAME_SIZE, TMEDIA_PRODUCER(pSelf)->video.width, TMEDIA_PRODUCER(pSelf)->video.height));
- // CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_DEFAULT_STRIDE, 1280));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, 1));
- // CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AVG_BITRATE, 147456000));
- //CHECK_HR(hr = MFSetAttributeRatio(pSelf->pType, MF_MT_FRAME_RATE, (UINT32)30000, 1001));
- CHECK_HR(hr = MFSetAttributeRatio(pSelf->pType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
- CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+ HRESULT hr = S_OK;
- // Create the sample grabber sink.
- CHECK_HR(hr = SampleGrabberCB::CreateInstance(pSelf, &pSelf->pCallback));
- CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pType, pSelf->pCallback, &pSelf->pSinkActivate));
+ // create device list object
+ if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceList())) {
+ TSK_DEBUG_ERROR("Failed to create device list");
+ hr = E_OUTOFMEMORY;
+ goto bail;
+ }
+ // enumerate devices
+ hr = pSelf->pDeviceList->EnumerateDevices();
+ if(!SUCCEEDED(hr)) {
+ goto bail;
+ }
- // To run as fast as possible, set this attribute (requires Windows 7):
- CHECK_HR(hr = pSelf->pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+ // check if we have at least one MF video source connected to the PC
+ if(pSelf->pDeviceList->Count() == 0) {
+ TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
+ // do not break the negotiation as one-way video connection is a valid use-case
+ }
+ else {
+ IMFActivate* pActivate = NULL;
+ // Get best MF video source
+ hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
+ if(!SUCCEEDED(hr) || !pActivate) {
+ TSK_DEBUG_ERROR("Failed to get best MF video source");
+ if(!pActivate) {
+ hr = E_OUTOFMEMORY;
+ }
+ goto bail;
+ }
- // Create the Media Session.
- CHECK_HR(hr = MFCreateMediaSession(NULL, &pSelf->pSession));
+ // Create the media source for the device.
+ hr = pActivate->ActivateObject(
+ __uuidof(IMFMediaSource),
+ (void**)&pSelf->pSource
+ );
+ SafeRelease(&pActivate);
+ if(!SUCCEEDED(hr)) {
+ TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
+ goto bail;
+ }
- // Create the topology.
- CHECK_HR(hr = CreateTopology(pSelf->pSource, pSelf->pSinkActivate, &pSelf->pTopology));
- }
+ // Configure the media type that the Sample Grabber will receive.
+ // Setting the major and subtype is usually enough for the topology loader
+ // to resolve the topology.
+
+ CHECK_HR(hr = MFCreateMediaType(&pSelf->pType));
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+#if 0
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
+#else
+ CHECK_HR(hr = pSelf->pType->SetGUID(MF_MT_SUBTYPE, g_VideoSubTypeGuidPairs[DEFAULT_SUBTYPE_INDEX].fourcc));
+#endif
+ CHECK_HR(hr = MFSetAttributeSize(pSelf->pType, MF_MT_FRAME_SIZE, TMEDIA_PRODUCER(pSelf)->video.width, TMEDIA_PRODUCER(pSelf)->video.height));
+ // CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_DEFAULT_STRIDE, 1280));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, 1));
+ // CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_AVG_BITRATE, 147456000));
+ //CHECK_HR(hr = MFSetAttributeRatio(pSelf->pType, MF_MT_FRAME_RATE, (UINT32)30000, 1001));
+ CHECK_HR(hr = MFSetAttributeRatio(pSelf->pType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
+ CHECK_HR(hr = pSelf->pType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
+
+ // Create the sample grabber sink.
+ CHECK_HR(hr = SampleGrabberCB::CreateInstance(pSelf, &pSelf->pCallback));
+ CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pSelf->pType, pSelf->pCallback, &pSelf->pSinkActivate));
+
+ // To run as fast as possible, set this attribute (requires Windows 7):
+ CHECK_HR(hr = pSelf->pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
+
+ // Create the Media Session.
+ CHECK_HR(hr = MFCreateMediaSession(NULL, &pSelf->pSession));
+
+ // Create the topology.
+ CHECK_HR(hr = CreateTopology(pSelf->pSource, pSelf->pSinkActivate, &pSelf->pTopology));
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int tdav_producer_video_mf_start(tmedia_producer_t* self)
{
- tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
-
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- if(pSelf->bStarted){
- return 0;
- }
+ tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
- HRESULT hr = S_OK;
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ if(pSelf->bStarted) {
+ return 0;
+ }
- // Run the media session.
- CHECK_HR(hr = RunSession(pSelf->pSession, pSelf->pTopology));
+ HRESULT hr = S_OK;
- // Start asynchronous watcher thread
- pSelf->bStarted = true;
- int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
- if(ret != 0){
- TSK_DEBUG_ERROR("Failed to create thread");
- hr = E_FAIL;
- pSelf->bStarted = false;
- StopSession(pSelf->pSession, pSelf->pSource);
- goto bail;
- }
+ // Run the media session.
+ CHECK_HR(hr = RunSession(pSelf->pSession, pSelf->pTopology));
+
+ // Start asynchronous watcher thread
+ pSelf->bStarted = true;
+ int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
+ if(ret != 0) {
+ TSK_DEBUG_ERROR("Failed to create thread");
+ hr = E_FAIL;
+ pSelf->bStarted = false;
+ StopSession(pSelf->pSession, pSelf->pSource);
+ goto bail;
+ }
bail:
- return SUCCEEDED(hr) ? 0 : -1;
+ return SUCCEEDED(hr) ? 0 : -1;
}
static int tdav_producer_video_mf_pause(tmedia_producer_t* self)
{
- tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
+ tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
+
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ HRESULT hr = PauseSession(pSelf->pSession);
- HRESULT hr = PauseSession(pSelf->pSession);
-
- return 0;
+ return 0;
}
static int tdav_producer_video_mf_stop(tmedia_producer_t* self)
{
- tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
+ tdav_producer_video_mf_t* pSelf = (tdav_producer_video_mf_t*)self;
- if(!pSelf){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!pSelf) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- HRESULT hr = S_OK;
+ HRESULT hr = S_OK;
- // for the thread
- pSelf->bStarted = false;
- hr = StopSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
- if(pSelf->ppTread[0]){
- tsk_thread_join(&pSelf->ppTread[0]);
- }
- hr = StopSession(NULL, pSelf->pSource); // stop source to release the camera
+ // for the thread
+ pSelf->bStarted = false;
+ hr = StopSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
+ if(pSelf->ppTread[0]) {
+ tsk_thread_join(&pSelf->ppTread[0]);
+ }
+ hr = StopSession(NULL, pSelf->pSource); // stop source to release the camera
- return 0;
+ return 0;
}
//
@@ -383,79 +380,77 @@ static int tdav_producer_video_mf_stop(tmedia_producer_t* self)
/* constructor */
static tsk_object_t* tdav_producer_video_mf_ctor(tsk_object_t * self, va_list * app)
{
- tdav_producer_video_mf_t *pSelf = (tdav_producer_video_mf_t *)self;
- if(pSelf){
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
+ tdav_producer_video_mf_t *pSelf = (tdav_producer_video_mf_t *)self;
+ if(pSelf) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
#if 0 // H.264
- TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_yuv420p; // To avoid chroma conversion
+ TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_yuv420p; // To avoid chroma conversion
#endif
- /* init self with default values*/
+ /* init self with default values*/
- TMEDIA_PRODUCER(pSelf)->video.fps = 15;
- TMEDIA_PRODUCER(pSelf)->video.width = 352;
- TMEDIA_PRODUCER(pSelf)->video.height = 288;
+ TMEDIA_PRODUCER(pSelf)->video.fps = 15;
+ TMEDIA_PRODUCER(pSelf)->video.width = 352;
+ TMEDIA_PRODUCER(pSelf)->video.height = 288;
#if TDAV_UNDER_WINDOWS_PHONE
- pSelf->videoCapturePhone = ref new VideoCapturePhone();
+ pSelf->videoCapturePhone = ref new VideoCapturePhone();
#endif
- }
- return self;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* tdav_producer_video_mf_dtor(tsk_object_t * self)
-{
- tdav_producer_video_mf_t *pSelf = (tdav_producer_video_mf_t *)self;
- if(pSelf){
- /* stop */
- //if(pSelf->started){
- tdav_producer_video_mf_stop((tmedia_producer_t*)self);
- //}
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
- /* deinit self */
- if(pSelf->pDeviceList){
- delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
- }
- if(pSelf->pSource){
- pSelf->pSource->Shutdown();
- }
- if(pSelf->pSession){
- pSelf->pSession->Shutdown();
- }
-
- SafeRelease(&pSelf->pSession);
- SafeRelease(&pSelf->pSource);
- SafeRelease(&pSelf->pCallback);
- SafeRelease(&pSelf->pSinkActivate);
- SafeRelease(&pSelf->pTopology);
- SafeRelease(&pSelf->pType);
- }
-
- return self;
+{
+ tdav_producer_video_mf_t *pSelf = (tdav_producer_video_mf_t *)self;
+ if(pSelf) {
+ /* stop */
+ //if(pSelf->started){
+ tdav_producer_video_mf_stop((tmedia_producer_t*)self);
+ //}
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(pSelf));
+ /* deinit self */
+ if(pSelf->pDeviceList) {
+ delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
+ }
+ if(pSelf->pSource) {
+ pSelf->pSource->Shutdown();
+ }
+ if(pSelf->pSession) {
+ pSelf->pSession->Shutdown();
+ }
+
+ SafeRelease(&pSelf->pSession);
+ SafeRelease(&pSelf->pSource);
+ SafeRelease(&pSelf->pCallback);
+ SafeRelease(&pSelf->pSinkActivate);
+ SafeRelease(&pSelf->pTopology);
+ SafeRelease(&pSelf->pType);
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_producer_video_mf_def_s =
-{
- sizeof(tdav_producer_video_mf_t),
- tdav_producer_video_mf_ctor,
- tdav_producer_video_mf_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_producer_video_mf_def_s = {
+ sizeof(tdav_producer_video_mf_t),
+ tdav_producer_video_mf_ctor,
+ tdav_producer_video_mf_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t tdav_producer_video_mf_plugin_def_s =
-{
- &tdav_producer_video_mf_def_s,
+static const tmedia_producer_plugin_def_t tdav_producer_video_mf_plugin_def_s = {
+ &tdav_producer_video_mf_def_s,
- tmedia_video,
- "Microsoft Windows Media Foundation producer (Video)",
+ tmedia_video,
+ "Microsoft Windows Media Foundation producer (Video)",
- tdav_producer_video_mf_set,
- tdav_producer_video_mf_prepare,
- tdav_producer_video_mf_start,
- tdav_producer_video_mf_pause,
- tdav_producer_video_mf_stop
+ tdav_producer_video_mf_set,
+ tdav_producer_video_mf_prepare,
+ tdav_producer_video_mf_start,
+ tdav_producer_video_mf_pause,
+ tdav_producer_video_mf_stop
};
const tmedia_producer_plugin_def_t *tdav_producer_video_mf_plugin_def_t = &tdav_producer_video_mf_plugin_def_s;
@@ -467,8 +462,7 @@ const tmedia_producer_plugin_def_t *tdav_producer_video_mf_plugin_def_t = &tdav_
//
void DeviceList::Clear()
{
- for (UINT32 i = 0; i < m_cDevices; i++)
- {
+ for (UINT32 i = 0; i < m_cDevices; i++) {
SafeRelease(&m_ppDevices[i]);
}
CoTaskMemFree(m_ppDevices);
@@ -490,17 +484,15 @@ HRESULT DeviceList::EnumerateDevices()
hr = MFCreateAttributes(&pAttributes, 1);
// Ask for source type = video capture devices
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = pAttributes->SetGUID(
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
- );
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
+ );
}
// Enumerate devices.
- if (SUCCEEDED(hr))
- {
+ if (SUCCEEDED(hr)) {
hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
}
@@ -512,8 +504,7 @@ HRESULT DeviceList::EnumerateDevices()
HRESULT DeviceList::GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate)
{
- if (index >= Count())
- {
+ if (index >= Count()) {
return E_INVALIDARG;
}
@@ -525,24 +516,23 @@ HRESULT DeviceList::GetDeviceAtIndex(UINT32 index, IMFActivate **ppActivate)
HRESULT DeviceList::GetDeviceBest(IMFActivate **ppActivate)
{
- // for now we just get the default video source device (index = 0)
- return GetDeviceAtIndex(0, ppActivate);
+ // for now we just get the default video source device (index = 0)
+ return GetDeviceAtIndex(0, ppActivate);
}
HRESULT DeviceList::GetDeviceName(UINT32 index, WCHAR **ppszName)
{
- if (index >= Count())
- {
+ if (index >= Count()) {
return E_INVALIDARG;
}
HRESULT hr = S_OK;
hr = m_ppDevices[index]->GetAllocatedString(
- MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
- ppszName,
- NULL
- );
+ MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
+ ppszName,
+ NULL
+ );
return hr;
}
@@ -558,8 +548,7 @@ HRESULT SampleGrabberCB::CreateInstance(const struct tdav_producer_video_mf_s* p
{
*ppCB = new (std::nothrow) SampleGrabberCB(pcWrappedProducer);
- if (ppCB == NULL)
- {
+ if (ppCB == NULL) {
return E_OUTOFMEMORY;
}
return S_OK;
@@ -567,8 +556,7 @@ HRESULT SampleGrabberCB::CreateInstance(const struct tdav_producer_video_mf_s* p
STDMETHODIMP SampleGrabberCB::QueryInterface(REFIID riid, void** ppv)
{
- static const QITAB qit[] =
- {
+ static const QITAB qit[] = {
QITABENT(SampleGrabberCB, IMFSampleGrabberSinkCallback),
QITABENT(SampleGrabberCB, IMFClockStateSink),
{ 0 }
@@ -584,8 +572,7 @@ STDMETHODIMP_(ULONG) SampleGrabberCB::AddRef()
STDMETHODIMP_(ULONG) SampleGrabberCB::Release()
{
ULONG cRef = InterlockedDecrement(&m_cRef);
- if (cRef == 0)
- {
+ if (cRef == 0) {
delete this;
}
return cRef;
@@ -594,7 +581,7 @@ STDMETHODIMP_(ULONG) SampleGrabberCB::Release()
// IMFClockStateSink methods.
-// In these example, the IMFClockStateSink methods do not perform any actions.
+// In these example, the IMFClockStateSink methods do not perform any actions.
// You can use these methods to track the state of the sample grabber sink.
STDMETHODIMP SampleGrabberCB::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
@@ -630,13 +617,12 @@ STDMETHODIMP SampleGrabberCB::OnSetPresentationClock(IMFPresentationClock* pCloc
}
STDMETHODIMP SampleGrabberCB::OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
- LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
- DWORD dwSampleSize)
+ LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
+ DWORD dwSampleSize)
{
- if(m_pWrappedProducer && TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback)
- {
- TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback_data, pSampleBuffer, dwSampleSize);
- }
+ if(m_pWrappedProducer && TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback) {
+ TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pWrappedProducer)->enc_cb.callback_data, pSampleBuffer, dwSampleSize);
+ }
return S_OK;
}
@@ -667,9 +653,8 @@ static HRESULT CreateTopology(IMFMediaSource *pSource, IMFActivate *pSinkActivat
CHECK_HR(hr = MFCreateTopology(&pTopology));
CHECK_HR(hr = pSource->CreatePresentationDescriptor(&pPD));
CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
-
- for (DWORD i = 0; i < cStreams; i++)
- {
+
+ for (DWORD i = 0; i < cStreams; i++) {
// In this example, we look for audio streams and connect them to the sink.
BOOL fSelected = FALSE;
@@ -679,15 +664,13 @@ static HRESULT CreateTopology(IMFMediaSource *pSource, IMFActivate *pSinkActivat
CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
CHECK_HR(hr = pHandler->GetMajorType(&majorType));
- if (majorType == MFMediaType_Video && fSelected)
- {
+ if (majorType == MFMediaType_Video && fSelected) {
CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pNode1));
CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, &pNode2));
CHECK_HR(hr = pNode1->ConnectOutput(0, pNode2, 0));
break;
}
- else
- {
+ else {
CHECK_HR(hr = pPD->DeselectStream(i));
}
SafeRelease(&pSD);
@@ -766,23 +749,22 @@ static HRESULT RunSession(IMFMediaSession *pSession, IMFTopology *pTopology)
PROPVARIANT var;
PropVariantInit(&var);
- MediaEventType met;
- HRESULT hrStatus = S_OK;
+ MediaEventType met;
+ HRESULT hrStatus = S_OK;
HRESULT hr = S_OK;
CHECK_HR(hr = pSession->SetTopology(0, pTopology));
CHECK_HR(hr = pSession->Start(&GUID_NULL, &var));
- // Check first event
- hr = pSession->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pEvent);
- if(hr == MF_E_NO_EVENTS_AVAILABLE){
- hr = S_OK;
- goto bail;
- }
+ // Check first event
+ hr = pSession->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pEvent);
+ if(hr == MF_E_NO_EVENTS_AVAILABLE) {
+ hr = S_OK;
+ goto bail;
+ }
CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
- if (FAILED(hrStatus))
- {
- CHECK_HR(hr = pEvent->GetType(&met));
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ if (FAILED(hrStatus)) {
+ CHECK_HR(hr = pEvent->GetType(&met));
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
hr = hrStatus;
goto bail;
}
@@ -794,62 +776,60 @@ bail:
// Stop session
static HRESULT StopSession(
- IMFMediaSession *pSession,
- IMFMediaSource *pSource)
+ IMFMediaSession *pSession,
+ IMFMediaSource *pSource)
{
- // MUST be source then session
- if(pSource){
- pSource->Shutdown();
- }
- if(pSession){
- pSession->Shutdown();
- }
- return S_OK;
+ // MUST be source then session
+ if(pSource) {
+ pSource->Shutdown();
+ }
+ if(pSession) {
+ pSession->Shutdown();
+ }
+ return S_OK;
}
// Pause session
static HRESULT PauseSession(
- IMFMediaSession *pSession)
+ IMFMediaSession *pSession)
{
- if(!pSession){
- return E_INVALIDARG;
- }
- return pSession->Pause();
+ if(!pSession) {
+ return E_INVALIDARG;
+ }
+ return pSession->Pause();
}
// Run session async thread
static void* TSK_STDCALL RunSessionThread(void *pArg)
{
- tdav_producer_video_mf_t *pSelf = (tdav_producer_video_mf_t *)pArg;
- HRESULT hrStatus = S_OK;
- HRESULT hr = S_OK;
- IMFMediaEvent *pEvent = NULL;
+ tdav_producer_video_mf_t *pSelf = (tdav_producer_video_mf_t *)pArg;
+ HRESULT hrStatus = S_OK;
+ HRESULT hr = S_OK;
+ IMFMediaEvent *pEvent = NULL;
MediaEventType met;
- TSK_DEBUG_INFO("RunSessionThread - ENTER");
+ TSK_DEBUG_INFO("RunSessionThread - ENTER");
- while(pSelf->bStarted){
+ while(pSelf->bStarted) {
CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
CHECK_HR(hr = pEvent->GetType(&met));
- if (FAILED(hrStatus))
- {
- TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
+ if (FAILED(hrStatus)) {
+ TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);
hr = hrStatus;
goto bail;
}
- if (met == MESessionEnded)
- {
+ if (met == MESessionEnded) {
break;
}
SafeRelease(&pEvent);
- }
+ }
bail:
- TSK_DEBUG_INFO("RunSessionThread - EXIT");
+ TSK_DEBUG_INFO("RunSessionThread - EXIT");
- return NULL;
+ return NULL;
}
#endif /*HAVE_MF */
diff --git a/tinyDAV/src/video/tdav_consumer_video.c b/tinyDAV/src/video/tdav_consumer_video.c
index b7adeca..1c3c600 100755
--- a/tinyDAV/src/video/tdav_consumer_video.c
+++ b/tinyDAV/src/video/tdav_consumer_video.c
@@ -2,19 +2,19 @@
* Copyright (C) 2011 Doubango Telecom <http://www.doubango.org>
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(DOT)org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@@ -39,34 +39,34 @@
/** Initialize video consumer */
int tdav_consumer_video_init(tdav_consumer_video_t* self)
{
- int ret;
-
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
- /* base */
- if((ret = tmedia_consumer_init(TMEDIA_CONSUMER(self)))){
- return ret;
- }
-
- /* self (should be update by prepare() by using the codec's info)*/
- TMEDIA_CONSUMER(self)->video.fps = TDAV_VIDEO_DEFAULT_FPS;
- TMEDIA_CONSUMER(self)->video.display.width = TDAV_VIDEO_DEFAULT_WIDTH;
- TMEDIA_CONSUMER(self)->video.display.height = TDAV_VIDEO_DEFAULT_HEIGHT;
- TMEDIA_CONSUMER(self)->video.display.auto_resize = TDAV_VIDEO_DEFAULT_AUTORESIZE;
-
- /* self:jitterbuffer */
- if(!self->jitterbuffer && !(self->jitterbuffer = tmedia_jitterbuffer_create(tmedia_video))){
- // -- TSK_DEBUG_WARN("Failed to create video jitter buffer");
- }
- if(self->jitterbuffer){
- tmedia_jitterbuffer_init(TMEDIA_JITTER_BUFFER(self->jitterbuffer));
- }
-
- tsk_safeobj_init(self);
-
- return 0;
+ int ret;
+
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+ /* base */
+ if((ret = tmedia_consumer_init(TMEDIA_CONSUMER(self)))) {
+ return ret;
+ }
+
+ /* self (should be update by prepare() by using the codec's info)*/
+ TMEDIA_CONSUMER(self)->video.fps = TDAV_VIDEO_DEFAULT_FPS;
+ TMEDIA_CONSUMER(self)->video.display.width = TDAV_VIDEO_DEFAULT_WIDTH;
+ TMEDIA_CONSUMER(self)->video.display.height = TDAV_VIDEO_DEFAULT_HEIGHT;
+ TMEDIA_CONSUMER(self)->video.display.auto_resize = TDAV_VIDEO_DEFAULT_AUTORESIZE;
+
+ /* self:jitterbuffer */
+ if(!self->jitterbuffer && !(self->jitterbuffer = tmedia_jitterbuffer_create(tmedia_video))) {
+ // -- TSK_DEBUG_WARN("Failed to create video jitter buffer");
+ }
+ if(self->jitterbuffer) {
+ tmedia_jitterbuffer_init(TMEDIA_JITTER_BUFFER(self->jitterbuffer));
+ }
+
+ tsk_safeobj_init(self);
+
+ return 0;
}
/**
@@ -79,105 +79,106 @@ int tdav_consumer_video_init(tdav_consumer_video_t* self)
* >0 : @a consumer1 greater than @a consumer2.<br>
*/
int tdav_consumer_video_cmp(const tsk_object_t* consumer1, const tsk_object_t* consumer2)
-{
- int ret;
- tsk_subsat_int32_ptr(consumer1, consumer2, &ret);
- return ret;
+{
+ int ret;
+ tsk_subsat_int32_ptr(consumer1, consumer2, &ret);
+ return ret;
}
int tdav_consumer_video_set(tdav_consumer_video_t* self, const tmedia_param_t* param)
{
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return 0;
+ return 0;
}
int tdav_consumer_video_put(tdav_consumer_video_t* self, const void* data, tsk_size_t data_size, const tsk_object_t* proto_hdr)
{
- const trtp_rtp_header_t* rtp_hdr = TRTP_RTP_HEADER(proto_hdr);
- int ret;
-
- if(!self || !data || !self->jitterbuffer || !rtp_hdr){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
-
- if(!TMEDIA_JITTER_BUFFER(self->jitterbuffer)->opened){
- uint32_t frame_duration = (1000 / TMEDIA_CONSUMER(self)->video.fps);
- static uint32_t rate = 90000;
- static uint32_t channels = 1;
- if((ret = tmedia_jitterbuffer_open(TMEDIA_JITTER_BUFFER(self->jitterbuffer), frame_duration, rate, channels))){
- TSK_DEBUG_ERROR("Failed to open jitterbuffer (%d)", ret);
- tsk_safeobj_unlock(self);
- return ret;
- }
- }
- ret = tmedia_jitterbuffer_put(TMEDIA_JITTER_BUFFER(self->jitterbuffer), (void*)data, data_size, proto_hdr);
-
- tsk_safeobj_unlock(self);
-
- return ret;
+ const trtp_rtp_header_t* rtp_hdr = TRTP_RTP_HEADER(proto_hdr);
+ int ret;
+
+ if(!self || !data || !self->jitterbuffer || !rtp_hdr) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+
+ if(!TMEDIA_JITTER_BUFFER(self->jitterbuffer)->opened) {
+ uint32_t frame_duration = (1000 / TMEDIA_CONSUMER(self)->video.fps);
+ static uint32_t rate = 90000;
+ static uint32_t channels = 1;
+ if((ret = tmedia_jitterbuffer_open(TMEDIA_JITTER_BUFFER(self->jitterbuffer), frame_duration, rate, channels))) {
+ TSK_DEBUG_ERROR("Failed to open jitterbuffer (%d)", ret);
+ tsk_safeobj_unlock(self);
+ return ret;
+ }
+ }
+ ret = tmedia_jitterbuffer_put(TMEDIA_JITTER_BUFFER(self->jitterbuffer), (void*)data, data_size, proto_hdr);
+
+ tsk_safeobj_unlock(self);
+
+ return ret;
}
/* get data drom the jitter buffer (consumers should always have ptime of 20ms) */
tsk_size_t tdav_consumer_video_get(tdav_consumer_video_t* self, void* out_data, tsk_size_t out_size)
{
- tsk_size_t ret_size = 0;
- if(!self && self->jitterbuffer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
-
- tsk_safeobj_lock(self);
-
- if(!TMEDIA_JITTER_BUFFER(self->jitterbuffer)->opened){
- int ret;
- uint32_t frame_duration = (1000 / TMEDIA_CONSUMER(self)->video.fps);
- static uint32_t rate = 90000;
- static uint32_t channles = 1;
- if((ret = tmedia_jitterbuffer_open(TMEDIA_JITTER_BUFFER(self->jitterbuffer), frame_duration, rate, channles))){
- TSK_DEBUG_ERROR("Failed to open jitterbuffer (%d)", ret);
- tsk_safeobj_unlock(self);
- return 0;
- }
- }
- ret_size = tmedia_jitterbuffer_get(TMEDIA_JITTER_BUFFER(self->jitterbuffer), out_data, out_size);
-
- tsk_safeobj_unlock(self);
-
-
-
-
- return ret_size;
+ tsk_size_t ret_size = 0;
+ if(!self && self->jitterbuffer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ tsk_safeobj_lock(self);
+
+ if(!TMEDIA_JITTER_BUFFER(self->jitterbuffer)->opened) {
+ int ret;
+ uint32_t frame_duration = (1000 / TMEDIA_CONSUMER(self)->video.fps);
+ static uint32_t rate = 90000;
+ static uint32_t channles = 1;
+ if((ret = tmedia_jitterbuffer_open(TMEDIA_JITTER_BUFFER(self->jitterbuffer), frame_duration, rate, channles))) {
+ TSK_DEBUG_ERROR("Failed to open jitterbuffer (%d)", ret);
+ tsk_safeobj_unlock(self);
+ return 0;
+ }
+ }
+ ret_size = tmedia_jitterbuffer_get(TMEDIA_JITTER_BUFFER(self->jitterbuffer), out_data, out_size);
+
+ tsk_safeobj_unlock(self);
+
+
+
+
+ return ret_size;
}
int tdav_consumer_video_tick(tdav_consumer_video_t* self)
{
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
- return tmedia_jitterbuffer_tick(TMEDIA_JITTER_BUFFER(self->jitterbuffer));
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+ return tmedia_jitterbuffer_tick(TMEDIA_JITTER_BUFFER(self->jitterbuffer));
}
/** Reset jitterbuffer */
-int tdav_consumer_video_reset(tdav_consumer_video_t* self){
- int ret;
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(self);
- ret = tmedia_jitterbuffer_reset(TMEDIA_JITTER_BUFFER(self->jitterbuffer));
- tsk_safeobj_unlock(self);
-
- return ret;
+int tdav_consumer_video_reset(tdav_consumer_video_t* self)
+{
+ int ret;
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(self);
+ ret = tmedia_jitterbuffer_reset(TMEDIA_JITTER_BUFFER(self->jitterbuffer));
+ tsk_safeobj_unlock(self);
+
+ return ret;
}
/* tsk_safeobj_lock(self); */
@@ -186,22 +187,22 @@ int tdav_consumer_video_reset(tdav_consumer_video_t* self){
/** DeInitialize video consumer */
int tdav_consumer_video_deinit(tdav_consumer_video_t* self)
{
- int ret;
+ int ret;
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- /* base */
- if((ret = tmedia_consumer_deinit(TMEDIA_CONSUMER(self)))){
- /* return ret; */
- }
+ /* base */
+ if((ret = tmedia_consumer_deinit(TMEDIA_CONSUMER(self)))) {
+ /* return ret; */
+ }
- /* self */
- TSK_OBJECT_SAFE_FREE(self->jitterbuffer);
+ /* self */
+ TSK_OBJECT_SAFE_FREE(self->jitterbuffer);
- tsk_safeobj_deinit(self);
+ tsk_safeobj_deinit(self);
- return 0;
+ return 0;
} \ No newline at end of file
diff --git a/tinyDAV/src/video/tdav_converter_video.cxx b/tinyDAV/src/video/tdav_converter_video.cxx
index 2195d79..cc60b95 100755
--- a/tinyDAV/src/video/tdav_converter_video.cxx
+++ b/tinyDAV/src/video/tdav_converter_video.cxx
@@ -36,32 +36,31 @@
using namespace libyuv;
-typedef struct tdav_converter_video_libyuv_s
-{
- TMEDIA_DECLARE_CONVERTER_VIDEO;
-
- enum FourCC srcFormat;
- enum FourCC dstFormat;
-
- tsk_bool_t toI420;
- tsk_bool_t fromI420;
-
- struct{
- uint8* ptr;
- int size;
- }chroma;
- struct{
- uint8* ptr;
- int size;
- }rotate;
- struct{
- uint8* ptr;
- int size;
- }scale;
- struct{
- uint8* ptr;
- int size;
- }mirror;
+typedef struct tdav_converter_video_libyuv_s {
+ TMEDIA_DECLARE_CONVERTER_VIDEO;
+
+ enum FourCC srcFormat;
+ enum FourCC dstFormat;
+
+ tsk_bool_t toI420;
+ tsk_bool_t fromI420;
+
+ struct {
+ uint8* ptr;
+ int size;
+ } chroma;
+ struct {
+ uint8* ptr;
+ int size;
+ } rotate;
+ struct {
+ uint8* ptr;
+ int size;
+ } scale;
+ struct {
+ uint8* ptr;
+ int size;
+ } mirror;
}
tdav_converter_video_libyuv_t;
@@ -70,89 +69,88 @@ tdav_converter_video_libyuv_t;
static inline tsk_bool_t _tdav_converter_video_libyuv_is_chroma_varsize(tmedia_chroma_t chroma)
{
- return chroma == tmedia_chroma_mjpeg;
+ return chroma == tmedia_chroma_mjpeg;
}
static inline tsk_size_t _tdav_converter_video_libyuv_get_size(tmedia_chroma_t chroma, tsk_size_t w, tsk_size_t h)
{
- switch (chroma){
- case tmedia_chroma_rgb24:
- case tmedia_chroma_bgr24:
- return (w * h * 3);
- case tmedia_chroma_rgb565le:
- return ((w * h) << 1);
- case tmedia_chroma_rgb32:
- return ((w * h) << 2);
- case tmedia_chroma_nv21:
- return ((w * h * 3) >> 1);
- case tmedia_chroma_nv12:
- return ((w * h * 3) >> 1);
- case tmedia_chroma_yuv422p:
- return ((w * h) << 1);
- case tmedia_chroma_uyvy422:
- case tmedia_chroma_yuyv422:
- return ((w * h) << 1);
- case tmedia_chroma_yuv420p:
- return ((w * h * 3) >> 1);
- case tmedia_chroma_mjpeg:
- return 0;
- default:
- TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
- return 0;
- }
+ switch (chroma) {
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return (w * h * 3);
+ case tmedia_chroma_rgb565le:
+ return ((w * h) << 1);
+ case tmedia_chroma_rgb32:
+ return ((w * h) << 2);
+ case tmedia_chroma_nv21:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_nv12:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_yuv422p:
+ return ((w * h) << 1);
+ case tmedia_chroma_uyvy422:
+ case tmedia_chroma_yuyv422:
+ return ((w * h) << 1);
+ case tmedia_chroma_yuv420p:
+ return ((w * h * 3) >> 1);
+ case tmedia_chroma_mjpeg:
+ return 0;
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return 0;
+ }
}
static inline enum FourCC _tdav_converter_video_libyuv_get_pixfmt(tmedia_chroma_t chroma)
{
- switch (chroma){
- case tmedia_chroma_rgb24:
- case tmedia_chroma_bgr24:
- return FOURCC_24BG;
- case tmedia_chroma_rgb565le:
- return FOURCC_RGBP;
- case tmedia_chroma_rgb32:
- return FOURCC_ARGB;
- case tmedia_chroma_nv21:
- return FOURCC_NV21;
- case tmedia_chroma_nv12:
- return FOURCC_NV12;
- case tmedia_chroma_yuv422p:
- return FOURCC_I422;
- case tmedia_chroma_uyvy422:
- return FOURCC_UYVY;
- case tmedia_chroma_yuyv422:
- return FOURCC_YUY2;
- case tmedia_chroma_yuv420p:
- return FOURCC_I420;
- case tmedia_chroma_mjpeg:
- return FOURCC_MJPG;
- default:
- TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
- return FOURCC_ANY;
- }
+ switch (chroma) {
+ case tmedia_chroma_rgb24:
+ case tmedia_chroma_bgr24:
+ return FOURCC_24BG;
+ case tmedia_chroma_rgb565le:
+ return FOURCC_RGBP;
+ case tmedia_chroma_rgb32:
+ return FOURCC_ARGB;
+ case tmedia_chroma_nv21:
+ return FOURCC_NV21;
+ case tmedia_chroma_nv12:
+ return FOURCC_NV12;
+ case tmedia_chroma_yuv422p:
+ return FOURCC_I422;
+ case tmedia_chroma_uyvy422:
+ return FOURCC_UYVY;
+ case tmedia_chroma_yuyv422:
+ return FOURCC_YUY2;
+ case tmedia_chroma_yuv420p:
+ return FOURCC_I420;
+ case tmedia_chroma_mjpeg:
+ return FOURCC_MJPG;
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return FOURCC_ANY;
+ }
}
static int tdav_converter_video_libyuv_init(tmedia_converter_video_t* self, tsk_size_t srcWidth, tsk_size_t srcHeight, tmedia_chroma_t srcChroma, tsk_size_t dstWidth, tsk_size_t dstHeight, tmedia_chroma_t dstChroma)
{
- TSK_DEBUG_INFO("Initializing new LibYUV Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", (int)srcWidth, (int)srcHeight, (int)srcChroma, (int)dstWidth, (int)dstHeight, (int)dstChroma);
-
- if ((TDAV_CONVERTER_VIDEO_LIBYUV(self)->srcFormat = _tdav_converter_video_libyuv_get_pixfmt(srcChroma)) == FOURCC_ANY){
- TSK_DEBUG_ERROR("Invalid source chroma");
- return -2;
- }
- if ((TDAV_CONVERTER_VIDEO_LIBYUV(self)->dstFormat = _tdav_converter_video_libyuv_get_pixfmt(dstChroma)) == FOURCC_ANY){
- TSK_DEBUG_ERROR("Invalid destination chroma");
- return -3;
- }
-
- TDAV_CONVERTER_VIDEO_LIBYUV(self)->toI420 = (TDAV_CONVERTER_VIDEO_LIBYUV(self)->dstFormat == FOURCC_I420);
- TDAV_CONVERTER_VIDEO_LIBYUV(self)->fromI420 = (TDAV_CONVERTER_VIDEO_LIBYUV(self)->srcFormat == FOURCC_I420);
- if (!TDAV_CONVERTER_VIDEO_LIBYUV(self)->toI420 && !TDAV_CONVERTER_VIDEO_LIBYUV(self)->fromI420)
- {
- TSK_DEBUG_ERROR("LIBYUV only support from/to I420");
- return -1;
- }
- return 0;
+ TSK_DEBUG_INFO("Initializing new LibYUV Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", (int)srcWidth, (int)srcHeight, (int)srcChroma, (int)dstWidth, (int)dstHeight, (int)dstChroma);
+
+ if ((TDAV_CONVERTER_VIDEO_LIBYUV(self)->srcFormat = _tdav_converter_video_libyuv_get_pixfmt(srcChroma)) == FOURCC_ANY) {
+ TSK_DEBUG_ERROR("Invalid source chroma");
+ return -2;
+ }
+ if ((TDAV_CONVERTER_VIDEO_LIBYUV(self)->dstFormat = _tdav_converter_video_libyuv_get_pixfmt(dstChroma)) == FOURCC_ANY) {
+ TSK_DEBUG_ERROR("Invalid destination chroma");
+ return -3;
+ }
+
+ TDAV_CONVERTER_VIDEO_LIBYUV(self)->toI420 = (TDAV_CONVERTER_VIDEO_LIBYUV(self)->dstFormat == FOURCC_I420);
+ TDAV_CONVERTER_VIDEO_LIBYUV(self)->fromI420 = (TDAV_CONVERTER_VIDEO_LIBYUV(self)->srcFormat == FOURCC_I420);
+ if (!TDAV_CONVERTER_VIDEO_LIBYUV(self)->toI420 && !TDAV_CONVERTER_VIDEO_LIBYUV(self)->fromI420) {
+ TSK_DEBUG_ERROR("LIBYUV only support from/to I420");
+ return -1;
+ }
+ return 0;
}
static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t* _self, const void* buffer, tsk_size_t buffer_size, void** output, tsk_size_t* output_max_size)
@@ -166,335 +164,339 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
} \
(curr_size) = (new_size); \
}
- static const int crop_x = 0;
- static const int crop_y = 0;
-
- int ret;
- tdav_converter_video_libyuv_t* self = TDAV_CONVERTER_VIDEO_LIBYUV(_self);
- tsk_bool_t scale = ((_self->dstWidth != _self->srcWidth) || (_self->dstHeight != _self->srcHeight));
- int s, ls, src_y_stride, src_u_stride, src_v_stride, dst_y_stride, dst_u_stride, dst_v_stride;
- int src_w, src_h, dst_w, dst_h;
- uint8 *dst_y, *dst_u, *dst_v, *src_y, *src_u, *src_v;
-
- RotationMode rotation = kRotate0;
-
- switch (_self->rotation){
- case 90: rotation = kRotate90; break;
- case 180: rotation = kRotate180; break;
- case 270: rotation = kRotate270; break;
- }
- //rotation = kRotate0;
-
- // not square and rotaion=270/90 -> requires scaling unless disabled
- if ((rotation == kRotate90 || rotation == kRotate270) && _self->scale_rotated_frames){
- scale |= (_self->dstWidth != _self->dstHeight) && (rotation == kRotate90 || rotation == kRotate270);
- }
-
- src_w = (int)_self->srcWidth, src_h = (int)_self->srcHeight;
-
- if (self->toI420) {
- tsk_size_t x_in_size;
- // check input size
- x_in_size = _tdav_converter_video_libyuv_is_chroma_varsize(_self->srcChroma) ? buffer_size : _tdav_converter_video_libyuv_get_size(_self->srcChroma, src_w, src_h);
- if (x_in_size > buffer_size) { // Ignore any extra data. For example, "CVPixelBufferGetDataSize()" will return size padded with 8 extra bytes for RGB32.
- TSK_DEBUG_ERROR("Invalid input size: %u>%u", (unsigned)x_in_size, (unsigned)buffer_size);
- return 0;
- }
-
- dst_w = src_w, dst_h = src_h; // because no scaling when converting to I420
- ls = src_w * src_h;
- s = ((ls * 3) >> 1);
- if (scale || rotation != kRotate0){
- RESIZE_BUFFER(self->chroma.ptr, self->chroma.size, s);
- dst_y = self->chroma.ptr;
- }
- else{
- RESIZE_BUFFER((*output), (*output_max_size), s);
- dst_y = (uint8*)*output;
- }
- dst_u = (dst_y + ls);
- dst_v = dst_u + (ls >> 2);
- src_y_stride = dst_y_stride = src_w;
- src_u_stride = src_v_stride = dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
-
- // convert to I420 without scaling or rotation
- ret = ConvertToI420(
- (const uint8*)buffer, (int)x_in_size,
- dst_y, dst_y_stride,
- dst_u, dst_u_stride,
- dst_v, dst_v_stride,
- crop_x, crop_y,
- (int)_self->srcWidth, (int)(_self->flip ? (_self->srcHeight * -1) : _self->srcHeight), // vertical flip
- (int)_self->srcWidth, (int)_self->srcHeight,
- kRotate0,
- (uint32)self->srcFormat);
- // mirror: horizontal flip (front camera video)
- if (_self->mirror) {
- RESIZE_BUFFER(self->mirror.ptr, self->mirror.size, s);
- ret = I420Mirror(
- dst_y, dst_y_stride,
- dst_u, dst_u_stride,
- dst_v, dst_v_stride,
- self->mirror.ptr, dst_y_stride,
- (self->mirror.ptr + ls), dst_u_stride,
- (self->mirror.ptr + ls + (ls >> 2)), dst_v_stride,
- (int)_self->srcWidth, (int)_self->srcHeight);
- memcpy(dst_y, self->mirror.ptr, s);
- }
-
- if (ret){
- TSK_DEBUG_ERROR("ConvertToI420 failed with error code = %d, in_size:%u", ret, x_in_size);
- return 0;
- }
-
- // rotate
- if (rotation != kRotate0){
- dst_w = (int)((rotation == kRotate90 || rotation == kRotate270) ? _self->srcHeight : _self->srcWidth);
- dst_h = (int)((rotation == kRotate90 || rotation == kRotate270) ? _self->srcWidth : _self->srcHeight);
-
- src_y = dst_y, src_u = dst_u, src_v = dst_v;
- src_y_stride = src_y_stride, src_u_stride = src_u_stride, src_v_stride = src_v_stride;
- dst_y_stride = dst_w;
- dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
-
- if (scale){
- RESIZE_BUFFER(self->rotate.ptr, self->rotate.size, s);
- dst_y = self->rotate.ptr;
- }
- else{// last step
- RESIZE_BUFFER((*output), (*output_max_size), s);
- dst_y = (uint8*)*output;
- }
-
- dst_u = (dst_y + ls);
- dst_v = dst_u + (ls >> 2);
- ret = I420Rotate(
- src_y, src_y_stride,
- src_u, src_u_stride,
- src_v, src_v_stride,
- dst_y, dst_y_stride,
- dst_u, dst_u_stride,
- dst_v, dst_v_stride,
- (int)_self->srcWidth, (int)_self->srcHeight, rotation);
- if (ret){
- TSK_DEBUG_ERROR("I420Rotate failed with error code = %d", ret);
- return 0;
- }
-
- // scale to fit ratio, pad, crop then copy
- if ((rotation == kRotate90 || rotation == kRotate270) && _self->scale_rotated_frames){
- int iwidth = (int)_self->srcHeight;
- int iheight = (int)_self->srcWidth;
-
- src_y = dst_y, src_u = dst_u, src_v = dst_v;
- src_w = dst_w, src_h = dst_h;
- src_y_stride = dst_y_stride, src_u_stride = dst_u_stride, src_v_stride = dst_v_stride;
-
- if (_self->dstWidth != _self->dstHeight) {
- if (iwidth * _self->srcHeight > iheight * _self->srcWidth) {
- iwidth = (int)((iheight * _self->srcWidth / _self->srcHeight) & ~1);
- int iwidth_offset = (int)((_self->srcHeight - iwidth) >> 1);
- src_y += iwidth_offset;
- src_u += iwidth_offset >> 1;
- src_v += iwidth_offset >> 1;
- }
- else if (iwidth * _self->srcHeight < iheight * _self->srcWidth) {
- iheight = (int)(iwidth * _self->srcHeight / _self->srcWidth);
- int iheight_offset = (int)((_self->srcWidth - iheight) >> 2);
- iheight_offset <<= 1;
- src_y += iheight_offset * src_y_stride;
- src_u += (iheight_offset >> 1) * src_u_stride;
- src_v += (iheight_offset >> 1) * src_v_stride;
- }
-
- src_w = iwidth, src_h = iheight;
- src_y_stride = src_w;
- src_u_stride = src_v_stride = ((src_y_stride + 1) >> 1);
-
- dst_w = (int)_self->dstWidth;
- dst_h = (int)_self->dstHeight;
- ls = dst_w * dst_h;
- s = ((ls * 3) >> 1);
- RESIZE_BUFFER((*output), (*output_max_size), s);
- dst_y_stride = dst_w;
- dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
- uint8* dst_y = (uint8*)*output;
- uint8* dst_u = (dst_y + ls);
- uint8* dst_v = dst_u + (ls >> 2);
-
- ret = I420Scale(
- src_y, src_y_stride,
- src_u, src_u_stride,
- src_v, src_v_stride,
- src_w, src_h,
- dst_y, dst_y_stride,
- dst_u, dst_u_stride,
- dst_v, dst_v_stride,
- dst_w, dst_h,
- kFilterBox);
- if (ret){
- TSK_DEBUG_ERROR("I420Scale failed with error code = %d", ret);
- return 0;
- }
- return s;
- }
- }
- }
-
- // scale
- if (scale){
- src_w = dst_w, src_h = dst_h;
- dst_w = (int)(((rotation == kRotate90 || rotation == kRotate270) && !_self->scale_rotated_frames) ? _self->dstHeight : _self->dstWidth);
- dst_h = (int)(((rotation == kRotate90 || rotation == kRotate270) && !_self->scale_rotated_frames) ? _self->dstWidth : _self->dstHeight);
- src_y = dst_y, src_u = dst_u, src_v = dst_v;
- src_y_stride = dst_y_stride, src_u_stride = dst_u_stride, src_v_stride = dst_v_stride;
- dst_y_stride = dst_w;
- dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
-
- ls = dst_w * dst_h;
- s = ((ls * 3) >> 1);
- RESIZE_BUFFER((*output), (*output_max_size), s);
- dst_y = (uint8*)*output;
- dst_u = (dst_y + ls);
- dst_v = dst_u + (ls >> 2);
-
- ret = I420Scale(
- src_y, src_y_stride,
- src_u, src_u_stride,
- src_v, src_v_stride,
- src_w, src_h,
- dst_y, dst_y_stride,
- dst_u, dst_u_stride,
- dst_v, dst_v_stride,
- dst_w, dst_h,
- kFilterNone);
- if (ret){
- TSK_DEBUG_ERROR("I420Scale failed with error code = %d", ret);
- return 0;
- }
- }
-
- return ((dst_w * dst_h * 3) >> 1);
- }
- else if (self->fromI420){
- static const int dst_sample_stride = 0;
-
- dst_w = (int)_self->dstWidth, dst_h = (int)_self->dstHeight;
- src_y = (uint8*)buffer;
- src_u = (src_y + (src_w * src_h));
- src_v = (src_u + ((src_w * src_h) >> 2));
- src_y_stride = src_w;
- src_u_stride = src_v_stride = ((src_y_stride + 1) >> 1);
-
- // mirror: horizontal flip (front camera video)
- if ((_self->mirror)) {
- ls = src_w * src_h;
- s = ((ls * 3) >> 1);
- if (s < (int)buffer_size) { // security check
- RESIZE_BUFFER(self->mirror.ptr, self->mirror.size, s);
- ret = I420Mirror(
- src_y, src_y_stride,
- src_u, src_u_stride,
- src_v, src_v_stride,
- self->mirror.ptr, src_y_stride,
- (self->mirror.ptr + ls), src_u_stride,
- (self->mirror.ptr + ls + (ls >> 2)), src_v_stride,
- src_w, src_h);
- memcpy(src_y, self->mirror.ptr, s);
- }
- }
-
- if (scale){
- ls = dst_w * dst_h;
- s = ((ls * 3) >> 1);
-
- RESIZE_BUFFER(self->scale.ptr, self->scale.size, s);
- dst_y = self->scale.ptr;
- dst_u = (dst_y + (dst_w * dst_h));
- dst_v = (dst_u + ((dst_w * dst_h) >> 2));
- dst_y_stride = dst_w;
- dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
-
- ret = I420Scale(
- src_y, src_y_stride,
- src_u, src_u_stride,
- src_v, src_v_stride,
- src_w, src_h,
- dst_y, dst_y_stride,
- dst_u, dst_u_stride,
- dst_v, dst_v_stride,
- dst_w, dst_h,
- kFilterNone);
-
- if (ret){
- TSK_DEBUG_ERROR("I420Scale failed with error code = %d", ret);
- return 0;
- }
-
- src_y = dst_y;
- src_u = (dst_y + ls);
- src_v = (dst_u + (ls >> 2));
- src_y_stride = dst_y_stride;
- src_u_stride = src_v_stride = ((src_y_stride + 1) >> 1);
- }
-
- s = (int)_tdav_converter_video_libyuv_get_size(_self->dstChroma, _self->srcWidth, _self->srcHeight);
- RESIZE_BUFFER((*output), (*output_max_size), s);
-
- ret = ConvertFromI420(
- src_y, src_y_stride,
- src_u, src_u_stride,
- src_v, src_v_stride,
- (uint8*)*output, dst_sample_stride,
- (int)_self->dstWidth, (_self->flip ? ((int)_self->dstHeight * -1) : (int)_self->dstHeight), // vertical flip
- (uint32)self->dstFormat);
- if (ret){
- TSK_DEBUG_ERROR("ConvertFromI420 failed with error code = %d", ret);
- return 0;
- }
-
- return s;
- }
-
- // Must be from/to I420
- TSK_DEBUG_ERROR("Not expected code called");
- return 0;
+ static const int crop_x = 0;
+ static const int crop_y = 0;
+
+ int ret;
+ tdav_converter_video_libyuv_t* self = TDAV_CONVERTER_VIDEO_LIBYUV(_self);
+ tsk_bool_t scale = ((_self->dstWidth != _self->srcWidth) || (_self->dstHeight != _self->srcHeight));
+ int s, ls, src_y_stride, src_u_stride, src_v_stride, dst_y_stride, dst_u_stride, dst_v_stride;
+ int src_w, src_h, dst_w, dst_h;
+ uint8 *dst_y, *dst_u, *dst_v, *src_y, *src_u, *src_v;
+
+ RotationMode rotation = kRotate0;
+
+ switch (_self->rotation) {
+ case 90:
+ rotation = kRotate90;
+ break;
+ case 180:
+ rotation = kRotate180;
+ break;
+ case 270:
+ rotation = kRotate270;
+ break;
+ }
+ //rotation = kRotate0;
+
+    // not square and rotation=270/90 -> requires scaling unless disabled
+ if ((rotation == kRotate90 || rotation == kRotate270) && _self->scale_rotated_frames) {
+ scale |= (_self->dstWidth != _self->dstHeight) && (rotation == kRotate90 || rotation == kRotate270);
+ }
+
+ src_w = (int)_self->srcWidth, src_h = (int)_self->srcHeight;
+
+ if (self->toI420) {
+ tsk_size_t x_in_size;
+ // check input size
+ x_in_size = _tdav_converter_video_libyuv_is_chroma_varsize(_self->srcChroma) ? buffer_size : _tdav_converter_video_libyuv_get_size(_self->srcChroma, src_w, src_h);
+ if (x_in_size > buffer_size) { // Ignore any extra data. For example, "CVPixelBufferGetDataSize()" will return size padded with 8 extra bytes for RGB32.
+ TSK_DEBUG_ERROR("Invalid input size: %u>%u", (unsigned)x_in_size, (unsigned)buffer_size);
+ return 0;
+ }
+
+ dst_w = src_w, dst_h = src_h; // because no scaling when converting to I420
+ ls = src_w * src_h;
+ s = ((ls * 3) >> 1);
+ if (scale || rotation != kRotate0) {
+ RESIZE_BUFFER(self->chroma.ptr, self->chroma.size, s);
+ dst_y = self->chroma.ptr;
+ }
+ else {
+ RESIZE_BUFFER((*output), (*output_max_size), s);
+ dst_y = (uint8*)*output;
+ }
+ dst_u = (dst_y + ls);
+ dst_v = dst_u + (ls >> 2);
+ src_y_stride = dst_y_stride = src_w;
+ src_u_stride = src_v_stride = dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
+
+ // convert to I420 without scaling or rotation
+ ret = ConvertToI420(
+ (const uint8*)buffer, (int)x_in_size,
+ dst_y, dst_y_stride,
+ dst_u, dst_u_stride,
+ dst_v, dst_v_stride,
+ crop_x, crop_y,
+ (int)_self->srcWidth, (int)(_self->flip ? (_self->srcHeight * -1) : _self->srcHeight), // vertical flip
+ (int)_self->srcWidth, (int)_self->srcHeight,
+ kRotate0,
+ (uint32)self->srcFormat);
+ // mirror: horizontal flip (front camera video)
+ if (_self->mirror) {
+ RESIZE_BUFFER(self->mirror.ptr, self->mirror.size, s);
+ ret = I420Mirror(
+ dst_y, dst_y_stride,
+ dst_u, dst_u_stride,
+ dst_v, dst_v_stride,
+ self->mirror.ptr, dst_y_stride,
+ (self->mirror.ptr + ls), dst_u_stride,
+ (self->mirror.ptr + ls + (ls >> 2)), dst_v_stride,
+ (int)_self->srcWidth, (int)_self->srcHeight);
+ memcpy(dst_y, self->mirror.ptr, s);
+ }
+
+ if (ret) {
+ TSK_DEBUG_ERROR("ConvertToI420 failed with error code = %d, in_size:%u", ret, x_in_size);
+ return 0;
+ }
+
+ // rotate
+ if (rotation != kRotate0) {
+ dst_w = (int)((rotation == kRotate90 || rotation == kRotate270) ? _self->srcHeight : _self->srcWidth);
+ dst_h = (int)((rotation == kRotate90 || rotation == kRotate270) ? _self->srcWidth : _self->srcHeight);
+
+ src_y = dst_y, src_u = dst_u, src_v = dst_v;
+ src_y_stride = src_y_stride, src_u_stride = src_u_stride, src_v_stride = src_v_stride;
+ dst_y_stride = dst_w;
+ dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
+
+ if (scale) {
+ RESIZE_BUFFER(self->rotate.ptr, self->rotate.size, s);
+ dst_y = self->rotate.ptr;
+ }
+ else { // last step
+ RESIZE_BUFFER((*output), (*output_max_size), s);
+ dst_y = (uint8*)*output;
+ }
+
+ dst_u = (dst_y + ls);
+ dst_v = dst_u + (ls >> 2);
+ ret = I420Rotate(
+ src_y, src_y_stride,
+ src_u, src_u_stride,
+ src_v, src_v_stride,
+ dst_y, dst_y_stride,
+ dst_u, dst_u_stride,
+ dst_v, dst_v_stride,
+ (int)_self->srcWidth, (int)_self->srcHeight, rotation);
+ if (ret) {
+ TSK_DEBUG_ERROR("I420Rotate failed with error code = %d", ret);
+ return 0;
+ }
+
+ // scale to fit ratio, pad, crop then copy
+ if ((rotation == kRotate90 || rotation == kRotate270) && _self->scale_rotated_frames) {
+ int iwidth = (int)_self->srcHeight;
+ int iheight = (int)_self->srcWidth;
+
+ src_y = dst_y, src_u = dst_u, src_v = dst_v;
+ src_w = dst_w, src_h = dst_h;
+ src_y_stride = dst_y_stride, src_u_stride = dst_u_stride, src_v_stride = dst_v_stride;
+
+ if (_self->dstWidth != _self->dstHeight) {
+ if (iwidth * _self->srcHeight > iheight * _self->srcWidth) {
+ iwidth = (int)((iheight * _self->srcWidth / _self->srcHeight) & ~1);
+ int iwidth_offset = (int)((_self->srcHeight - iwidth) >> 1);
+ src_y += iwidth_offset;
+ src_u += iwidth_offset >> 1;
+ src_v += iwidth_offset >> 1;
+ }
+ else if (iwidth * _self->srcHeight < iheight * _self->srcWidth) {
+ iheight = (int)(iwidth * _self->srcHeight / _self->srcWidth);
+ int iheight_offset = (int)((_self->srcWidth - iheight) >> 2);
+ iheight_offset <<= 1;
+ src_y += iheight_offset * src_y_stride;
+ src_u += (iheight_offset >> 1) * src_u_stride;
+ src_v += (iheight_offset >> 1) * src_v_stride;
+ }
+
+ src_w = iwidth, src_h = iheight;
+ src_y_stride = src_w;
+ src_u_stride = src_v_stride = ((src_y_stride + 1) >> 1);
+
+ dst_w = (int)_self->dstWidth;
+ dst_h = (int)_self->dstHeight;
+ ls = dst_w * dst_h;
+ s = ((ls * 3) >> 1);
+ RESIZE_BUFFER((*output), (*output_max_size), s);
+ dst_y_stride = dst_w;
+ dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
+ uint8* dst_y = (uint8*)*output;
+ uint8* dst_u = (dst_y + ls);
+ uint8* dst_v = dst_u + (ls >> 2);
+
+ ret = I420Scale(
+ src_y, src_y_stride,
+ src_u, src_u_stride,
+ src_v, src_v_stride,
+ src_w, src_h,
+ dst_y, dst_y_stride,
+ dst_u, dst_u_stride,
+ dst_v, dst_v_stride,
+ dst_w, dst_h,
+ kFilterBox);
+ if (ret) {
+ TSK_DEBUG_ERROR("I420Scale failed with error code = %d", ret);
+ return 0;
+ }
+ return s;
+ }
+ }
+ }
+
+ // scale
+ if (scale) {
+ src_w = dst_w, src_h = dst_h;
+ dst_w = (int)(((rotation == kRotate90 || rotation == kRotate270) && !_self->scale_rotated_frames) ? _self->dstHeight : _self->dstWidth);
+ dst_h = (int)(((rotation == kRotate90 || rotation == kRotate270) && !_self->scale_rotated_frames) ? _self->dstWidth : _self->dstHeight);
+ src_y = dst_y, src_u = dst_u, src_v = dst_v;
+ src_y_stride = dst_y_stride, src_u_stride = dst_u_stride, src_v_stride = dst_v_stride;
+ dst_y_stride = dst_w;
+ dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
+
+ ls = dst_w * dst_h;
+ s = ((ls * 3) >> 1);
+ RESIZE_BUFFER((*output), (*output_max_size), s);
+ dst_y = (uint8*)*output;
+ dst_u = (dst_y + ls);
+ dst_v = dst_u + (ls >> 2);
+
+ ret = I420Scale(
+ src_y, src_y_stride,
+ src_u, src_u_stride,
+ src_v, src_v_stride,
+ src_w, src_h,
+ dst_y, dst_y_stride,
+ dst_u, dst_u_stride,
+ dst_v, dst_v_stride,
+ dst_w, dst_h,
+ kFilterNone);
+ if (ret) {
+ TSK_DEBUG_ERROR("I420Scale failed with error code = %d", ret);
+ return 0;
+ }
+ }
+
+ return ((dst_w * dst_h * 3) >> 1);
+ }
+ else if (self->fromI420) {
+ static const int dst_sample_stride = 0;
+
+ dst_w = (int)_self->dstWidth, dst_h = (int)_self->dstHeight;
+ src_y = (uint8*)buffer;
+ src_u = (src_y + (src_w * src_h));
+ src_v = (src_u + ((src_w * src_h) >> 2));
+ src_y_stride = src_w;
+ src_u_stride = src_v_stride = ((src_y_stride + 1) >> 1);
+
+ // mirror: horizontal flip (front camera video)
+ if ((_self->mirror)) {
+ ls = src_w * src_h;
+ s = ((ls * 3) >> 1);
+ if (s < (int)buffer_size) { // security check
+ RESIZE_BUFFER(self->mirror.ptr, self->mirror.size, s);
+ ret = I420Mirror(
+ src_y, src_y_stride,
+ src_u, src_u_stride,
+ src_v, src_v_stride,
+ self->mirror.ptr, src_y_stride,
+ (self->mirror.ptr + ls), src_u_stride,
+ (self->mirror.ptr + ls + (ls >> 2)), src_v_stride,
+ src_w, src_h);
+ memcpy(src_y, self->mirror.ptr, s);
+ }
+ }
+
+ if (scale) {
+ ls = dst_w * dst_h;
+ s = ((ls * 3) >> 1);
+
+ RESIZE_BUFFER(self->scale.ptr, self->scale.size, s);
+ dst_y = self->scale.ptr;
+ dst_u = (dst_y + (dst_w * dst_h));
+ dst_v = (dst_u + ((dst_w * dst_h) >> 2));
+ dst_y_stride = dst_w;
+ dst_u_stride = dst_v_stride = ((dst_y_stride + 1) >> 1);
+
+ ret = I420Scale(
+ src_y, src_y_stride,
+ src_u, src_u_stride,
+ src_v, src_v_stride,
+ src_w, src_h,
+ dst_y, dst_y_stride,
+ dst_u, dst_u_stride,
+ dst_v, dst_v_stride,
+ dst_w, dst_h,
+ kFilterNone);
+
+ if (ret) {
+ TSK_DEBUG_ERROR("I420Scale failed with error code = %d", ret);
+ return 0;
+ }
+
+ src_y = dst_y;
+ src_u = (dst_y + ls);
+ src_v = (dst_u + (ls >> 2));
+ src_y_stride = dst_y_stride;
+ src_u_stride = src_v_stride = ((src_y_stride + 1) >> 1);
+ }
+
+ s = (int)_tdav_converter_video_libyuv_get_size(_self->dstChroma, _self->srcWidth, _self->srcHeight);
+ RESIZE_BUFFER((*output), (*output_max_size), s);
+
+ ret = ConvertFromI420(
+ src_y, src_y_stride,
+ src_u, src_u_stride,
+ src_v, src_v_stride,
+ (uint8*)*output, dst_sample_stride,
+ (int)_self->dstWidth, (_self->flip ? ((int)_self->dstHeight * -1) : (int)_self->dstHeight), // vertical flip
+ (uint32)self->dstFormat);
+ if (ret) {
+ TSK_DEBUG_ERROR("ConvertFromI420 failed with error code = %d", ret);
+ return 0;
+ }
+
+ return s;
+ }
+
+ // Must be from/to I420
+ TSK_DEBUG_ERROR("Not expected code called");
+ return 0;
}
static tsk_object_t* tdav_converter_video_libyuv_ctor(tsk_object_t * self, va_list * app)
{
- tdav_converter_video_libyuv_t *converter = (tdav_converter_video_libyuv_t *)self;
- if (converter){
+ tdav_converter_video_libyuv_t *converter = (tdav_converter_video_libyuv_t *)self;
+ if (converter) {
- }
- return self;
+ }
+ return self;
}
static tsk_object_t* tdav_converter_video_libyuv_dtor(tsk_object_t * self)
{
- tdav_converter_video_libyuv_t *converter = (tdav_converter_video_libyuv_t *)self;
- if (converter){
- TSK_FREE(converter->chroma.ptr);
- TSK_FREE(converter->rotate.ptr);
- TSK_FREE(converter->scale.ptr);
- TSK_FREE(converter->mirror.ptr);
- }
-
- return self;
+ tdav_converter_video_libyuv_t *converter = (tdav_converter_video_libyuv_t *)self;
+ if (converter) {
+ TSK_FREE(converter->chroma.ptr);
+ TSK_FREE(converter->rotate.ptr);
+ TSK_FREE(converter->scale.ptr);
+ TSK_FREE(converter->mirror.ptr);
+ }
+
+ return self;
}
-static const tsk_object_def_t tdav_converter_video_libyuv_def_s =
-{
- sizeof(tdav_converter_video_libyuv_t),
- tdav_converter_video_libyuv_ctor,
- tdav_converter_video_libyuv_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_converter_video_libyuv_def_s = {
+ sizeof(tdav_converter_video_libyuv_t),
+ tdav_converter_video_libyuv_ctor,
+ tdav_converter_video_libyuv_dtor,
+ tsk_null,
};
const tsk_object_def_t *tdav_converter_video_libyuv_def_t = &tdav_converter_video_libyuv_def_s;
-static const tmedia_converter_video_plugin_def_t tdav_converter_video_libyuv_plugin_def_s =
-{
- &tdav_converter_video_libyuv_def_s,
+static const tmedia_converter_video_plugin_def_t tdav_converter_video_libyuv_plugin_def_s = {
+ &tdav_converter_video_libyuv_def_s,
- tdav_converter_video_libyuv_init,
- tdav_converter_video_libyuv_process
+ tdav_converter_video_libyuv_init,
+ tdav_converter_video_libyuv_process
};
const tmedia_converter_video_plugin_def_t *tdav_converter_video_libyuv_plugin_def_t = &tdav_converter_video_libyuv_plugin_def_s;
@@ -516,24 +518,23 @@ extern "C" {
}
#endif
-typedef struct tdav_converter_video_ffmpeg_s
-{
- TMEDIA_DECLARE_CONVERTER_VIDEO;
+typedef struct tdav_converter_video_ffmpeg_s {
+ TMEDIA_DECLARE_CONVERTER_VIDEO;
- struct SwsContext *context;
+ struct SwsContext *context;
- enum PixelFormat srcFormat;
- enum PixelFormat dstFormat;
+ enum PixelFormat srcFormat;
+ enum PixelFormat dstFormat;
- AVFrame* srcFrame;
- AVFrame* dstFrame;
+ AVFrame* srcFrame;
+ AVFrame* dstFrame;
- struct {
- struct SwsContext *context;
- AVFrame* frame;
- uint8_t* buffer;
- tsk_size_t buffer_size;
- } rot;
+ struct {
+ struct SwsContext *context;
+ AVFrame* frame;
+ uint8_t* buffer;
+ tsk_size_t buffer_size;
+ } rot;
}
tdav_converter_video_ffmpeg_t;
@@ -563,207 +564,207 @@ tdav_converter_video_ffmpeg_t;
static inline enum PixelFormat _tdav_converter_video_ffmpeg_get_pixfmt(tmedia_chroma_t chroma)
{
- switch(chroma){
- case tmedia_chroma_rgb24:
- return PIX_FMT_RGB24;
- case tmedia_chroma_bgr24:
- return PIX_FMT_BGR24;
- case tmedia_chroma_rgb32:
- return PIX_FMT_RGB32;
- case tmedia_chroma_rgb565le:
- return PIX_FMT_RGB565LE;
- case tmedia_chroma_rgb565be:
- return PIX_FMT_RGB565BE;
- case tmedia_chroma_nv21:
- return PIX_FMT_NV21;
- case tmedia_chroma_nv12:
- return PIX_FMT_NV12;
- case tmedia_chroma_yuv422p:
- return PIX_FMT_YUV422P;
- case tmedia_chroma_uyvy422:
- return PIX_FMT_UYVY422;
- case tmedia_chroma_yuyv422:
- return PIX_FMT_YUYV422;
- case tmedia_chroma_yuv420p:
- return PIX_FMT_YUV420P;
- default:
- TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
- return PIX_FMT_NONE;
- }
+ switch(chroma) {
+ case tmedia_chroma_rgb24:
+ return PIX_FMT_RGB24;
+ case tmedia_chroma_bgr24:
+ return PIX_FMT_BGR24;
+ case tmedia_chroma_rgb32:
+ return PIX_FMT_RGB32;
+ case tmedia_chroma_rgb565le:
+ return PIX_FMT_RGB565LE;
+ case tmedia_chroma_rgb565be:
+ return PIX_FMT_RGB565BE;
+ case tmedia_chroma_nv21:
+ return PIX_FMT_NV21;
+ case tmedia_chroma_nv12:
+ return PIX_FMT_NV12;
+ case tmedia_chroma_yuv422p:
+ return PIX_FMT_YUV422P;
+ case tmedia_chroma_uyvy422:
+ return PIX_FMT_UYVY422;
+ case tmedia_chroma_yuyv422:
+ return PIX_FMT_YUYV422;
+ case tmedia_chroma_yuv420p:
+ return PIX_FMT_YUV420P;
+ default:
+ TSK_DEBUG_ERROR("Invalid chroma %d", (int)chroma);
+ return PIX_FMT_NONE;
+ }
}
static int tdav_converter_video_ffmpeg_init(tmedia_converter_video_t* self, tsk_size_t srcWidth, tsk_size_t srcHeight, tmedia_chroma_t srcChroma, tsk_size_t dstWidth, tsk_size_t dstHeight, tmedia_chroma_t dstChroma)
{
- TSK_DEBUG_INFO("Initializing new FFmpeg Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", srcWidth, srcHeight, srcChroma, dstWidth, dstHeight, dstChroma);
-
- if((TDAV_CONVERTER_VIDEO_FFMPEG(self)->srcFormat = _tdav_converter_video_ffmpeg_get_pixfmt(srcChroma)) == PIX_FMT_NONE){
- TSK_DEBUG_ERROR("Invalid source chroma");
- return -2;
- }
- if((TDAV_CONVERTER_VIDEO_FFMPEG(self)->dstFormat = _tdav_converter_video_ffmpeg_get_pixfmt(dstChroma)) == PIX_FMT_NONE){
- TSK_DEBUG_ERROR("Invalid destination chroma");
- return -3;
- }
-
- return 0;
+ TSK_DEBUG_INFO("Initializing new FFmpeg Video Converter src=(%dx%d@%d) dst=(%dx%d@%d)", srcWidth, srcHeight, srcChroma, dstWidth, dstHeight, dstChroma);
+
+ if((TDAV_CONVERTER_VIDEO_FFMPEG(self)->srcFormat = _tdav_converter_video_ffmpeg_get_pixfmt(srcChroma)) == PIX_FMT_NONE) {
+ TSK_DEBUG_ERROR("Invalid source chroma");
+ return -2;
+ }
+ if((TDAV_CONVERTER_VIDEO_FFMPEG(self)->dstFormat = _tdav_converter_video_ffmpeg_get_pixfmt(dstChroma)) == PIX_FMT_NONE) {
+ TSK_DEBUG_ERROR("Invalid destination chroma");
+ return -3;
+ }
+
+ return 0;
}
static tsk_size_t tdav_converter_video_ffmpeg_process(tmedia_converter_video_t* _self, const void* buffer, tsk_size_t buffer_size, void** output, tsk_size_t* output_max_size)
{
- int ret, size;
- tsk_bool_t _rotate = tsk_false;
- tdav_converter_video_ffmpeg_t* self = TDAV_CONVERTER_VIDEO_FFMPEG(_self);
-
- if (!self || !buffer || !output){
- TSK_DEBUG_ERROR("Invalid parameter");
- return 0;
- }
-
- /* Pictures */
- if (!self->srcFrame){
- if (!(self->srcFrame = avcodec_alloc_frame())){
- TSK_DEBUG_ERROR("Failed to create picture");
- return 0;
- }
- }
- if (!self->dstFrame){
- if (!(self->dstFrame = avcodec_alloc_frame())){
- TSK_DEBUG_ERROR("Failed to create picture");
- return 0;
- }
- }
-
- size = avpicture_get_size(self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight);
- if ((int)*output_max_size < size){
- if (!(*output = tsk_realloc(*output, (size + FF_INPUT_BUFFER_PADDING_SIZE)))){
- *output_max_size = 0;
- TSK_DEBUG_ERROR("Failed to allocate buffer");
- return 0;
- }
- *output_max_size = size;
- }
-
- /* Wrap the source buffer */
- ret = avpicture_fill((AVPicture *)self->srcFrame, (uint8_t*)buffer, self->srcFormat, (int)_self->srcWidth, (int)_self->srcHeight);
- /* Wrap the destination buffer */
- ret = avpicture_fill((AVPicture *)self->dstFrame, (uint8_t*)*output, self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight);
-
- /* === performs conversion === */
- /* Context */
- if (!self->context) {
- self->context = sws_getContext(
- (int)_self->srcWidth, (int)_self->srcHeight, self->srcFormat,
- (int)_self->dstWidth, (int)_self->dstHeight, self->dstFormat,
- SWS_FAST_BILINEAR, NULL, NULL, NULL);
-
- if (!self->context) {
- TSK_DEBUG_ERROR("Failed to create context");
- return 0;
- }
- }
-
- /*FIXME: For now only 90\B0 rotation is supported this is why we always use libyuv on mobile devices */
- _rotate = (PIX_FMT_YUV420P == self->dstFormat) && _self->rotation == 90;
-
- // if no rotation then, flip while scaling othersize do it after rotation
- if (!_rotate && _self->flip) {
- _tdav_converter_video_ffmpeg_flip(self->dstFrame, _self->dstHeight);
- }
-
- // chroma conversion and scaling
- ret = sws_scale(self->context, (const uint8_t* const*)self->srcFrame->data, self->srcFrame->linesize, 0, (int)_self->srcHeight,
- self->dstFrame->data, self->dstFrame->linesize);
- if (ret < 0){
- TSK_FREE(*output);
- return 0;
- }
-
- // Rotation
- if (_rotate){
- // because we rotated 90 width = original height, height = original width
- int w = (int)_self->dstHeight;
- int h = (int)_self->dstWidth;
-
- // allocation rotation frame if not already done
- if (!(self->rot.frame) && !(self->rot.frame = avcodec_alloc_frame())){
- TSK_DEBUG_ERROR("failed to allocate rotation frame");
- TSK_FREE(*output);
- return(0);
- }
-
- // allocate rotation temporary buffer
- size = avpicture_get_size(self->dstFormat, w, h);
- if (self->rot.buffer_size != size){
- if (!(self->rot.buffer = (uint8_t *)av_realloc(self->rot.buffer, size))){
- TSK_DEBUG_ERROR("failed to allocate new buffer for the frame");
- self->rot.buffer_size = 0;
- return(0);
- }
- self->rot.buffer_size = size;
- }
-
- //wrap
- avpicture_fill((AVPicture *)self->rot.frame, self->rot.buffer, self->dstFormat, w, h);
- // rotate
- _tdav_converter_video_ffmpeg_rotate90(_self->dstWidth, _self->dstHeight, self->dstFrame->data[0], self->rot.frame->data[0]);
- _tdav_converter_video_ffmpeg_rotate90((_self->dstWidth >> 1), (_self->dstHeight >> 1), self->dstFrame->data[1], self->rot.frame->data[1]);
- _tdav_converter_video_ffmpeg_rotate90((_self->dstWidth >> 1), (_self->dstHeight >> 1), self->dstFrame->data[2], self->rot.frame->data[2]);
- // flip
- if (_self->flip){
- _tdav_converter_video_ffmpeg_flip(self->rot.frame, h);
- }
-
- {
- static const int y_shift = 1;
- static const int x_shift = 1;
- int r_size, r_w, r_h, left_band, top_band;
- int pad = ((int)_self->dstWidth - w) > ((int)_self->dstHeight - h) ? ((int)_self->dstWidth - w) : ((int)_self->dstHeight - h);
- if (pad < 0){
- pad = 0;
- }
- r_size;
- r_w = w + pad;
- r_h = h + pad;
- left_band = (int)((r_w - _self->dstWidth) / 2);
- top_band = (int)((r_h - _self->dstHeight) / 3);
-
- if (!self->rot.context){
- if (!(self->rot.context = sws_getContext(w, h, self->dstFormat, r_w, r_h, self->dstFormat, SWS_FAST_BILINEAR, NULL, NULL, NULL))){
- TSK_DEBUG_ERROR("Failed to create context");
- TSK_FREE(*output);
- return 0;
- }
- }
-
- r_size = avpicture_get_size(self->dstFormat, r_w, r_h);
- if ((int)*output_max_size < r_size){
- if (!(*output = tsk_realloc(*output, (r_size + FF_INPUT_BUFFER_PADDING_SIZE)))){
- *output_max_size = 0;
- TSK_DEBUG_ERROR("Failed to allocate buffer");
- return 0;
- }
- *output_max_size = r_size;
- }
-
- // re-wrap
- avpicture_fill((AVPicture *)self->dstFrame, (uint8_t*)*output, self->dstFormat, r_w, r_h);
-
- // pad
- sws_scale(self->rot.context, (const uint8_t* const*)self->rot.frame->data, self->rot.frame->linesize,
- 0, h, self->dstFrame->data, self->dstFrame->linesize);
-
- // crop
- self->dstFrame->data[0] = self->dstFrame->data[0] + (top_band * self->dstFrame->linesize[0]) + left_band;
- self->dstFrame->data[1] = self->dstFrame->data[1] + ((top_band >> y_shift) * self->dstFrame->linesize[1]) + (left_band >> x_shift);
- self->dstFrame->data[2] = self->dstFrame->data[2] + ((top_band >> y_shift) * self->dstFrame->linesize[2]) + (left_band >> x_shift);
-
- avpicture_layout((const AVPicture*)self->dstFrame, self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight, (unsigned char *)*output, (int)*output_max_size);
- }
-
- }//end of rotation
-
- return size;
+ int ret, size;
+ tsk_bool_t _rotate = tsk_false;
+ tdav_converter_video_ffmpeg_t* self = TDAV_CONVERTER_VIDEO_FFMPEG(_self);
+
+ if (!self || !buffer || !output) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return 0;
+ }
+
+ /* Pictures */
+ if (!self->srcFrame) {
+ if (!(self->srcFrame = avcodec_alloc_frame())) {
+ TSK_DEBUG_ERROR("Failed to create picture");
+ return 0;
+ }
+ }
+ if (!self->dstFrame) {
+ if (!(self->dstFrame = avcodec_alloc_frame())) {
+ TSK_DEBUG_ERROR("Failed to create picture");
+ return 0;
+ }
+ }
+
+ size = avpicture_get_size(self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight);
+ if ((int)*output_max_size < size) {
+ if (!(*output = tsk_realloc(*output, (size + FF_INPUT_BUFFER_PADDING_SIZE)))) {
+ *output_max_size = 0;
+ TSK_DEBUG_ERROR("Failed to allocate buffer");
+ return 0;
+ }
+ *output_max_size = size;
+ }
+
+ /* Wrap the source buffer */
+ ret = avpicture_fill((AVPicture *)self->srcFrame, (uint8_t*)buffer, self->srcFormat, (int)_self->srcWidth, (int)_self->srcHeight);
+ /* Wrap the destination buffer */
+ ret = avpicture_fill((AVPicture *)self->dstFrame, (uint8_t*)*output, self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight);
+
+ /* === performs conversion === */
+ /* Context */
+ if (!self->context) {
+ self->context = sws_getContext(
+ (int)_self->srcWidth, (int)_self->srcHeight, self->srcFormat,
+ (int)_self->dstWidth, (int)_self->dstHeight, self->dstFormat,
+ SWS_FAST_BILINEAR, NULL, NULL, NULL);
+
+ if (!self->context) {
+ TSK_DEBUG_ERROR("Failed to create context");
+ return 0;
+ }
+ }
+
+    /*FIXME: For now only 90° rotation is supported; this is why we always use libyuv on mobile devices */
+ _rotate = (PIX_FMT_YUV420P == self->dstFormat) && _self->rotation == 90;
+
+    // if no rotation, then flip while scaling; otherwise do it after rotation
+ if (!_rotate && _self->flip) {
+ _tdav_converter_video_ffmpeg_flip(self->dstFrame, _self->dstHeight);
+ }
+
+ // chroma conversion and scaling
+ ret = sws_scale(self->context, (const uint8_t* const*)self->srcFrame->data, self->srcFrame->linesize, 0, (int)_self->srcHeight,
+ self->dstFrame->data, self->dstFrame->linesize);
+ if (ret < 0) {
+ TSK_FREE(*output);
+ return 0;
+ }
+
+ // Rotation
+ if (_rotate) {
+        // because we rotated by 90°: width = original height, height = original width
+ int w = (int)_self->dstHeight;
+ int h = (int)_self->dstWidth;
+
+        // allocate rotation frame if not already done
+ if (!(self->rot.frame) && !(self->rot.frame = avcodec_alloc_frame())) {
+ TSK_DEBUG_ERROR("failed to allocate rotation frame");
+ TSK_FREE(*output);
+ return(0);
+ }
+
+ // allocate rotation temporary buffer
+ size = avpicture_get_size(self->dstFormat, w, h);
+ if (self->rot.buffer_size != size) {
+ if (!(self->rot.buffer = (uint8_t *)av_realloc(self->rot.buffer, size))) {
+ TSK_DEBUG_ERROR("failed to allocate new buffer for the frame");
+ self->rot.buffer_size = 0;
+ return(0);
+ }
+ self->rot.buffer_size = size;
+ }
+
+ //wrap
+ avpicture_fill((AVPicture *)self->rot.frame, self->rot.buffer, self->dstFormat, w, h);
+ // rotate
+ _tdav_converter_video_ffmpeg_rotate90(_self->dstWidth, _self->dstHeight, self->dstFrame->data[0], self->rot.frame->data[0]);
+ _tdav_converter_video_ffmpeg_rotate90((_self->dstWidth >> 1), (_self->dstHeight >> 1), self->dstFrame->data[1], self->rot.frame->data[1]);
+ _tdav_converter_video_ffmpeg_rotate90((_self->dstWidth >> 1), (_self->dstHeight >> 1), self->dstFrame->data[2], self->rot.frame->data[2]);
+ // flip
+ if (_self->flip) {
+ _tdav_converter_video_ffmpeg_flip(self->rot.frame, h);
+ }
+
+ {
+ static const int y_shift = 1;
+ static const int x_shift = 1;
+ int r_size, r_w, r_h, left_band, top_band;
+ int pad = ((int)_self->dstWidth - w) > ((int)_self->dstHeight - h) ? ((int)_self->dstWidth - w) : ((int)_self->dstHeight - h);
+ if (pad < 0) {
+ pad = 0;
+ }
+ r_size;
+ r_w = w + pad;
+ r_h = h + pad;
+ left_band = (int)((r_w - _self->dstWidth) / 2);
+ top_band = (int)((r_h - _self->dstHeight) / 3);
+
+ if (!self->rot.context) {
+ if (!(self->rot.context = sws_getContext(w, h, self->dstFormat, r_w, r_h, self->dstFormat, SWS_FAST_BILINEAR, NULL, NULL, NULL))) {
+ TSK_DEBUG_ERROR("Failed to create context");
+ TSK_FREE(*output);
+ return 0;
+ }
+ }
+
+ r_size = avpicture_get_size(self->dstFormat, r_w, r_h);
+ if ((int)*output_max_size < r_size) {
+ if (!(*output = tsk_realloc(*output, (r_size + FF_INPUT_BUFFER_PADDING_SIZE)))) {
+ *output_max_size = 0;
+ TSK_DEBUG_ERROR("Failed to allocate buffer");
+ return 0;
+ }
+ *output_max_size = r_size;
+ }
+
+ // re-wrap
+ avpicture_fill((AVPicture *)self->dstFrame, (uint8_t*)*output, self->dstFormat, r_w, r_h);
+
+ // pad
+ sws_scale(self->rot.context, (const uint8_t* const*)self->rot.frame->data, self->rot.frame->linesize,
+ 0, h, self->dstFrame->data, self->dstFrame->linesize);
+
+ // crop
+ self->dstFrame->data[0] = self->dstFrame->data[0] + (top_band * self->dstFrame->linesize[0]) + left_band;
+ self->dstFrame->data[1] = self->dstFrame->data[1] + ((top_band >> y_shift) * self->dstFrame->linesize[1]) + (left_band >> x_shift);
+ self->dstFrame->data[2] = self->dstFrame->data[2] + ((top_band >> y_shift) * self->dstFrame->linesize[2]) + (left_band >> x_shift);
+
+ avpicture_layout((const AVPicture*)self->dstFrame, self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight, (unsigned char *)*output, (int)*output_max_size);
+ }
+
+ }//end of rotation
+
+ return size;
}
@@ -773,58 +774,56 @@ static tsk_size_t tdav_converter_video_ffmpeg_process(tmedia_converter_video_t*
//
static tsk_object_t* tdav_converter_video_ffmpeg_ctor(tsk_object_t * self, va_list * app)
{
- tdav_converter_video_ffmpeg_t *converter = (tdav_converter_video_ffmpeg_t *)self;
- if(converter){
+ tdav_converter_video_ffmpeg_t *converter = (tdav_converter_video_ffmpeg_t *)self;
+ if(converter) {
- }
- return self;
+ }
+ return self;
}
static tsk_object_t* tdav_converter_video_ffmpeg_dtor(tsk_object_t * self)
-{
- tdav_converter_video_ffmpeg_t *converter = (tdav_converter_video_ffmpeg_t *)self;
- if(converter){
- if(converter->context){
- sws_freeContext(converter->context);
- }
- if(converter->srcFrame){
- av_free(converter->srcFrame);
- }
- if(converter->dstFrame){
- av_free(converter->dstFrame);
- }
-
- // Rotation
- if(converter->rot.context){
- sws_freeContext(converter->rot.context);
- }
- if(converter->rot.frame){
- av_free(converter->rot.frame);
- }
- if(converter->rot.buffer){
- av_free(converter->rot.buffer);
- }
- }
-
- return self;
+{
+ tdav_converter_video_ffmpeg_t *converter = (tdav_converter_video_ffmpeg_t *)self;
+ if(converter) {
+ if(converter->context) {
+ sws_freeContext(converter->context);
+ }
+ if(converter->srcFrame) {
+ av_free(converter->srcFrame);
+ }
+ if(converter->dstFrame) {
+ av_free(converter->dstFrame);
+ }
+
+ // Rotation
+ if(converter->rot.context) {
+ sws_freeContext(converter->rot.context);
+ }
+ if(converter->rot.frame) {
+ av_free(converter->rot.frame);
+ }
+ if(converter->rot.buffer) {
+ av_free(converter->rot.buffer);
+ }
+ }
+
+ return self;
}
-static const tsk_object_def_t tdav_converter_video_ffmpeg_def_s =
-{
- sizeof(tdav_converter_video_ffmpeg_t),
- tdav_converter_video_ffmpeg_ctor,
- tdav_converter_video_ffmpeg_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_converter_video_ffmpeg_def_s = {
+ sizeof(tdav_converter_video_ffmpeg_t),
+ tdav_converter_video_ffmpeg_ctor,
+ tdav_converter_video_ffmpeg_dtor,
+ tsk_null,
};
const tsk_object_def_t *tdav_converter_video_ffmpeg_def_t = &tdav_converter_video_ffmpeg_def_s;
/* plugin definition*/
-static const tmedia_converter_video_plugin_def_t tdav_converter_video_ffmpeg_plugin_def_s =
-{
- &tdav_converter_video_ffmpeg_def_s,
+static const tmedia_converter_video_plugin_def_t tdav_converter_video_ffmpeg_plugin_def_s = {
+ &tdav_converter_video_ffmpeg_def_s,
- tdav_converter_video_ffmpeg_init,
- tdav_converter_video_ffmpeg_process
+ tdav_converter_video_ffmpeg_init,
+ tdav_converter_video_ffmpeg_process
};
const tmedia_converter_video_plugin_def_t *tdav_converter_video_ffmpeg_plugin_def_t = &tdav_converter_video_ffmpeg_plugin_def_s;
diff --git a/tinyDAV/src/video/tdav_runnable_video.c b/tinyDAV/src/video/tdav_runnable_video.c
index c8102ea..cacc2f2 100755
--- a/tinyDAV/src/video/tdav_runnable_video.c
+++ b/tinyDAV/src/video/tdav_runnable_video.c
@@ -2,19 +2,19 @@
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@@ -33,33 +33,33 @@
tdav_runnable_video_t* tdav_runnable_video_create(tsk_runnable_func_run run_f, const void* userdata)
{
- tdav_runnable_video_t* runnable;
+ tdav_runnable_video_t* runnable;
- if((runnable = tsk_object_new(tdav_runnable_video_def_t))){
- TSK_RUNNABLE(runnable)->run = run_f;
- runnable->userdata = userdata;
- }
- return runnable;
+ if((runnable = tsk_object_new(tdav_runnable_video_def_t))) {
+ TSK_RUNNABLE(runnable)->run = run_f;
+ runnable->userdata = userdata;
+ }
+ return runnable;
}
int tdav_runnable_video_start(tdav_runnable_video_t* self)
{
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return tsk_runnable_start(TSK_RUNNABLE(self), tsk_buffer_def_t);
+ return tsk_runnable_start(TSK_RUNNABLE(self), tsk_buffer_def_t);
}
int tdav_runnable_video_stop(tdav_runnable_video_t* self)
{
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return tsk_runnable_stop(TSK_RUNNABLE(self));
+ return tsk_runnable_stop(TSK_RUNNABLE(self));
}
@@ -68,28 +68,27 @@ int tdav_runnable_video_stop(tdav_runnable_video_t* self)
//
static tsk_object_t* tdav_runnable_video_ctor(tsk_object_t * self, va_list * app)
{
- tdav_runnable_video_t *runnable = self;
- if(runnable){
-
- }
- return self;
+ tdav_runnable_video_t *runnable = self;
+ if(runnable) {
+
+ }
+ return self;
}
static tsk_object_t* tdav_runnable_video_dtor(tsk_object_t * self)
-{
- tdav_runnable_video_t *runnable = self;
- if(runnable){
- tsk_runnable_stop(TSK_RUNNABLE(runnable));
- }
+{
+ tdav_runnable_video_t *runnable = self;
+ if(runnable) {
+ tsk_runnable_stop(TSK_RUNNABLE(runnable));
+ }
- return self;
+ return self;
}
-static const tsk_object_def_t tdav_runnable_video_def_s =
-{
- sizeof(tdav_runnable_video_t),
- tdav_runnable_video_ctor,
- tdav_runnable_video_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_runnable_video_def_s = {
+ sizeof(tdav_runnable_video_t),
+ tdav_runnable_video_ctor,
+ tdav_runnable_video_dtor,
+ tsk_null,
};
const tsk_object_def_t *tdav_runnable_video_def_t = &tdav_runnable_video_def_s;
diff --git a/tinyDAV/src/video/tdav_session_video.c b/tinyDAV/src/video/tdav_session_video.c
index 4ee6812..f773ede 100755
--- a/tinyDAV/src/video/tdav_session_video.c
+++ b/tinyDAV/src/video/tdav_session_video.c
@@ -1,6 +1,6 @@
/*
- * Copyright (C) 2010-2014 Mamadou DIOP.
- * Copyright (C) 2011-2014 Doubango Telecom.
+ * Copyright (C) 2010-2016 Mamadou DIOP.
+ * Copyright (C) 2011-2016 Doubango Telecom.
*
*
* This file is part of Open Source Doubango Framework.
@@ -62,25 +62,12 @@
# define TDAV_SESSION_VIDEO_AVPF_FIR_REQUEST_INTERVAL_MIN 1500 // millis
#endif
-#define TDAV_SESSION_VIDEO_PKT_LOSS_PROB_BAD 2
-#define TDAV_SESSION_VIDEO_PKT_LOSS_PROB_GOOD 6
-#define TDAV_SESSION_VIDEO_PKT_LOSS_FACT_MIN 0
-#define TDAV_SESSION_VIDEO_PKT_LOSS_FACT_MAX 8
-#define TDAV_SESSION_VIDEO_PKT_LOSS_LOW 9
-#define TDAV_SESSION_VIDEO_PKT_LOSS_MEDIUM 22
-#define TDAV_SESSION_VIDEO_PKT_LOSS_HIGH 63
-
-#if !defined(TDAV_SESSION_VIDEO_PKT_LOSS_NO_REPORT_BEFORE_INCREASING_BW)
-# define TDAV_SESSION_VIDEO_PKT_LOSS_NO_REPORT_BEFORE_INCREASING_BW 5000 // millis
-#endif
+// Interval to compute average quality metrics
+#define TDAV_SESSION_VIDEO_QOS_COMPUTE_INTERVAL 3000
// The maximum number of pakcet loss allowed
#define TDAV_SESSION_VIDEO_PKT_LOSS_MAX_COUNT_TO_REQUEST_FIR 50
-#if !defined (TDAV_GOOG_REMB_FULL_SUPPORT)
-# define TDAV_GOOG_REMB_FULL_SUPPORT 0
-#endif
-
static const tmedia_codec_action_t __action_encode_idr = tmedia_codec_action_encode_idr;
static const tmedia_codec_action_t __action_encode_bw_up = tmedia_codec_action_bw_up;
static const tmedia_codec_action_t __action_encode_bw_down = tmedia_codec_action_bw_down;
@@ -150,6 +137,9 @@ static int _tdav_session_video_jb_cb(const tdav_video_jb_cb_data_xt* data);
static int _tdav_session_video_open_decoder(tdav_session_video_t* self, uint8_t payload_type);
static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp_packet_t* packet);
static int _tdav_session_video_set_callbacks(tmedia_session_t* self);
+static int _tdav_session_video_timer_cb(const void* arg, tsk_timer_id_t timer_id);
+static int _tdav_session_video_get_bw_usage_est(tdav_session_video_t* self, uint64_t* bw_kbps, tsk_bool_t in, tsk_bool_t reset);
+static int _tdav_session_video_report_bw_usage_and_jcng(tdav_session_video_t* self);
// Codec callback (From codec to the network)
// or Producer callback to sendRaw() data "as is"
@@ -161,22 +151,22 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
trtp_rtp_packet_t* packet = tsk_null;
int ret = 0;
tsk_size_t s;
-
- if(base->rtp_manager && base->rtp_manager->is_started){
- if(rtp_header){
+
+ if(base->rtp_manager && base->rtp_manager->is_started) {
+ if(rtp_header) {
// uses negotiated SSRC (SDP)
rtp_header->ssrc = base->rtp_manager->rtp.ssrc.local;
// uses negotiated payload type
- if(base->pt_map.local != base->rtp_manager->rtp.payload_type || base->pt_map.remote != rtp_header->payload_type || base->pt_map.neg == -1){
- if(rtp_header->codec_id == tmedia_codec_id_none){
+ if(base->pt_map.local != base->rtp_manager->rtp.payload_type || base->pt_map.remote != rtp_header->payload_type || base->pt_map.neg == -1) {
+ if(rtp_header->codec_id == tmedia_codec_id_none) {
TSK_DEBUG_WARN("Internal codec id is equal to none");
}
- else{
+ else {
const tsk_list_item_t* item;
tsk_bool_t found = tsk_false;
tsk_list_lock(TMEDIA_SESSION(base)->neg_codecs);
- tsk_list_foreach(item, TMEDIA_SESSION(base)->neg_codecs){
- if((item->data) && ((const tmedia_codec_t*)item->data)->id == rtp_header->codec_id){
+ tsk_list_foreach(item, TMEDIA_SESSION(base)->neg_codecs) {
+ if((item->data) && ((const tmedia_codec_t*)item->data)->id == rtp_header->codec_id) {
base->pt_map.local = base->rtp_manager->rtp.payload_type;
base->pt_map.remote = rtp_header->payload_type;
base->pt_map.neg = atoi(((const tmedia_codec_t*)item->data)->neg_format);
@@ -185,10 +175,10 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
}
}
tsk_list_unlock(TMEDIA_SESSION(base)->neg_codecs);
- if(found){
+ if(found) {
TSK_DEBUG_INFO("Codec PT mapping: local=%d, remote=%d, neg=%d", base->pt_map.local, base->pt_map.remote, base->pt_map.neg);
}
- else{
+ else {
TSK_DEBUG_ERROR("Failed to map codec PT: local=%d, remote=%d", base->rtp_manager->rtp.payload_type, rtp_header->payload_type);
}
}
@@ -196,12 +186,12 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
rtp_header->payload_type = base->pt_map.neg;
}
packet = rtp_header
- ? trtp_rtp_packet_create_2(rtp_header)
- : trtp_rtp_packet_create(base->rtp_manager->rtp.ssrc.local, base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, base->rtp_manager->rtp.payload_type, result->last_chunck);
-
- if(packet ){
+ ? trtp_rtp_packet_create_2(rtp_header)
+ : trtp_rtp_packet_create(base->rtp_manager->rtp.ssrc.local, base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, base->rtp_manager->rtp.payload_type, result->last_chunck);
+
+ if(packet ) {
tsk_size_t rtp_hdr_size;
- if(result->last_chunck){
+ if(result->last_chunck) {
#if 1
#if 1
/* http://www.cs.columbia.edu/~hgs/rtp/faq.html#timestamp-computed
@@ -210,14 +200,14 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
If a frame is transmitted as several RTP packets, these packets would all bear the same timestamp.
If the frame number cannot be determined or if frames are sampled aperiodically, as is typically the case for software codecs, the timestamp has to be computed from the system clock (e.g., gettimeofday())
*/
-
- if(!video->encoder.last_frame_time){
+
+ if(!video->encoder.last_frame_time) {
// For the first frame it's not possible to compute the duration as there is no previous one.
// In this case, we trust the duration from the result (computed based on the codec fps and rate).
video->encoder.last_frame_time = tsk_time_now();
base->rtp_manager->rtp.timestamp += result->duration;
}
- else{
+ else {
uint64_t now = tsk_time_now();
uint32_t duration = (uint32_t)(now - video->encoder.last_frame_time);
base->rtp_manager->rtp.timestamp += (duration * 90/* 90KHz */);
@@ -229,9 +219,9 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
#else
base->rtp_manager->rtp.timestamp += result->duration;
#endif
-
+
}
-
+
packet->payload.data_const = result->buffer.ptr;
packet->payload.size = result->buffer.size;
s = trtp_manager_send_rtp_packet(base->rtp_manager, packet, tsk_false); // encrypt and send data
@@ -240,14 +230,14 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
// without audio session iOS "audio" background mode is useless and UDP sockets will be closed: e.g. GE's video-only sessions
#if TDAV_UNDER_IPHONE
if (tnet_geterrno() == TNET_ERROR_BROKENPIPE) {
- TSK_DEBUG_INFO("iOS UDP pipe is broken (restoration is progress): failed to send packet with seqnum=%u. %u expected but only %u sent", (unsigned)packet->header->seq_num, (unsigned)packet->payload.size, (unsigned)s);
+ TSK_DEBUG_INFO("iOS UDP pipe is broken (restoration in progress): failed to send packet with seqnum=%u. %u expected but only %u sent", (unsigned)packet->header->seq_num, (unsigned)packet->payload.size, (unsigned)s);
}
#endif /* TDAV_UNDER_IPHONE */
TSK_DEBUG_ERROR("Failed to send packet with seqnum=%u. %u expected but only %u sent", (unsigned)packet->header->seq_num, (unsigned)packet->payload.size, (unsigned)s);
// save data expected to be sent in order to honor RTCP-NACK requests
s = base->rtp_manager->rtp.serial_buffer.index;
}
-
+
rtp_hdr_size = TRTP_RTP_HEADER_MIN_SIZE + (packet->header->csrc_count << 2);
// Save packet
if (base->avpf_mode_neg && (s > TRTP_RTP_HEADER_MIN_SIZE)) {
@@ -256,19 +246,19 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
// Hack the RTP packet payload to point to the the SRTP data instead of unencrypted ptr
packet_avpf->payload.size = (s - rtp_hdr_size);
packet_avpf->payload.data_const = tsk_null;
- if(!(packet_avpf->payload.data = tsk_malloc(packet_avpf->payload.size))){// FIXME: to be optimized (reuse memory address)
+ if(!(packet_avpf->payload.data = tsk_malloc(packet_avpf->payload.size))) { // FIXME: to be optimized (reuse memory address)
TSK_DEBUG_ERROR("failed to allocate buffer");
goto bail;
}
memcpy(packet_avpf->payload.data, (((const uint8_t*)base->rtp_manager->rtp.serial_buffer.ptr) + rtp_hdr_size), packet_avpf->payload.size);
tsk_list_lock(video->avpf.packets);
- if(video->avpf.count > video->avpf.max){
+ if(video->avpf.count > video->avpf.max) {
tsk_list_remove_first_item(video->avpf.packets);
}
- else{
+ else {
++video->avpf.count;
}
-
+
// The packet must not added 'ascending' but 'back' because the sequence number coult wrap
// For example:
// - send(65533, 65534, 65535, 0, 1)
@@ -278,19 +268,19 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
tsk_list_push_back_data(video->avpf.packets, (void**)&packet_avpf);
tsk_list_unlock(video->avpf.packets);
}
-
+
// Send FEC packet
// FIXME: protect only Intra and Params packets
- if(base->ulpfec.codec && (s > TRTP_RTP_HEADER_MIN_SIZE)){
+ if(base->ulpfec.codec && (s > TRTP_RTP_HEADER_MIN_SIZE)) {
packet->payload.data_const = (((const uint8_t*)base->rtp_manager->rtp.serial_buffer.ptr) + rtp_hdr_size);
packet->payload.size = (s - rtp_hdr_size);
ret = tdav_codec_ulpfec_enc_protect((struct tdav_codec_ulpfec_s*)base->ulpfec.codec, packet);
- if(result->last_chunck){
+ if(result->last_chunck) {
trtp_rtp_packet_t* packet_fec;
- if((packet_fec = trtp_rtp_packet_create(base->rtp_manager->rtp.ssrc.local, base->ulpfec.seq_num++, base->ulpfec.timestamp, base->ulpfec.payload_type, tsk_true))){
+ if((packet_fec = trtp_rtp_packet_create(base->rtp_manager->rtp.ssrc.local, base->ulpfec.seq_num++, base->ulpfec.timestamp, base->ulpfec.payload_type, tsk_true))) {
// serialize the FEC payload packet packet
s = tdav_codec_ulpfec_enc_serialize((const struct tdav_codec_ulpfec_s*)base->ulpfec.codec, &video->encoder.buffer, &video->encoder.buffer_size);
- if(s > 0){
+ if(s > 0) {
packet_fec->payload.data_const = video->encoder.buffer;
packet_fec->payload.size = s;
s = trtp_manager_send_rtp_packet(base->rtp_manager, packet_fec, tsk_true/* already encrypted */);
@@ -306,11 +296,11 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
if (ret == 0 && video->red.codec) {
// don't need to lock as the buffer is never used by other codecs
tsk_size_t red_pay_size = video->red.codec->plugin->encode(
- video->red.codec,
- buffer, size,
- &video->encoder.buffer, &video->encoder.buffer_size
- );
- if(red_pay_size > 1){
+ video->red.codec,
+ buffer, size,
+ &video->encoder.buffer, &video->encoder.buffer_size
+ );
+ if(red_pay_size > 1) {
packet->header->payload_type = video->red.payload_type;
((uint8_t*)video->encoder.buffer)[0] = packet->header->payload_type;
packet->payload.data_const = video->encoder.buffer;
@@ -324,10 +314,10 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
TSK_DEBUG_ERROR("Failed to create packet");
}
}
- else{
+ else {
//--TSK_DEBUG_WARN("Session not ready yet");
}
-
+
bail:
TSK_OBJECT_SAFE_FREE(packet);
return ret;
@@ -338,26 +328,25 @@ static int tdav_session_video_decode_cb(const tmedia_video_decode_result_xt* res
{
tdav_session_av_t* base = (tdav_session_av_t*)result->usr_data;
tdav_session_video_t* video = (tdav_session_video_t*)base;
-
- switch(result->type){
- case tmedia_video_decode_result_type_idr:
- {
- if(video->decoder.last_corrupted_timestamp != ((const trtp_rtp_header_t*)result->proto_hdr)->timestamp){
- TSK_DEBUG_INFO("IDR frame decoded");
- video->decoder.stream_corrupted = tsk_false;
- }
- else{
- TSK_DEBUG_INFO("IDR frame decoded but corrupted :(");
- }
- break;
+
+ switch(result->type) {
+ case tmedia_video_decode_result_type_idr: {
+ if(video->decoder.last_corrupted_timestamp != ((const trtp_rtp_header_t*)result->proto_hdr)->timestamp) {
+ TSK_DEBUG_INFO("IDR frame decoded");
+ video->decoder.stream_corrupted = tsk_false;
}
- case tmedia_video_decode_result_type_error:
- {
- TSK_DEBUG_INFO("Decoding failed -> request Full Intra Refresh (FIR)");
- _tdav_session_video_local_request_idr(TMEDIA_SESSION(video), "DECODED_FAILED", ((const trtp_rtp_header_t*)result->proto_hdr)->ssrc);
- break;
+ else {
+ TSK_DEBUG_INFO("IDR frame decoded but corrupted :(");
}
- default: break;
+ break;
+ }
+ case tmedia_video_decode_result_type_error: {
+ TSK_DEBUG_INFO("Decoding failed -> request Full Intra Refresh (FIR)");
+ _tdav_session_video_local_request_idr(TMEDIA_SESSION(video), "DECODED_FAILED", ((const trtp_rtp_header_t*)result->proto_hdr)->ssrc);
+ break;
+ }
+ default:
+ break;
}
return 0;
}
@@ -367,26 +356,27 @@ static int tdav_session_video_producer_enc_cb(const void* callback_data, const v
{
tdav_session_video_t* video = (tdav_session_video_t*)callback_data;
tdav_session_av_t* base = (tdav_session_av_t*)callback_data;
+ tmedia_session_t* session = (tmedia_session_t*)callback_data;
tsk_size_t yuv420p_size = 0;
int ret = 0;
-
- if(!base){
+
+ if(!base) {
TSK_DEBUG_ERROR("Null session");
return 0;
}
-
+
// do nothing if session is held
// when the session is held the end user will get feedback he also has possibilities to put the consumer and producer on pause
- if (TMEDIA_SESSION(base)->lo_held) {
+ if (session->lo_held) {
return 0;
}
-
+
// do nothing if not started yet
if (!video->started) {
TSK_DEBUG_INFO("Video session not started yet");
return 0;
}
-
+
// get best negotiated codec if not already done
// the encoder codec could be null when session is renegotiated without re-starting (e.g. hold/resume)
if (!video->encoder.codec) {
@@ -400,25 +390,28 @@ static int tdav_session_video_producer_enc_cb(const void* callback_data, const v
video->encoder.codec = tsk_object_ref(TSK_OBJECT(codec));
tsk_safeobj_unlock(base);
}
-
+
if (base->rtp_manager) {
//static int __rotation_counter = 0;
/* encode */
tsk_size_t out_size = 0;
tmedia_codec_t* codec_encoder = tsk_null;
-
+ uint64_t encode_start_time, encode_duration; // This time covers chroma conversion, scaling and encoding
+
if (!base->rtp_manager->is_started) {
TSK_DEBUG_ERROR("Not started");
goto bail;
}
-
+
// take a reference to the encoder to make sure it'll not be destroyed while we're using it
codec_encoder = tsk_object_ref(video->encoder.codec);
if (!codec_encoder) {
TSK_DEBUG_ERROR("The encoder is null");
goto bail;
}
-
+
+ encode_start_time = tsk_time_now();
+
#define PRODUCER_OUTPUT_FIXSIZE (base->producer->video.chroma != tmedia_chroma_mjpeg) // whether the output data has a fixed size/length
#define PRODUCER_OUTPUT_RAW (base->producer->encoder.codec_id == tmedia_codec_id_none) // Otherwise, frames from the producer are already encoded
#define PRODUCER_SIZE_CHANGED ((video->conv.producerWidth && video->conv.producerWidth != base->producer->video.width) || (video->conv.producerHeight && video->conv.producerHeight != base->producer->video.height) \
@@ -431,15 +424,15 @@ static int tdav_session_video_producer_enc_cb(const void* callback_data, const v
// Video codecs only accept YUV420P buffers ==> do conversion if needed or producer doesn't have the right size
if (PRODUCER_OUTPUT_RAW && (PRODUCED_FRAME_NEED_CHROMA_CONVERSION || PRODUCER_SIZE_CHANGED || ENCODED_NEED_FLIP || ENCODED_NEED_RESIZE ||PRODUCED_FRAME_NEED_ROTATION || PRODUCED_FRAME_NEED_MIRROR)) {
// Create video converter if not already done or producer size have changed
- if(!video->conv.toYUV420 || PRODUCER_SIZE_CHANGED){
+ if(!video->conv.toYUV420 || PRODUCER_SIZE_CHANGED) {
TSK_OBJECT_SAFE_FREE(video->conv.toYUV420);
video->conv.producerWidth = base->producer->video.width;
video->conv.producerHeight = base->producer->video.height;
video->conv.xProducerSize = size;
-
+
TSK_DEBUG_INFO("producer size = (%d, %d)", (int)base->producer->video.width, (int)base->producer->video.height);
if (!(video->conv.toYUV420 = tmedia_converter_video_create(base->producer->video.width, base->producer->video.height, base->producer->video.chroma, TMEDIA_CODEC_VIDEO(codec_encoder)->out.width, TMEDIA_CODEC_VIDEO(codec_encoder)->out.height,
- TMEDIA_CODEC_VIDEO(codec_encoder)->out.chroma))){
+ TMEDIA_CODEC_VIDEO(codec_encoder)->out.chroma))) {
TSK_DEBUG_ERROR("Failed to create video converter");
ret = -5;
goto bail;
@@ -448,19 +441,19 @@ static int tdav_session_video_producer_enc_cb(const void* callback_data, const v
tmedia_converter_video_set_scale_rotated_frames(video->conv.toYUV420, video->encoder.scale_rotated_frames);
}
}
-
- if(video->conv.toYUV420){
+
+ if(video->conv.toYUV420) {
video->encoder.scale_rotated_frames = video->conv.toYUV420->scale_rotated_frames;
// check if rotation have changed and alert the codec
// we avoid scalling the frame after rotation because it's CPU intensive and keeping the image ratio is difficult
// it's up to the encoder to swap (w,h) and to track the rotation value
- if(video->encoder.rotation != base->producer->video.rotation){
+ if(video->encoder.rotation != base->producer->video.rotation) {
tmedia_param_t* param = tmedia_param_create(tmedia_pat_set,
- tmedia_video,
- tmedia_ppt_codec,
- tmedia_pvt_int32,
- "rotation",
- (void*)&base->producer->video.rotation);
+ tmedia_video,
+ tmedia_ppt_codec,
+ tmedia_pvt_int32,
+ "rotation",
+ (void*)&base->producer->video.rotation);
if (!param) {
TSK_DEBUG_ERROR("Failed to create a media parameter");
return -1;
@@ -471,10 +464,10 @@ static int tdav_session_video_producer_enc_cb(const void* callback_data, const v
// (ret != 0) -> not supported by the codec -> to be done by the converter
video->encoder.scale_rotated_frames = (ret != 0);
}
-
+
// update one-shot parameters
tmedia_converter_video_set(video->conv.toYUV420, base->producer->video.rotation, TMEDIA_CODEC_VIDEO(codec_encoder)->out.flip, base->producer->video.mirror, video->encoder.scale_rotated_frames);
-
+
yuv420p_size = tmedia_converter_video_process(video->conv.toYUV420, buffer, size, &video->encoder.conv_buffer, &video->encoder.conv_buffer_size);
if (!yuv420p_size || !video->encoder.conv_buffer) {
TSK_DEBUG_ERROR("Failed to convert XXX buffer to YUV42P");
@@ -482,7 +475,7 @@ static int tdav_session_video_producer_enc_cb(const void* callback_data, const v
goto bail;
}
}
-
+
// Encode data
tsk_mutex_lock(video->encoder.h_mutex);
if (video->started && codec_encoder->opened) { // stop() function locks the encoder mutex before changing "started"
@@ -496,19 +489,27 @@ static int tdav_session_video_producer_enc_cb(const void* callback_data, const v
}
}
tsk_mutex_unlock(video->encoder.h_mutex);
-
+
if (out_size) {
/* Never called, see tdav_session_video_raw_cb() */
trtp_manager_send_rtp(base->rtp_manager, video->encoder.buffer, out_size, 6006, tsk_true, tsk_true);
}
- bail:
+
+ // This block of code is called on success, otherwise we'd jump to bail
+ encode_duration = (tsk_time_now() - encode_start_time);
+ if (base->congestion_ctrl_enabled) {
+ tsk_mutex_lock(video->h_mutex_qos);
+ session->qos_metrics.video_enc_avg_time = (unsigned)((session->qos_metrics.video_enc_avg_time + encode_duration) / (video->enc_avg_time_n++ ? 2 : 1));
+ tsk_mutex_unlock(video->h_mutex_qos);
+ }
+bail:
TSK_OBJECT_SAFE_FREE(codec_encoder);
}
else {
TSK_DEBUG_ERROR("Invalid parameter");
ret = -1;
}
-
+
return ret;
}
@@ -517,40 +518,40 @@ static int tdav_session_video_rtp_cb(const void* callback_data, const trtp_rtp_p
{
tdav_session_video_t* video = (tdav_session_video_t*)callback_data;
tdav_session_av_t* base = (tdav_session_av_t*)callback_data;
-
- if(!video || !packet || !packet->header){
+
+ if(!video || !packet || !packet->header) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
- if(packet->header->payload_type == base->red.payload_type){
+
+ if(packet->header->payload_type == base->red.payload_type) {
static void* __red_buffer_ptr = tsk_null; // Never used
static tsk_size_t __red_buffer_size = 0; // Never used
- if(!base->red.codec){
+ if(!base->red.codec) {
TSK_DEBUG_ERROR("No RED codec could be found");
return -2;
}
// Decode RED data
base->red.codec->plugin->decode(
- base->red.codec,
- (packet->payload.data ? packet->payload.data : packet->payload.data_const), packet->payload.size,
- &__red_buffer_ptr, &__red_buffer_size,
- packet->header
- );
+ base->red.codec,
+ (packet->payload.data ? packet->payload.data : packet->payload.data_const), packet->payload.size,
+ &__red_buffer_ptr, &__red_buffer_size,
+ packet->header
+ );
return 0;
}
- else if(packet->header->payload_type == base->ulpfec.payload_type){
- if(!base->ulpfec.codec){
+ else if(packet->header->payload_type == base->ulpfec.payload_type) {
+ if(!base->ulpfec.codec) {
TSK_DEBUG_ERROR("No ULPFEC codec could be found");
return -2;
}
// FIXME: do something
return 0;
}
- else{
+ else {
return video->jb
- ? tdav_video_jb_put(video->jb, (trtp_rtp_packet_t*)packet)
- : _tdav_session_video_decode(video, packet);
+ ? tdav_video_jb_put(video->jb, (trtp_rtp_packet_t*)packet)
+ : _tdav_session_video_decode(video, packet);
}
}
@@ -561,200 +562,167 @@ static int tdav_session_video_rtcp_cb(const void* callback_data, const trtp_rtcp
const trtp_rtcp_report_psfb_t* psfb;
const trtp_rtcp_report_rtpfb_t* rtpfb;
const trtp_rtcp_rblocks_L_t* blocks = tsk_null;
-
+
tdav_session_video_t* video = (tdav_session_video_t*)callback_data;
tdav_session_av_t* base = (tdav_session_av_t*)callback_data;
+ tmedia_session_t* session = (tmedia_session_t*)callback_data;
tsk_size_t i;
-
+
if((blocks = (packet->header->type == trtp_rtcp_packet_type_rr) ? ((const trtp_rtcp_report_rr_t*)packet)->blocks :
- (packet->header->type == trtp_rtcp_packet_type_sr ? ((const trtp_rtcp_report_sr_t*)packet)->blocks : tsk_null))){
+ (packet->header->type == trtp_rtcp_packet_type_sr ? ((const trtp_rtcp_report_sr_t*)packet)->blocks : tsk_null))) {
const tsk_list_item_t* item;
const trtp_rtcp_rblock_t* block;
- tsk_list_foreach(item, blocks){
- if(!(block = item->data)) continue;
- if(base->rtp_manager->rtp.ssrc.local == block->ssrc){
- tdav_session_video_pkt_loss_level_t pkt_loss_level = tdav_session_video_pkt_loss_level_low;
- TSK_DEBUG_INFO("RTCP pkt loss fraction=%d", block->fraction);
- if(block->fraction > TDAV_SESSION_VIDEO_PKT_LOSS_HIGH) pkt_loss_level = tdav_session_video_pkt_loss_level_high;
- else if(block->fraction > TDAV_SESSION_VIDEO_PKT_LOSS_MEDIUM) pkt_loss_level = tdav_session_video_pkt_loss_level_medium;
- if (pkt_loss_level == tdav_session_video_pkt_loss_level_high || (pkt_loss_level > video->encoder.pkt_loss_level)){ // high or low -> medium
- video->encoder.pkt_loss_level = pkt_loss_level;
- if(video->encoder.pkt_loss_prob_bad-- <= 0){
- int32_t new_pkt_loss_fact = TSK_CLAMP(TDAV_SESSION_VIDEO_PKT_LOSS_FACT_MIN, (video->encoder.pkt_loss_fact + 1), TDAV_SESSION_VIDEO_PKT_LOSS_FACT_MAX);
- if (video->encoder.pkt_loss_fact != new_pkt_loss_fact) {
- TSK_DEBUG_INFO("Downgrade bandwidth %d->%d", video->encoder.pkt_loss_fact, new_pkt_loss_fact);
- video->encoder.pkt_loss_fact = new_pkt_loss_fact;
- _tdav_session_video_bw_down(video);
- }
- _tdav_session_video_reset_loss_prob(video);
- }
- }
- else{
- if (video->encoder.pkt_loss_prob_good-- <= 0) {
- int32_t new_pkt_loss_fact = TSK_CLAMP(TDAV_SESSION_VIDEO_PKT_LOSS_FACT_MIN, (video->encoder.pkt_loss_fact - 1), TDAV_SESSION_VIDEO_PKT_LOSS_FACT_MAX);
- if (video->encoder.pkt_loss_fact != new_pkt_loss_fact) {
- TSK_DEBUG_INFO("Upgrade bandwidth %d->%d", video->encoder.pkt_loss_fact, new_pkt_loss_fact);
- video->encoder.pkt_loss_fact = new_pkt_loss_fact;
- _tdav_session_video_bw_up(video);
- }
- _tdav_session_video_reset_loss_prob(video);
- }
+ tsk_list_foreach(item, blocks) {
+ if(!(block = item->data)) {
+ continue;
+ }
+ if(base->rtp_manager->rtp.ssrc.local == block->ssrc) {
+ TSK_DEBUG_INFO("RTCP pkt loss fraction=%d, congestion_ctrl_enabled=%d", block->fraction, (int)base->congestion_ctrl_enabled);
+ // Global packet loss estimation
+ if (base->congestion_ctrl_enabled) {
+ float q2;
+ q2 = block->fraction == 0 ? 1.f : ((float)block->fraction / 256.f);
+ tsk_mutex_lock(video->h_mutex_qos);
+ session->qos_metrics.q2 = (session->qos_metrics.q2 + q2) / (video->q2_n++ ? 2.f : 1.f);
+ tsk_mutex_unlock(video->h_mutex_qos);
}
break;
}
}
}
-
+
i = 0;
- while((psfb = (const trtp_rtcp_report_psfb_t*)trtp_rtcp_packet_get_at(packet, trtp_rtcp_packet_type_psfb, i++))){
- switch(psfb->fci_type){
- case trtp_rtcp_psfb_fci_type_fir:
- {
- TSK_DEBUG_INFO("Receiving RTCP-FIR (%u)", ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media);
+ while((psfb = (const trtp_rtcp_report_psfb_t*)trtp_rtcp_packet_get_at(packet, trtp_rtcp_packet_type_psfb, i++))) {
+ switch(psfb->fci_type) {
+ case trtp_rtcp_psfb_fci_type_fir: {
+ TSK_DEBUG_INFO("Receiving RTCP-FIR (%u)", ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media);
+ _tdav_session_video_remote_requested_idr(video, ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media);
+ break;
+ }
+ case trtp_rtcp_psfb_fci_type_pli: {
+ uint64_t now;
+ TSK_DEBUG_INFO("Receiving RTCP-PLI (%u)", ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media);
+ now = tsk_time_now();
+ // more than one PLI in 500ms ?
+ // "if" removed because PLI really means codec prediction chain is broken
+ /*if((now - video->avpf.last_pli_time) < 500)*/{
_tdav_session_video_remote_requested_idr(video, ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media);
- break;
}
- case trtp_rtcp_psfb_fci_type_pli:
- {
- uint64_t now;
- TSK_DEBUG_INFO("Receiving RTCP-PLI (%u)", ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media);
- now = tsk_time_now();
- // more than one PLI in 500ms ?
- // "if" removed because PLI really means codec prediction chain is broken
- /*if((now - video->avpf.last_pli_time) < 500)*/{
- _tdav_session_video_remote_requested_idr(video, ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media);
+ video->avpf.last_pli_time = now;
+ break;
+ }
+ case trtp_rtcp_psfb_fci_type_afb: {
+ if (psfb->afb.type == trtp_rtcp_psfb_afb_type_remb) {
+ uint64_t bw_up_reported_kpbs = ((psfb->afb.remb.mantissa << psfb->afb.remb.exp) >> 10);
+ TSK_DEBUG_INFO("Receiving RTCP-AFB-REMB (%u), exp=%u, mantissa=%u, bandwidth=%ukbps", ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media, psfb->afb.remb.exp, psfb->afb.remb.mantissa, bw_up_reported_kpbs);
+ if (base->congestion_ctrl_enabled) {
+ if (session->qos_metrics.bw_up_est_kbps != 0) {
+ float q3 = bw_up_reported_kpbs / (float)session->qos_metrics.bw_up_est_kbps;
+ q3 = TSK_CLAMP(0.f, q3, 1.f);
+ TSK_DEBUG_INFO("bw_up_estimated_kbps=%llu, bw_up_reported_kpbs=%llu, q3=%f", session->qos_metrics.bw_up_est_kbps, bw_up_reported_kpbs, q3);
+ tsk_mutex_lock(video->h_mutex_qos);
+ session->qos_metrics.q3 = (session->qos_metrics.q3 + q3) / (video->q3_n++ ? 2.f : 1.f);
+ tsk_mutex_unlock(video->h_mutex_qos);
+ }
}
- video->avpf.last_pli_time = now;
- break;
}
- case trtp_rtcp_psfb_fci_type_afb:
- {
- if (psfb->afb.type == trtp_rtcp_psfb_afb_type_remb) {
- uint32_t bandwidth_up_reported_kpbs = ((psfb->afb.remb.mantissa << psfb->afb.remb.exp) / 1024);
- TSK_DEBUG_INFO("Receiving RTCP-AFB-REMB (%u), exp=%u, mantissa=%u, bandwidth = %ukbps, congestion_ctrl_enabled=%s", ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media, psfb->afb.remb.exp, psfb->afb.remb.mantissa, bandwidth_up_reported_kpbs, base->congestion_ctrl_enabled ? "yes" : "no");
-#if TDAV_GOOG_REMB_FULL_SUPPORT
- if (base->congestion_ctrl_enabled) {
- uint32_t remb_upload_kbps = 0;
- tsk_bool_t remb_ok = tsk_false;
- uint64_t bytes_count_now;
- uint64_t bytes_count_out;
- static uint64_t* bytes_count_in_ptr_null = tsk_null;
-
- if ((ret = trtp_manager_get_bytes_count(base->rtp_manager, bytes_count_in_ptr_null, &bytes_count_out)) == 0) {
- uint64_t duration;
- bytes_count_now = tsk_time_now();
- duration = (bytes_count_now - base->bytes_out.count_last_time);
- remb_ok = (base->bytes_out.count_last_time != 0 && duration > 0);
- if (remb_ok) {
- remb_upload_kbps = (int32_t)((((bytes_count_out - base->bytes_out.count) * 8 * 1000) / 1024) / duration);
- TSK_DEBUG_INFO("remb_upload_kbps=%u, bandwidth_up_reported_kpbs=%u", remb_upload_kbps, bandwidth_up_reported_kpbs);
- }
- base->bytes_out.count_last_time = bytes_count_now;
- base->bytes_out.count = bytes_count_out;
- }
- if (remb_ok) {
- int32_t pkt_loss_percent = bandwidth_up_reported_kpbs >= remb_upload_kbps ? 0 : ((remb_upload_kbps - bandwidth_up_reported_kpbs) / remb_upload_kbps) * 100;
- TSK_DEBUG_INFO("GOO-REMB: pkt_loss_percent=%d", pkt_loss_percent);
- if (pkt_loss_percent > 5) {
- // more than 5% pkt loss
- TSK_DEBUG_WARN("pkt_loss_percent(%u) > 5%%, using lower bw(%d)", pkt_loss_percent, bandwidth_up_reported_kpbs);
- _tdav_session_video_bw_kbps(video, bandwidth_up_reported_kpbs);
- }
- else if (pkt_loss_percent == 0) {
-#if 0
- // no pkt loss --> increase bw
- int32_t target_bw_max_upload_kbps = base->bandwidth_max_upload_kbps; // user-defined (guard), INT_MAX if not defined
- if (video->encoder.codec) {
- target_bw_max_upload_kbps = TSK_MIN(
- tmedia_get_video_bandwidth_kbps_2(TMEDIA_CODEC_VIDEO(video->encoder.codec)->out.width, TMEDIA_CODEC_VIDEO(video->encoder.codec)->out.height, TMEDIA_CODEC_VIDEO(video->encoder.codec)->out.fps),
- target_bw_max_upload_kbps);
- }
- if (target_bw_max_upload_kbps > remb_upload_kbps + ((remb_upload_kbps / 100) * 20)) {
- // target (best) bw is 20% less than what we're sending --> increase by 5%
- uint32_t new_upload_kbps = remb_upload_kbps + ((remb_upload_kbps / 100) * 5);
- TSK_DEBUG_INFO("current upload bw is too low, increasing from %u to %u", remb_upload_kbps, new_upload_kbps);
- _tdav_session_video_bw_kbps(video, new_upload_kbps);
- }
-#endif /* 0 */
- }
- }
-
- }
-#else
- // for now we just don't respect the requested bandwidth
-#endif /* TDAV_GOOG_REMB_FULL_SUPPORT */
+ else if (psfb->afb.type == trtp_rtcp_psfb_afb_type_jcng) {
+ float jcng_q = ((float)psfb->afb.jcng.q / 255.f);
+ TSK_DEBUG_INFO("Receiving RTCP-AFB-JCNG (%u), q_recv=%u, q_dec=%f", ((const trtp_rtcp_report_fb_t*)psfb)->ssrc_media, psfb->afb.jcng.q, jcng_q);
+ if (base->congestion_ctrl_enabled) {
+ tsk_mutex_lock(video->h_mutex_qos);
+ session->qos_metrics.q5 = (session->qos_metrics.q5 + jcng_q) / (video->q5_n++ ? 2.f : 1.f);
+ tsk_mutex_unlock(video->h_mutex_qos);
}
- break;
}
- default: break;
+ break;
+ }
+ default:
+ break;
}
}
i = 0;
- while((rtpfb = (const trtp_rtcp_report_rtpfb_t*)trtp_rtcp_packet_get_at(packet, trtp_rtcp_packet_type_rtpfb, i++))){
- switch(rtpfb->fci_type){
- default: break;
- case trtp_rtcp_rtpfb_fci_type_nack:
- {
- if(rtpfb->nack.blp && rtpfb->nack.pid){
- tsk_size_t i;
- int32_t j;
- uint16_t pid, blp;
- const tsk_list_item_t* item;
- const trtp_rtp_packet_t* pkt_rtp;
- for(i = 0; i < rtpfb->nack.count; ++i){
- static const int32_t __Pow2[16] = { 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x100, 0x200, 0x400, 0x800, 0x1000, 0x2000, 0x4000, 0x8000 };
- int32_t blp_count;
- blp = rtpfb->nack.blp[i];
- blp_count = blp ? 16 : 0;
-
- for(j = -1/*Packet ID (PID)*/; j < blp_count; ++j){
- if(j == -1 || (blp & __Pow2[j])){
- pid = (rtpfb->nack.pid[i] + (j + 1));
- tsk_list_lock(video->avpf.packets);
- tsk_list_foreach(item, video->avpf.packets){
- if(!(pkt_rtp = item->data)){
- continue;
- }
-
- // Very Important: the seq_nums are not consecutive because of wrapping.
- // For example, '65533, 65534, 65535, 0, 1' is a valid sequences which means we have to check all packets (probaly need somthing smarter)
- if(pkt_rtp->header->seq_num == pid){
- TSK_DEBUG_INFO("NACK Found, pid=%d, blp=%u", pid, blp);
- trtp_manager_send_rtp_packet(base->rtp_manager, pkt_rtp, tsk_true);
- break;
- }
- if(item == video->avpf.packets->tail){
- // should never be called unless the tail is too small
- int32_t old_max = (int32_t)video->avpf.max;
- int32_t len_drop = (pkt_rtp->header->seq_num - pid);
- video->avpf.max = TSK_CLAMP((int32_t)tmedia_defaults_get_avpf_tail_min(), (old_max + len_drop), (int32_t)tmedia_defaults_get_avpf_tail_max());
- TSK_DEBUG_INFO("**NACK requesting dropped frames. List=[%d-%d], requested=%d, List.Max=%d, List.Count=%d. RTT is probably too high.",
- ((const trtp_rtp_packet_t*)TSK_LIST_FIRST_DATA(video->avpf.packets))->header->seq_num,
- ((const trtp_rtp_packet_t*)TSK_LIST_LAST_DATA(video->avpf.packets))->header->seq_num,
- pid,
- (int)video->avpf.max,
- (int)video->avpf.count);
- // FIR not really requested but needed
- /*_tdav_session_video_remote_requested_idr(video, ((const trtp_rtcp_report_fb_t*)rtpfb)->ssrc_media);
- tsk_list_clear_items(video->avpf.packets);
- video->avpf.count = 0;*/
- } // if(last_item)
- }// foreach(pkt)
- tsk_list_unlock(video->avpf.packets);
- }// if(BLP is set)
- }// foreach(BIT in BLP)
- }// foreach(nack)
- }// if(nack-blp and nack-pid are set)
- break;
- }// case
+ while((rtpfb = (const trtp_rtcp_report_rtpfb_t*)trtp_rtcp_packet_get_at(packet, trtp_rtcp_packet_type_rtpfb, i++))) {
+ switch(rtpfb->fci_type) {
+ default:
+ break;
+ case trtp_rtcp_rtpfb_fci_type_nack: {
+ if(rtpfb->nack.blp && rtpfb->nack.pid) {
+ tsk_size_t i;
+ int32_t j;
+ uint16_t pid, blp;
+ uint32_t r = 0; // the number of recoverable packets (lost but recovered using a NACK requests)
+ uint32_t u = 0; // the number of unrecoverable packets (lost but not recovered using NACK requests)
+ const tsk_list_item_t* item;
+ const trtp_rtp_packet_t* pkt_rtp;
+ for(i = 0; i < rtpfb->nack.count; ++i) {
+ static const int32_t __Pow2[16] = { 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x100, 0x200, 0x400, 0x800, 0x1000, 0x2000, 0x4000, 0x8000 };
+ int32_t blp_count;
+ blp = rtpfb->nack.blp[i];
+ blp_count = blp ? 16 : 0;
+
+ for(j = -1/*Packet ID (PID)*/; j < blp_count; ++j) {
+ if(j == -1 || (blp & __Pow2[j])) {
+ pid = (rtpfb->nack.pid[i] + (j + 1));
+ tsk_list_lock(video->avpf.packets);
+ tsk_list_foreach(item, video->avpf.packets) {
+ if(!(pkt_rtp = item->data)) {
+ continue;
+ }
+
+ // Very Important: the seq_nums are not consecutive because of wrapping.
+ // For example, '65533, 65534, 65535, 0, 1' is a valid sequences which means we have to check all packets (probaly need somthing smarter)
+ if(pkt_rtp->header->seq_num == pid) {
+ ++r;
+ TSK_DEBUG_INFO("NACK Found, pid=%d, blp=%u, r=%u", pid, blp, r);
+ trtp_manager_send_rtp_packet(base->rtp_manager, pkt_rtp, tsk_true);
+ break;
+ }
+ if(item == video->avpf.packets->tail) {
+ // should never be called unless the tail is too small
+ int32_t old_max = (int32_t)video->avpf.max;
+ int32_t len_drop = (pkt_rtp->header->seq_num - pid);
+ ++u;
+ video->avpf.max = TSK_CLAMP((int32_t)tmedia_defaults_get_avpf_tail_min(), (old_max + len_drop), (int32_t)tmedia_defaults_get_avpf_tail_max());
+ TSK_DEBUG_INFO("**NACK requesting dropped frames. List=[%d-%d], requested=%d, List.Max=%d, List.Count=%d, u=%u. RTT is probably too high.",
+ ((const trtp_rtp_packet_t*)TSK_LIST_FIRST_DATA(video->avpf.packets))->header->seq_num,
+ ((const trtp_rtp_packet_t*)TSK_LIST_LAST_DATA(video->avpf.packets))->header->seq_num,
+ pid,
+ (int)video->avpf.max,
+ (int)video->avpf.count,
+ (unsigned)u);
+ // FIR not really requested but needed
+ /*_tdav_session_video_remote_requested_idr(video, ((const trtp_rtcp_report_fb_t*)rtpfb)->ssrc_media);
+ tsk_list_clear_items(video->avpf.packets);
+ video->avpf.count = 0;*/
+ } // if(last_item)
+ }// foreach(pkt)
+ tsk_list_unlock(video->avpf.packets);
+ }// if(BLP is set)
+ }// foreach(BIT in BLP)
+ }// foreach(nack)
+ if (base->congestion_ctrl_enabled) {
+ // Compute q1
+ if (r || u) {
+ float q1 = 1.f - (((r * 0.2f) + (u * 0.8f)) / (r + u));
+ tsk_mutex_lock(video->h_mutex_qos);
+ session->qos_metrics.q1 = (session->qos_metrics.q1 + q1) / (video->q1_n++ ? 2.f : 1.f);
+ tsk_mutex_unlock(video->h_mutex_qos);
+ TSK_DEBUG_INFO("RTCP-NACK: r=%u, u=%u, q1=%f", r, u, q1);
+ }
+ }
+ }// if(nack-blp and nack-pid are set)
+ break;
+ }// case
}// switch
}// while(rtcp-pkt)
-
+
return ret;
}
static int _tdav_session_video_set_defaults(tdav_session_video_t* self)
{
+ tmedia_session_t* session = (tmedia_session_t*)self;
if (!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
@@ -762,16 +730,29 @@ static int _tdav_session_video_set_defaults(tdav_session_video_t* self)
self->jb_enabled = tmedia_defaults_get_videojb_enabled();
self->zero_artifacts = tmedia_defaults_get_video_zeroartifacts_enabled();
self->avpf.max = tmedia_defaults_get_avpf_tail_min();
- self->encoder.pkt_loss_level = tdav_session_video_pkt_loss_level_low;
- self->encoder.pkt_loss_prob_bad = 0; // honor first report
- self->encoder.pkt_loss_prob_good = TDAV_SESSION_VIDEO_PKT_LOSS_PROB_GOOD;
self->encoder.last_frame_time = 0;
-
+ // Quality metrics
+ session->qos_metrics.q1 = 0.f, self->q1_n = 0;
+ session->qos_metrics.q2 = 0.f, self->q2_n = 0;
+ session->qos_metrics.q3 = 0.f, self->q3_n = 0;
+ session->qos_metrics.q4 = 0.f, self->q4_n = 0;
+ session->qos_metrics.qvag = 1.f;
+ session->qos_metrics.bw_up_est_kbps = 0;
+ session->qos_metrics.bw_down_est_kbps = 0;
+ session->qos_metrics.last_update_time = 0;
+ session->qos_metrics.video_out_width = 0;
+ session->qos_metrics.video_out_height = 0;
+ session->qos_metrics.video_in_width = 0;
+ session->qos_metrics.video_in_height = 0;
+ session->qos_metrics.video_in_avg_fps = tmedia_defaults_get_video_fps(), self->in_avg_fps_n = 1;
+ session->qos_metrics.video_dec_avg_time = 0, self->dec_avg_time_n = 0 ;
+ session->qos_metrics.video_enc_avg_time = 0, self->enc_avg_time_n = 0;
+
// reset rotation info (MUST for reINVITE when mobile device in portrait[90 degrees])
self->encoder.rotation = 0;
-
+
TSK_DEBUG_INFO("Video 'zero-artifacts' option = %s", self->zero_artifacts ? "yes" : "no");
-
+
return 0;
}
@@ -781,61 +762,61 @@ static int _tdav_session_video_jb_cb(const tdav_video_jb_cb_data_xt* data)
tdav_session_video_t* video = (tdav_session_video_t*)data->usr_data;
tdav_session_av_t* base = (tdav_session_av_t*)data->usr_data;
tmedia_session_t* session = (tmedia_session_t*)data->usr_data;
-
- switch(data->type){
- default: break;
- case tdav_video_jb_cb_data_type_rtp:
- {
- return _tdav_session_video_decode(video, data->rtp.pkt);
- }
- case tdav_video_jb_cb_data_type_tmfr:
- {
- base->time_last_frame_loss_report = tsk_time_now();
+
+ switch (data->type) {
+ default:
+ break;
+ case tdav_video_jb_cb_data_type_rtp: {
+ return _tdav_session_video_decode(video, data->rtp.pkt);
+ }
+ case tdav_video_jb_cb_data_type_tmfr: {
+ base->time_last_frame_loss_report = tsk_time_now();
+ _tdav_session_video_local_request_idr(session, "TMFR", data->ssrc);
+ }
+ case tdav_video_jb_cb_data_type_fl: {
+ base->time_last_frame_loss_report = tsk_time_now();
+ if(data->fl.count > TDAV_SESSION_VIDEO_PKT_LOSS_MAX_COUNT_TO_REQUEST_FIR) {
_tdav_session_video_local_request_idr(session, "TMFR", data->ssrc);
}
- case tdav_video_jb_cb_data_type_fl:
- {
- base->time_last_frame_loss_report = tsk_time_now();
- if(data->fl.count > TDAV_SESSION_VIDEO_PKT_LOSS_MAX_COUNT_TO_REQUEST_FIR){
- _tdav_session_video_local_request_idr(session, "TMFR", data->ssrc);
- }
- else {
- if (base->avpf_mode_neg || base->is_fb_nack_neg) { // AVPF?
- // Send RTCP-NACK
- tsk_size_t i, j, k;
- uint16_t seq_nums[16];
- for(i = 0; i < data->fl.count; i+=16){
- for(j = 0, k = i; j < 16 && k < data->fl.count; ++j, ++k){
- seq_nums[j] = (uint16_t)(data->fl.seq_num + i + j);
- TSK_DEBUG_INFO("Request re-send(%u)", seq_nums[j]);
- }
- trtp_manager_signal_pkt_loss(base->rtp_manager, data->ssrc, seq_nums, j);
+ else {
+ if (base->avpf_mode_neg || base->is_fb_nack_neg) { // AVPF?
+ // Send RTCP-NACK
+ tsk_size_t i, j, k;
+ uint16_t seq_nums[16];
+ for (i = 0; i < data->fl.count; i+=16) {
+ for(j = 0, k = i; j < 16 && k < data->fl.count; ++j, ++k) {
+ seq_nums[j] = (uint16_t)(data->fl.seq_num + i + j);
+ TSK_DEBUG_INFO("Request re-send(%u)", seq_nums[j]);
}
+ trtp_manager_signal_pkt_loss(base->rtp_manager, data->ssrc, seq_nums, j);
}
}
-
- break;
}
- case tdav_video_jb_cb_data_type_fps_changed:
- {
- if(base->congestion_ctrl_enabled){
- video->fps_changed = tsk_true;
- if(video->decoder.codec){
- TSK_DEBUG_INFO("Congestion control enabled and fps updated from %u to %u", data->fps.old, data->fps.new);
- TMEDIA_CODEC_VIDEO(video->decoder.codec)->in.fps = data->fps.new;
- }
+
+ break;
+ }
+ case tdav_video_jb_cb_data_type_fps_changed: {
+ if (base->congestion_ctrl_enabled) {
+ video->fps_changed = tsk_true;
+ if (video->decoder.codec) {
+ TSK_DEBUG_INFO("Congestion control enabled and fps updated from %u to %u", data->fps.old, data->fps.new);
+ TMEDIA_CODEC_VIDEO(video->decoder.codec)->in.fps = data->fps.new;
}
- break;
+ tsk_mutex_lock(video->h_mutex_qos);
+ session->qos_metrics.video_in_avg_fps = (session->qos_metrics.video_in_avg_fps + data->fps.new) / (video->in_avg_fps_n++ ? 2 : 1);
+ tsk_mutex_unlock(video->h_mutex_qos);
}
+ break;
}
-
+ }
+
return 0;
}
int _tdav_session_video_open_decoder(tdav_session_video_t* self, uint8_t payload_type)
{
int ret = 0;
-
+
if ((self->decoder.codec_payload_type != payload_type) || !self->decoder.codec) {
tsk_istr_t format;
TSK_OBJECT_SAFE_FREE(self->decoder.codec);
@@ -849,14 +830,14 @@ int _tdav_session_video_open_decoder(tdav_session_video_t* self, uint8_t payload
self->decoder.codec_decoded_frames_count = 0; // because we switched the codecs
}
// Open codec if not already done
- if (!TMEDIA_CODEC(self->decoder.codec)->opened){
+ if (!TMEDIA_CODEC(self->decoder.codec)->opened) {
if ((ret = tmedia_codec_open(self->decoder.codec))) {
TSK_DEBUG_ERROR("Failed to open [%s] codec", self->decoder.codec->plugin->desc);
goto bail;
}
self->decoder.codec_decoded_frames_count = 0; // because first time to use
}
-
+
bail:
return ret;
}
@@ -867,45 +848,49 @@ static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp
static const trtp_rtp_header_t* __rtp_header = tsk_null;
static const tmedia_codec_id_t __codecs_supporting_zero_artifacts = (tmedia_codec_id_vp8 | tmedia_codec_id_h264_bp | tmedia_codec_id_h264_mp | tmedia_codec_id_h263);
int ret = 0;
-
- if(!self || !packet || !packet->header){
+
+ if(!self || !packet || !packet->header) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
tsk_safeobj_lock(base);
-
+
if (self->started && base->consumer && base->consumer->is_started) {
tsk_size_t out_size, _size;
const void* _buffer;
tdav_session_video_t* video = (tdav_session_video_t*)base;
-
+ tmedia_session_t* session = (tmedia_session_t*)base;
+ uint64_t time_start, time_duration;
+
// Find the codec to use to decode the RTP payload
- if(!self->decoder.codec || self->decoder.codec_payload_type != packet->header->payload_type){
- if((ret = _tdav_session_video_open_decoder(self, packet->header->payload_type))){
+ if (!self->decoder.codec || self->decoder.codec_payload_type != packet->header->payload_type) {
+ if ((ret = _tdav_session_video_open_decoder(self, packet->header->payload_type))) {
goto bail;
}
}
-
+
// check whether bypassing is enabled (e.g. rtcweb breaker ON and media coder OFF)
- if(TMEDIA_SESSION(self)->bypass_decoding){
+ if(TMEDIA_SESSION(self)->bypass_decoding) {
// set codec id for internal use (useful to find codec with dynamic payload type)
TRTP_RTP_HEADER(packet->header)->codec_id = self->decoder.codec->id;
// consume the frame
ret = tmedia_consumer_consume(base->consumer, (packet->payload.data ? packet->payload.data : packet->payload.data_const), packet->payload.size, packet->header);
goto bail;
}
-
+
+ time_start = tsk_time_now();
+
// Check if stream is corrupted or not
- if(video->decoder.last_seqnum && (video->decoder.last_seqnum + 1) != packet->header->seq_num){
+ if(video->decoder.last_seqnum && (video->decoder.last_seqnum + 1) != packet->header->seq_num) {
TSK_DEBUG_INFO("/!\\Video stream corrupted because of packet loss [%u - %u]. Pause rendering if 'zero_artifacts' (supported = %s, enabled = %s).",
video->decoder.last_seqnum,
packet->header->seq_num,
(__codecs_supporting_zero_artifacts & self->decoder.codec->id) ? "yes" : "no",
self->zero_artifacts ? "yes" : "no"
- );
- if(!video->decoder.stream_corrupted){ // do not do the job twice
- if(self->zero_artifacts && (__codecs_supporting_zero_artifacts & self->decoder.codec->id)){
+ );
+ if(!video->decoder.stream_corrupted) { // do not do the job twice
+ if(self->zero_artifacts && (__codecs_supporting_zero_artifacts & self->decoder.codec->id)) {
// request IDR now and every time after 'TDAV_SESSION_VIDEO_AVPF_FIR_REQUEST_INTERVAL_MIN' ellapsed
// 'zero-artifacts' not enabled then, we'll request IDR when decoding fails
TSK_DEBUG_INFO("Sending FIR to request IDR...");
@@ -919,35 +904,43 @@ static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp
video->decoder.last_corrupted_timestamp = packet->header->timestamp;
}
video->decoder.last_seqnum = packet->header->seq_num; // update last seqnum
-
+
// Decode data
out_size = self->decoder.codec->plugin->decode(
- self->decoder.codec,
- (packet->payload.data ? packet->payload.data : packet->payload.data_const), packet->payload.size,
- &self->decoder.buffer, &self->decoder.buffer_size,
- packet->header
- );
+ self->decoder.codec,
+ (packet->payload.data ? packet->payload.data : packet->payload.data_const), packet->payload.size,
+ &self->decoder.buffer, &self->decoder.buffer_size,
+ packet->header
+ );
+
+ // report to the remote party the bandwidth usage and jitter buffer congestion info
+ // this must be done here to make sure it won't be skipped by decoding issues or any other failure
+ ret = _tdav_session_video_report_bw_usage_and_jcng(self);
+
+ // inc() frame count
+ ++self->decoder.codec_decoded_frames_count;
+
// check
- if(!out_size || !self->decoder.buffer){
+ if (!out_size || !self->decoder.buffer) {
goto bail;
}
// check if stream is corrupted
// the above decoding process is required in order to reset stream corruption status when IDR frame is decoded
- if(self->zero_artifacts && self->decoder.stream_corrupted && (__codecs_supporting_zero_artifacts & self->decoder.codec->id)){
+ if (self->zero_artifacts && self->decoder.stream_corrupted && (__codecs_supporting_zero_artifacts & self->decoder.codec->id)) {
TSK_DEBUG_INFO("Do not render video frame because stream is corrupted and 'zero-artifacts' is enabled. Last seqnum=%u", video->decoder.last_seqnum);
- if(video->decoder.stream_corrupted && (tsk_time_now() - video->decoder.stream_corrupted_since) > TDAV_SESSION_VIDEO_AVPF_FIR_REQUEST_INTERVAL_MIN){
+ if (video->decoder.stream_corrupted && (tsk_time_now() - video->decoder.stream_corrupted_since) > TDAV_SESSION_VIDEO_AVPF_FIR_REQUEST_INTERVAL_MIN) {
TSK_DEBUG_INFO("Sending FIR to request IDR because frame corrupted since %llu...", video->decoder.stream_corrupted_since);
_tdav_session_video_local_request_idr(TMEDIA_SESSION(video), "ZERO_ART_CORRUPTED", packet->header->ssrc);
}
goto bail;
}
-
+
// important: do not override the display size (used by the end-user) unless requested
- if(base->consumer->video.display.auto_resize){
+ if(base->consumer->video.display.auto_resize) {
base->consumer->video.display.width = TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.width;//decoded width
base->consumer->video.display.height = TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.height;//decoded height
}
-
+
// Convert decoded data to the consumer chroma and size
#define CONSUMER_NEED_DECODER (base->consumer->decoder.codec_id == tmedia_codec_id_none) // Otherwise, the consumer requires encoded frames
#define CONSUMER_IN_N_DISPLAY_MISMATCH (!base->consumer->video.display.auto_resize && (base->consumer->video.in.width != base->consumer->video.display.width || base->consumer->video.in.height != base->consumer->video.display.height))
@@ -955,112 +948,64 @@ static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp
#define CONSUMER_DISPLAY_N_CONVERTER_MISMATCH ( (self->conv.fromYUV420 && self->conv.fromYUV420->dstWidth != base->consumer->video.display.width) || (self->conv.fromYUV420 && self->conv.fromYUV420->dstHeight != base->consumer->video.display.height) )
#define CONSUMER_CHROMA_MISMATCH (base->consumer->video.display.chroma != TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.chroma)
#define DECODED_NEED_FLIP (TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.flip)
-
- if(CONSUMER_NEED_DECODER && (CONSUMER_CHROMA_MISMATCH || CONSUMER_DISPLAY_N_CODEC_MISMATCH || CONSUMER_IN_N_DISPLAY_MISMATCH || CONSUMER_DISPLAY_N_CONVERTER_MISMATCH || DECODED_NEED_FLIP)){
-
+
+ if(CONSUMER_NEED_DECODER && (CONSUMER_CHROMA_MISMATCH || CONSUMER_DISPLAY_N_CODEC_MISMATCH || CONSUMER_IN_N_DISPLAY_MISMATCH || CONSUMER_DISPLAY_N_CONVERTER_MISMATCH || DECODED_NEED_FLIP)) {
+
// Create video converter if not already done
- if(!self->conv.fromYUV420 || CONSUMER_DISPLAY_N_CONVERTER_MISMATCH){
+ if(!self->conv.fromYUV420 || CONSUMER_DISPLAY_N_CONVERTER_MISMATCH) {
TSK_OBJECT_SAFE_FREE(self->conv.fromYUV420);
-
+
// create converter
if(!(self->conv.fromYUV420 = tmedia_converter_video_create(TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.width, TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.height, TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.chroma, base->consumer->video.display.width, base->consumer->video.display.height,
- base->consumer->video.display.chroma))){
+ base->consumer->video.display.chroma))) {
TSK_DEBUG_ERROR("Failed to create video converter");
ret = -3;
goto bail;
}
}
}
-
+
// update consumer size using the codec decoded values
// must be done here to avoid fooling "CONSUMER_IN_N_DISPLAY_MISMATCH" unless "auto_resize" option is enabled
base->consumer->video.in.width = TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.width;//decoded width
base->consumer->video.in.height = TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.height;//decoded height
-
- if(self->conv.fromYUV420){
+
+ if(self->conv.fromYUV420) {
// update one-shot parameters
tmedia_converter_video_set_flip(self->conv.fromYUV420, TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.flip);
// convert data to the consumer's chroma
out_size = tmedia_converter_video_process(self->conv.fromYUV420, self->decoder.buffer, self->decoder.buffer_size, &self->decoder.conv_buffer, &self->decoder.conv_buffer_size);
- if(!out_size || !self->decoder.conv_buffer){
+ if(!out_size || !self->decoder.conv_buffer) {
TSK_DEBUG_ERROR("Failed to convert YUV420 buffer to consumer's chroma");
ret = -4;
goto bail;
}
-
+
_buffer = self->decoder.conv_buffer;
_size = out_size;
}
- else{
+ else {
_buffer = self->decoder.buffer;
_size = out_size;
}
-
- // congetion control
- // send RTCP-REMB if:
- // - congestion control is enabled and
- // - fps changed or
- // - first frame or
- // - approximately every 1 seconds (1 = 1 * 1)
- if (base->congestion_ctrl_enabled && base->rtp_manager && (self->fps_changed || self->decoder.codec_decoded_frames_count == 0 || ((self->decoder.codec_decoded_frames_count % (TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.fps * 1)) == 0))){
- int32_t bandwidth_max_upload_kbps = base->bandwidth_max_upload_kbps;
- int32_t bandwidth_max_download_kbps = base->bandwidth_max_download_kbps; // user-defined (guard), INT_MAX if not defined
- // bandwidth already computed in start() but the decoded video size was not correct and based on the SDP negotiation
- bandwidth_max_download_kbps = TSK_MIN(
- tmedia_get_video_bandwidth_kbps_2(TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.width, TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.height, TMEDIA_CODEC_VIDEO(self->decoder.codec)->in.fps),
- bandwidth_max_download_kbps);
- if (self->encoder.codec) {
- bandwidth_max_upload_kbps = TSK_MIN(
- tmedia_get_video_bandwidth_kbps_2(TMEDIA_CODEC_VIDEO(self->encoder.codec)->out.width, TMEDIA_CODEC_VIDEO(self->encoder.codec)->out.height, TMEDIA_CODEC_VIDEO(self->encoder.codec)->out.fps),
- bandwidth_max_upload_kbps);
- }
-
-#if TDAV_GOOG_REMB_FULL_SUPPORT
- {
- tsk_bool_t remb_ok = tsk_false;
- int32_t remb_download_kbps = 0;
- uint64_t now = 0;
- uint64_t bytes_count_in;
- static uint64_t* bytes_count_out_ptr_null = tsk_null;
- if ((ret = trtp_manager_get_bytes_count(base->rtp_manager, &bytes_count_in, bytes_count_out_ptr_null)) == 0) {
- uint64_t duration;
- now = tsk_time_now();
- duration = (now - base->bytes_in.count_last_time);
- remb_ok = (base->bytes_in.count_last_time != 0 && duration > 0);
- if (remb_ok) {
- remb_download_kbps = (int32_t)((((bytes_count_in - base->bytes_in.count) * 8 * 1000) / 1024) / duration);
- TSK_DEBUG_INFO("remb_download_kbps=%d", remb_download_kbps);
- }
- base->bytes_in.count_last_time = now;
- base->bytes_in.count = bytes_count_in;
- }
- if (remb_ok) {
- // if "remb_ok" is true then "now" has a valid value
- if ((now - base->time_last_frame_loss_report) > TDAV_SESSION_VIDEO_PKT_LOSS_NO_REPORT_BEFORE_INCREASING_BW) {
- TSK_DEBUG_INFO("No pakt loss since %d millis ... adding 5%% to the estimated max bandwidth", TDAV_SESSION_VIDEO_PKT_LOSS_NO_REPORT_BEFORE_INCREASING_BW);
- remb_download_kbps += (remb_download_kbps / 100) * 5; // add 5% to the estimated bandwidth
- }
- // CLAMP is used to make sure we will not report more than what the user defined as max values even if the estimated values are higher
- bandwidth_max_download_kbps = TSK_CLAMP(0, remb_download_kbps, bandwidth_max_download_kbps);
- }
- }
-#endif /* TDAV_GOOG_REMB_FULL_SUPPORT */
-
- self->fps_changed = tsk_false; // reset
- TSK_DEBUG_INFO("video with congestion control enabled: max_bw_up(unused)=%d kpbs, max_bw_down=%d kpbs", bandwidth_max_upload_kbps, bandwidth_max_download_kbps);
- ret = trtp_manager_set_app_bandwidth_max(base->rtp_manager, bandwidth_max_upload_kbps/* unused */, bandwidth_max_download_kbps);
+
+ time_duration = tsk_time_now() - time_start;
+ if (base->congestion_ctrl_enabled) {
+ tsk_mutex_lock(video->h_mutex_qos);
+ session->qos_metrics.video_dec_avg_time = (unsigned)((session->qos_metrics.video_dec_avg_time + time_duration) / (video->dec_avg_time_n++ ? 2 : 1));
+ tsk_mutex_unlock(video->h_mutex_qos);
}
- // inc() frame count and consume decoded video
- ++self->decoder.codec_decoded_frames_count;
+
+ // consume decoded video
ret = tmedia_consumer_consume(base->consumer, _buffer, _size, __rtp_header);
}
else if (!base->consumer || !base->consumer->is_started) {
TSK_DEBUG_INFO("Consumer not started (is_null=%d)", !base->consumer);
}
-
+
bail:
tsk_safeobj_unlock(base);
-
+
return ret;
}
@@ -1071,36 +1016,36 @@ static int tdav_session_video_set(tmedia_session_t* self, const tmedia_param_t*
int ret = 0;
tdav_session_video_t* video;
tdav_session_av_t* base;
-
- if(!self){
+
+ if(!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
// try with the base class to see if this option is supported or not
if (tdav_session_av_set(TDAV_SESSION_AV(self), param) == tsk_true) {
return 0;
}
-
+
video = (tdav_session_video_t*)self;
base = (tdav_session_av_t*)self;
-
- if(param->plugin_type == tmedia_ppt_codec){
+
+ if(param->plugin_type == tmedia_ppt_codec) {
tsk_mutex_lock(video->encoder.h_mutex);
ret = tmedia_codec_set((tmedia_codec_t*)video->encoder.codec, param);
tsk_mutex_unlock(video->encoder.h_mutex);
}
- else if(param->plugin_type == tmedia_ppt_consumer){
- if(!base->consumer){
+ else if(param->plugin_type == tmedia_ppt_consumer) {
+ if(!base->consumer) {
TSK_DEBUG_ERROR("No consumer associated to this session");
return -1;
}
- if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "flip")){
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "flip")) {
tsk_list_item_t* item;
tsk_bool_t flip = (tsk_bool_t)TSK_TO_INT32((uint8_t*)param->value);
tmedia_codecs_L_t *codecs = tsk_object_ref(self->codecs);
- tsk_list_foreach(item, codecs){
+ tsk_list_foreach(item, codecs) {
TMEDIA_CODEC_VIDEO(item->data)->in.flip = flip;
}
tsk_object_unref(codecs);
@@ -1108,17 +1053,17 @@ static int tdav_session_video_set(tmedia_session_t* self, const tmedia_param_t*
}
ret = tmedia_consumer_set(base->consumer, param);
}
- else if(param->plugin_type == tmedia_ppt_producer){
- if(!base->producer){
+ else if(param->plugin_type == tmedia_ppt_producer) {
+ if(!base->producer) {
TSK_DEBUG_ERROR("No producer associated to this session");
return -1;
}
- if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "flip")){
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "flip")) {
tsk_list_item_t* item;
tsk_bool_t flip = (tsk_bool_t)TSK_TO_INT32((uint8_t*)param->value);
tmedia_codecs_L_t *codecs = tsk_object_ref(self->codecs);
- tsk_list_foreach(item, codecs){
+ tsk_list_foreach(item, codecs) {
TMEDIA_CODEC_VIDEO(item->data)->out.flip = flip;
}
tsk_object_unref(codecs);
@@ -1126,20 +1071,20 @@ static int tdav_session_video_set(tmedia_session_t* self, const tmedia_param_t*
}
ret = tmedia_producer_set(base->producer, param);
}
- else{
- if (param->value_type == tmedia_pvt_int32){
- if (tsk_striequals(param->key, "bandwidth-level")){
+ else {
+ if (param->value_type == tmedia_pvt_int32) {
+ if (tsk_striequals(param->key, "bandwidth-level")) {
tsk_list_item_t* item;
self->bl = (tmedia_bandwidth_level_t)TSK_TO_INT32((uint8_t*)param->value);
self->codecs = tsk_object_ref(self->codecs);
- tsk_list_foreach(item, self->codecs){
+ tsk_list_foreach(item, self->codecs) {
((tmedia_codec_t*)item->data)->bl = self->bl;
}
tsk_object_unref(self->codecs);
}
}
}
-
+
return ret;
}
@@ -1149,7 +1094,7 @@ static int tdav_session_video_get(tmedia_session_t* self, tmedia_param_t* param)
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
// try with the base class to see if this option is supported or not
if (tdav_session_av_get(TDAV_SESSION_AV(self), param) == tsk_true) {
return 0;
@@ -1164,7 +1109,7 @@ static int tdav_session_video_get(tmedia_session_t* self, tmedia_param_t* param)
}
}
}
-
+
TSK_DEBUG_WARN("This session doesn't support get(%s)", param->key);
return -2;
}
@@ -1174,17 +1119,17 @@ static int tdav_session_video_prepare(tmedia_session_t* self)
tdav_session_av_t* base = (tdav_session_av_t*)(self);
tdav_session_video_t* video = (tdav_session_video_t*)self;
int ret;
-
- if((ret = tdav_session_av_prepare(base))){
+
+ if((ret = tdav_session_av_prepare(base))) {
TSK_DEBUG_ERROR("tdav_session_av_prepare(video) failed");
return ret;
}
-
- if(base->rtp_manager){
+
+ if(base->rtp_manager) {
ret = trtp_manager_set_rtp_callback(base->rtp_manager, tdav_session_video_rtp_cb, base);
ret = trtp_manager_set_rtcp_callback(base->rtp_manager, tdav_session_video_rtcp_cb, base);
}
-
+
return ret;
}
@@ -1194,20 +1139,20 @@ static int tdav_session_video_start(tmedia_session_t* self)
tdav_session_video_t* video;
const tmedia_codec_t* codec;
tdav_session_av_t* base;
-
+
if (!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
video = (tdav_session_video_t*)self;
base = (tdav_session_av_t*)self;
-
+
if (video->started) {
TSK_DEBUG_INFO("Video session already started");
return 0;
}
-
+
// ENCODER codec
if (!(codec = tdav_session_av_get_best_neg_codec(base))) {
TSK_DEBUG_ERROR("No codec matched");
@@ -1222,26 +1167,32 @@ static int tdav_session_video_start(tmedia_session_t* self)
return ret;
}
if (!TMEDIA_CODEC(video->encoder.codec)->opened) {
- if((ret = tmedia_codec_open(video->encoder.codec))){
+ if((ret = tmedia_codec_open(video->encoder.codec))) {
tsk_mutex_unlock(video->encoder.h_mutex);
TSK_DEBUG_ERROR("Failed to open [%s] codec", video->encoder.codec->plugin->desc);
return ret;
}
}
tsk_mutex_unlock(video->encoder.h_mutex);
-
+
if (video->jb) {
if ((ret = tdav_video_jb_start(video->jb))) {
TSK_DEBUG_ERROR("Failed to start jitter buffer");
return ret;
}
}
-
+
if ((ret = tdav_session_av_start(base, video->encoder.codec))) {
TSK_DEBUG_ERROR("tdav_session_av_start(video) failed");
return ret;
}
video->started = tsk_true;
+
+ // start timer manager (must be after setting "started" to true)
+ if (tsk_timer_mgr_global_start() == 0) {
+ video->timer.id_qos = tsk_timer_mgr_global_schedule(TDAV_SESSION_VIDEO_QOS_COMPUTE_INTERVAL, _tdav_session_video_timer_cb, video);
+ }
+
return ret;
}
@@ -1250,18 +1201,25 @@ static int tdav_session_video_stop(tmedia_session_t* self)
int ret;
tdav_session_video_t* video;
tdav_session_av_t* base;
-
+
TSK_DEBUG_INFO("tdav_session_video_stop");
-
+
video = (tdav_session_video_t*)self;
base = (tdav_session_av_t*)self;
-
+
+ // unschedule qos timer
+ if (video->timer.mgr) {
+ tsk_timer_mgr_global_cancel(video->timer.id_qos);
+ video->timer.id_qos = TSK_INVALID_TIMER_ID;
+ // must not stop global timer manager as it's used by other functions
+ }
+
// must be here to make sure no other thread will lock the encoder once we have done it
tsk_mutex_lock(video->encoder.h_mutex); // encoder thread will check "started" var right after the lock is passed
video->started = tsk_false;
tsk_mutex_unlock(video->encoder.h_mutex);
// at this step we're sure that encode() will no longer be called which means we can safely close the codec
-
+
if (video->jb) {
ret = tdav_video_jb_stop(video->jb);
}
@@ -1269,7 +1227,7 @@ static int tdav_session_video_stop(tmedia_session_t* self)
tsk_list_lock(video->avpf.packets);
tsk_list_clear_items(video->avpf.packets);
tsk_list_unlock(video->avpf.packets);
-
+
// tdav_session_av_stop() : stop producer and consumer, close encoder and all other codecs, stop rtpManager...
// no need to lock the encoder to avoid using a closed codec (see above)
// no need to lock the decoder as the rtpManager will be stop before closing the codec
@@ -1279,11 +1237,11 @@ static int tdav_session_video_stop(tmedia_session_t* self)
TSK_OBJECT_SAFE_FREE(video->encoder.codec);
tsk_mutex_unlock(video->encoder.h_mutex);
TSK_OBJECT_SAFE_FREE(video->decoder.codec);
-
+
// reset default values to make sure next start will be called with right defaults
// do not call this function in start to avoid overriding values defined between prepare() and start()
_tdav_session_video_set_defaults(video);
-
+
return ret;
}
@@ -1297,17 +1255,17 @@ static const tsdp_header_M_t* tdav_session_video_get_lo(tmedia_session_t* self)
tsk_bool_t updated = tsk_false;
const tsdp_header_M_t* ret;
tdav_session_av_t* base = TDAV_SESSION_AV(self);
-
- if(!(ret = tdav_session_av_get_lo(base, &updated))){
+
+ if(!(ret = tdav_session_av_get_lo(base, &updated))) {
TSK_DEBUG_ERROR("tdav_session_av_get_lo(video) failed");
return tsk_null;
}
-
- if(updated){
+
+ if(updated) {
// set callbacks
_tdav_session_video_set_callbacks(self);
}
-
+
return ret;
}
@@ -1316,28 +1274,30 @@ static int tdav_session_video_set_ro(tmedia_session_t* self, const tsdp_header_M
int ret;
tsk_bool_t updated = tsk_false;
tdav_session_av_t* base = TDAV_SESSION_AV(self);
-
- if((ret = tdav_session_av_set_ro(base, m, &updated))){
+
+ if((ret = tdav_session_av_set_ro(base, m, &updated))) {
TSK_DEBUG_ERROR("tdav_session_av_set_ro(video) failed");
return ret;
}
-
+
// Check if "RTCP-NACK" and "RTC-FIR" are supported
{
const tmedia_codec_t* codec;
- base->is_fb_fir_neg = base->is_fb_nack_neg = base->is_fb_googremb_neg = tsk_false;
+ base->is_fb_fir_neg = base->is_fb_nack_neg = base->is_fb_googremb_neg = base->is_fb_doubsjcng_neg = tsk_false;
if ((codec = tdav_session_av_get_best_neg_codec(base))) {
// a=rtcp-fb:* ccm fir
// a=rtcp-fb:* nack
// a=rtcp-fb:* goog-remb
- char attr_fir[256], attr_nack[256], attr_goog_remb[256];
+ // a=rtcp-fb:* doubs-jcng
+ char attr_fir[256], attr_nack[256], attr_goog_remb[256], attr_doubs_jcng[256];
int index = 0;
const tsdp_header_A_t* A;
-
+
sprintf(attr_fir, "%s ccm fir", codec->neg_format);
sprintf(attr_nack, "%s nack", codec->neg_format);
sprintf(attr_goog_remb, "%s goog-remb", codec->neg_format);
-
+ sprintf(attr_doubs_jcng, "%s doubs-jcng", codec->neg_format);
+
while ((A = tsdp_header_M_findA_at(m, "rtcp-fb", index++))) {
if (!base->is_fb_fir_neg) {
base->is_fb_fir_neg = (tsk_striequals(A->value, "* ccm fir") || tsk_striequals(A->value, attr_fir));
@@ -1348,15 +1308,18 @@ static int tdav_session_video_set_ro(tmedia_session_t* self, const tsdp_header_M
if (!base->is_fb_googremb_neg) {
base->is_fb_googremb_neg = (tsk_striequals(A->value, "* goog-remb") || tsk_striequals(A->value, attr_goog_remb));
}
- }
+ if (!base->is_fb_doubsjcng_neg) {
+ base->is_fb_doubsjcng_neg = (tsk_striequals(A->value, "* doubs-jcng") || tsk_striequals(A->value, attr_doubs_jcng));
+ }
+ }
}
}
-
+
if (updated) {
// set callbacks
ret = _tdav_session_video_set_callbacks(self);
}
-
+
return ret;
}
@@ -1365,20 +1328,20 @@ static int tdav_session_video_rtcp_set_onevent_cbfn(tmedia_session_t* self, cons
{
tdav_session_video_t* video;
tdav_session_av_t* base;
-
- if(!self){
+
+ if(!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
video = (tdav_session_video_t*)self;
base = (tdav_session_av_t*)self;
-
+
tsk_safeobj_lock(base);
video->cb_rtcpevent.context = context;
video->cb_rtcpevent.func = func;
tsk_safeobj_unlock(base);
-
+
return 0;
}
@@ -1388,31 +1351,30 @@ static int tdav_session_video_rtcp_send_event(tmedia_session_t* self, tmedia_rtc
tdav_session_video_t* video;
tdav_session_av_t* base;
int ret = -1;
-
- if(!self){
+
+ if(!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
video = (tdav_session_video_t*)self;
base = (tdav_session_av_t*)self;
-
+
tsk_safeobj_lock(base);
- switch(event_type){
- case tmedia_rtcp_event_type_fir:
- {
- if(base->rtp_manager && base->rtp_manager->is_started){
- if(!ssrc_media){ // when called from C++/Java/C# bindings "ssrc_media" is a default parameter with value=0
- ssrc_media = base->rtp_manager->rtp.ssrc.remote;
- }
- TSK_DEBUG_INFO("Send FIR(%u)", ssrc_media);
- _tdav_session_video_local_request_idr(self, "CALLBACK", ssrc_media);
+ switch(event_type) {
+ case tmedia_rtcp_event_type_fir: {
+ if(base->rtp_manager && base->rtp_manager->is_started) {
+ if(!ssrc_media) { // when called from C++/Java/C# bindings "ssrc_media" is a default parameter with value=0
+ ssrc_media = base->rtp_manager->rtp.ssrc.remote;
}
- break;
+ TSK_DEBUG_INFO("Send FIR(%u)", ssrc_media);
+ _tdav_session_video_local_request_idr(self, "CALLBACK", ssrc_media);
}
+ break;
+ }
}
tsk_safeobj_unlock(base);
-
+
return ret;
}
@@ -1422,39 +1384,179 @@ static int tdav_session_video_rtcp_recv_event(tmedia_session_t* self, tmedia_rtc
tdav_session_video_t* video;
tdav_session_av_t* base;
int ret = -1;
-
- if (!self){
+
+ if (!self) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
video = (tdav_session_video_t*)self;
base = (tdav_session_av_t*)self;
-
+
tsk_safeobj_lock(base);
- switch(event_type){
- case tmedia_rtcp_event_type_fir:
- {
- _tdav_session_video_remote_requested_idr(video, ssrc_media);
- ret = 0;
- break;
- }
+ switch(event_type) {
+ case tmedia_rtcp_event_type_fir: {
+ _tdav_session_video_remote_requested_idr(video, ssrc_media);
+ ret = 0;
+ break;
+ }
}
tsk_safeobj_unlock(base);
-
+
+ return ret;
+}
+
+static int _tdav_session_video_timer_cb(const void* arg, tsk_timer_id_t timer_id)
+{
+ /*const*/ tdav_session_video_t* video = (/*const*/ tdav_session_video_t*)tsk_object_ref(TSK_OBJECT(arg));
+ tdav_session_av_t* base = (tdav_session_av_t*)video;
+ tmedia_session_t* session = (tmedia_session_t*)video;
+ if (video->timer.id_qos == timer_id) {
+ if (base->congestion_ctrl_enabled) {
+ tmedia_codec_video_t* codec = tsk_object_ref(TSK_OBJECT(video->encoder.codec));
+ if (codec && video->started) {
+ float q1, q2, q3, q4, q5, qavg, c;
+ uint64_t bw_est_kbps;
+ // Compute average QoS
+ tsk_mutex_lock(video->h_mutex_qos);
+ q1 = video->q1_n ? session->qos_metrics.q1 : 1.f;
+ q2 = video->q2_n ? session->qos_metrics.q2 : 1.f;
+ q3 = video->q3_n ? session->qos_metrics.q3 : 1.f;
+ q4 = video->q4_n ? session->qos_metrics.q4 : 1.f;
+ q5 = video->q5_n ? session->qos_metrics.q5 : 1.f;
+
+ // update bw info
+ if (_tdav_session_video_get_bw_usage_est(video, &bw_est_kbps, /*in=*/tsk_true, /*reset*/tsk_true) == 0 && bw_est_kbps != 0) {
+ session->qos_metrics.bw_down_est_kbps = (unsigned)bw_est_kbps;
+ }
+ if (_tdav_session_video_get_bw_usage_est(video, &bw_est_kbps, /*in=*/tsk_false, /*reset*/tsk_true) == 0 && bw_est_kbps != 0) {
+ session->qos_metrics.bw_up_est_kbps = (unsigned)bw_est_kbps;
+ }
+
+ // update video size
+ session->qos_metrics.video_out_width = codec->out.width;
+ session->qos_metrics.video_out_height = codec->out.height;
+ session->qos_metrics.video_in_width = codec->in.width;
+ session->qos_metrics.video_in_height = codec->in.height;
+ // reset n
+ video->q1_n = video->q2_n = video->q3_n = video->q4_n = video->q5_n = 0;
+ session->qos_metrics.q1 = session->qos_metrics.q2 = session->qos_metrics.q3 = session->qos_metrics.q4 = session->qos_metrics.q5 = 0.f;
+ // continue averaging
+ video->in_avg_fps_n = video->in_avg_fps_n ? 1 : 0;
+ video->dec_avg_time_n = video->dec_avg_time_n ? 1 : 0;
+ video->enc_avg_time_n = video->enc_avg_time_n ? 1 : 0;
+
+ // reset time
+ session->qos_metrics.last_update_time = tsk_time_now();
+ tsk_mutex_unlock(video->h_mutex_qos);
+
+#if 0
+ q1 /= 10.f;
+ q2 /= 10.f;
+ q3 /= 10.f;
+ q4 /= 10.f;
+ q5 /= 10.f;
+#endif
+
+ qavg = q1 * 0.1f + q2 * 0.4f + q3 * 0.1f + q4 * 0.1f + q5 * 0.3f;
+ c = /*fabs*/(qavg - session->qos_metrics.qvag);
+ c = c < 0.f ? -c : +c;
+ TSK_DEBUG_INFO("_tdav_session_video_timer_cb: q1=%f, q2=%f, q3=%f, q4=%f, q5=%f, qavg=%f, c=%f congestion_ctrl_enabled=true", q1, q2, q3, q4, q5, qavg, c);
+
+ if (c > 0.1f) { // quality change > or < 10%
+ // Update the upload bandwidth
+ int32_t bw_up_new_kbps, bw_up_base_kbps = base->bandwidth_max_upload_kbps; // user-defined maximum
+ bw_up_base_kbps = TSK_MIN(tmedia_get_video_bandwidth_kbps_2(codec->out.width, codec->out.height, codec->out.fps), bw_up_base_kbps);
+ bw_up_new_kbps = (int32_t)(bw_up_base_kbps * qavg);
+ TSK_DEBUG_INFO("Video quality change(%d%%) > 10%%, changing bw_up from base=%dkbps to new=%dkbps", (int)(c*100), bw_up_base_kbps, bw_up_new_kbps);
+ _tdav_session_video_bw_kbps(video, bw_up_new_kbps);
+ session->qos_metrics.qvag = qavg;
+ }
+ //!\ update qavg only if condition "c" is true
+ }
+ tsk_object_unref(TSK_OBJECT(codec));
+ }
+ else {
+ TSK_DEBUG_INFO("_tdav_session_video_timer_cb: congestion_ctrl_enabled=false");
+ }
+
+ if (video->started) {
+ video->timer.id_qos = tsk_timer_mgr_global_schedule(TDAV_SESSION_VIDEO_QOS_COMPUTE_INTERVAL, _tdav_session_video_timer_cb, video);
+ }
+ }
+ tsk_object_unref(TSK_OBJECT(video));
+ return 0;
+}
+
+static int _tdav_session_video_get_bw_usage_est(tdav_session_video_t* self, uint64_t* bw_kbps, tsk_bool_t in, tsk_bool_t reset)
+{
+ int ret;
+
+ tdav_session_av_t* base = (tdav_session_av_t*)self;
+ uint64_t bw_bytes = 0;
+ uint64_t* bytes_in = in ? &bw_bytes : tsk_null;
+ uint64_t* bytes_out = in ? tsk_null : &bw_bytes;
+ uint64_t* count_last_time = in ? &base->bytes_in.count_last_time : &base->bytes_out.count_last_time;
+ uint64_t* count = in ? &base->bytes_in.count : &base->bytes_out.count;
+
+ if ((ret = trtp_manager_get_bytes_count(base->rtp_manager, bytes_in, bytes_out)) == 0) {
+ uint64_t duration, bytes_count_now;
+ bytes_count_now = tsk_time_now();
+ duration = (bytes_count_now - *count_last_time);
+ if (*count_last_time != 0 && duration > 0) {
+ *bw_kbps = ((((bw_bytes - *count) * 8 * 1000) / 1024) / duration);
+ }
+ else {
+ ret = -2;
+ }
+ if (reset) {
+ *count_last_time = bytes_count_now;
+ *count = bw_bytes;
+ }
+ }
+
+ return ret;
+}
+
+static int _tdav_session_video_report_bw_usage_and_jcng(tdav_session_video_t* self)
+{
+ tdav_session_av_t* base = (tdav_session_av_t*)self;
+ tmedia_session_t* session = (tmedia_session_t*)self;
+ int ret = 0;
+ // congetion control
+ // send RTCP-REMB if:
+ // - congestion control is enabled and
+ // - fps changed or
+ // - first frame
+ if (base->congestion_ctrl_enabled && base->rtp_manager && session->qos_metrics.bw_down_est_kbps != 0) {
+# define TDAV_SESSION_VIDEO_QOS_COMPUTE_INTERVAL_HALF (TDAV_SESSION_VIDEO_QOS_COMPUTE_INTERVAL >> 1)
+ tsk_bool_t update_info = (self->fps_changed || self->decoder.codec_decoded_frames_count == 0 || session->qos_metrics.last_update_time == 0 || ((tsk_time_now() - session->qos_metrics.last_update_time) > TDAV_SESSION_VIDEO_QOS_COMPUTE_INTERVAL_HALF));
+ if (update_info) {
+ float jcng_q = 1.f;
+ if (self->jb && self->jb_enabled) {
+ float q5 = 1.f;
+ if ((ret = tdav_video_jb_get_qcong(self->jb, &q5)) == 0) {
+ jcng_q = q5;
+ }
+ }
+ self->fps_changed = tsk_false; // reset
+ TSK_DEBUG_INFO("video with congestion control enabled: est_bw_down=%llukbps, est_jcng=%f", session->qos_metrics.bw_down_est_kbps, jcng_q);
+ ret = trtp_manager_set_app_bw_and_jcng(base->rtp_manager, INT_MAX/* unused */, (int32_t)session->qos_metrics.bw_down_est_kbps, jcng_q);
+ }
+ }
return ret;
}
static int _tdav_session_video_set_callbacks(tmedia_session_t* self)
{
- if(self){
+ if(self) {
tsk_list_item_t* item;
- tsk_list_foreach(item, TMEDIA_SESSION(self)->neg_codecs){
+ tsk_list_foreach(item, TMEDIA_SESSION(self)->neg_codecs) {
// set codec callbacks
tmedia_codec_video_set_enc_callback(TMEDIA_CODEC_VIDEO(item->data), tdav_session_video_raw_cb, self);
tmedia_codec_video_set_dec_callback(TMEDIA_CODEC_VIDEO(item->data), tdav_session_video_decode_cb, self);
// set RED callback: redundant data to decode and send to the consumer
- if(TMEDIA_CODEC(item->data)->plugin == tdav_codec_red_plugin_def_t){
+ if(TMEDIA_CODEC(item->data)->plugin == tdav_codec_red_plugin_def_t) {
tdav_codec_red_set_callback((struct tdav_codec_red_s *)(item->data), tdav_session_video_rtp_cb, self);
}
}
@@ -1470,13 +1572,13 @@ static int _tdav_session_video_init(tdav_session_video_t *p_self, tmedia_type_t
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
-
+
/* init() base */
if ((ret = tdav_session_av_init(p_base, e_media_type)) != 0) {
TSK_DEBUG_ERROR("tdav_session_av_init(video) failed");
return ret;
}
-
+
/* init() self */
_tdav_session_video_set_defaults(p_self);
if (!p_self->encoder.h_mutex && !(p_self->encoder.h_mutex = tsk_mutex_create())) {
@@ -1494,12 +1596,23 @@ static int _tdav_session_video_init(tdav_session_video_t *p_self, tmedia_type_t
}
tdav_video_jb_set_callback(p_self->jb, _tdav_session_video_jb_cb, p_self);
}
-
+
+ /* producer's callbacks */
if (p_base->producer) {
tmedia_producer_set_enc_callback(p_base->producer, tdav_session_video_producer_enc_cb, p_self);
tmedia_producer_set_raw_callback(p_base->producer, tdav_session_video_raw_cb, p_self);
}
-
+
+ if (!p_self->h_mutex_qos && !(p_self->h_mutex_qos = tsk_mutex_create())) {
+ TSK_DEBUG_ERROR("Failed to create qos mutex");
+ return -5;
+ }
+
+ /* timer manager */
+ if(!p_self->timer.mgr) {
+ p_self->timer.mgr = tsk_timer_mgr_global_ref();
+ }
+
return 0;
}
@@ -1511,7 +1624,7 @@ static int _tdav_session_video_init(tdav_session_video_t *p_self, tmedia_type_t
static tsk_object_t* tdav_session_video_ctor(tsk_object_t * self, va_list * app)
{
tdav_session_video_t *video = self;
- if(video){
+ if(video) {
if (_tdav_session_video_init(video, tmedia_video)) {
return tsk_null;
}
@@ -1520,71 +1633,78 @@ static tsk_object_t* tdav_session_video_ctor(tsk_object_t * self, va_list * app)
}
/* destructor */
static tsk_object_t* tdav_session_video_dtor(tsk_object_t * self)
-{
+{
tdav_session_video_t *video = self;
TSK_DEBUG_INFO("*** tdav_session_video_t destroyed ***");
- if(video){
+ if(video) {
tdav_session_video_stop((tmedia_session_t*)video);
// deinit self (rtp manager should be destroyed after the producer)
TSK_OBJECT_SAFE_FREE(video->conv.toYUV420);
TSK_OBJECT_SAFE_FREE(video->conv.fromYUV420);
-
+
TSK_FREE(video->encoder.buffer);
TSK_FREE(video->encoder.conv_buffer);
TSK_FREE(video->decoder.buffer);
TSK_FREE(video->decoder.conv_buffer);
-
+
TSK_OBJECT_SAFE_FREE(video->encoder.codec);
TSK_OBJECT_SAFE_FREE(video->decoder.codec);
-
+
TSK_OBJECT_SAFE_FREE(video->avpf.packets);
-
+
TSK_OBJECT_SAFE_FREE(video->jb);
-
- if(video->encoder.h_mutex){
+
+ /* timer manager */
+ if (video->timer.mgr) {
+ tsk_timer_mgr_global_unref(&video->timer.mgr);
+ }
+
+ if(video->encoder.h_mutex) {
tsk_mutex_destroy(&video->encoder.h_mutex);
}
-
+
+ if (video->h_mutex_qos) {
+ tsk_mutex_destroy(&video->h_mutex_qos);
+ }
+
/* deinit() base */
tdav_session_av_deinit(TDAV_SESSION_AV(video));
-
+
TSK_DEBUG_INFO("*** Video session destroyed ***");
}
-
+
return self;
}
/* object definition */
-static const tsk_object_def_t tdav_session_video_def_s =
-{
+static const tsk_object_def_t tdav_session_video_def_s = {
sizeof(tdav_session_video_t),
- tdav_session_video_ctor,
+ tdav_session_video_ctor,
tdav_session_video_dtor,
- tmedia_session_cmp,
+ tmedia_session_cmp,
};
/* plugin definition*/
-static const tmedia_session_plugin_def_t tdav_session_video_plugin_def_s =
-{
+static const tmedia_session_plugin_def_t tdav_session_video_plugin_def_s = {
&tdav_session_video_def_s,
-
+
tmedia_video,
"video",
-
+
tdav_session_video_set,
tdav_session_video_get,
tdav_session_video_prepare,
tdav_session_video_start,
tdav_session_video_pause,
tdav_session_video_stop,
-
+
/* Audio part */
{ tsk_null },
-
+
tdav_session_video_get_lo,
tdav_session_video_set_ro,
-
+
/* T.140 */
{ tsk_null },
-
+
/* Rtcp */
{
tdav_session_video_rtcp_set_onevent_cbfn,
@@ -1601,7 +1721,7 @@ const tmedia_session_plugin_def_t *tdav_session_video_plugin_def_t = &tdav_sessi
static tsk_object_t* tdav_session_bfcpvideo_ctor(tsk_object_t * self, va_list * app)
{
tdav_session_video_t *video = self;
- if(video){
+ if(video) {
if (_tdav_session_video_init(video, tmedia_bfcp_video)) {
return tsk_null;
}
@@ -1609,36 +1729,34 @@ static tsk_object_t* tdav_session_bfcpvideo_ctor(tsk_object_t * self, va_list *
return self;
}
/* object definition */
-static const tsk_object_def_t tdav_session_bfcpvideo_def_s =
-{
+static const tsk_object_def_t tdav_session_bfcpvideo_def_s = {
sizeof(tdav_session_video_t),
- tdav_session_bfcpvideo_ctor,
+ tdav_session_bfcpvideo_ctor,
tdav_session_video_dtor,
- tmedia_session_cmp,
+ tmedia_session_cmp,
};
-static const tmedia_session_plugin_def_t tdav_session_bfcpvideo_plugin_def_s =
-{
+static const tmedia_session_plugin_def_t tdav_session_bfcpvideo_plugin_def_s = {
&tdav_session_bfcpvideo_def_s,
-
+
tmedia_bfcp_video,
"video",
-
+
tdav_session_video_set,
tdav_session_video_get,
tdav_session_video_prepare,
tdav_session_video_start,
tdav_session_video_pause,
tdav_session_video_stop,
-
+
/* Audio part */
{ tsk_null },
-
+
tdav_session_video_get_lo,
tdav_session_video_set_ro,
-
+
/* T.140 */
{ tsk_null },
-
+
/* Rtcp */
{
tdav_session_video_rtcp_set_onevent_cbfn,
diff --git a/tinyDAV/src/video/v4linux/tdav_producer_video_v4l2.c b/tinyDAV/src/video/v4linux/tdav_producer_video_v4l2.c
index ef3152a..cb023fc 100755
--- a/tinyDAV/src/video/v4linux/tdav_producer_video_v4l2.c
+++ b/tinyDAV/src/video/v4linux/tdav_producer_video_v4l2.c
@@ -1,17 +1,17 @@
/* Copyright (C) 2014 Mamadou DIOP.
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -53,60 +53,58 @@
#define V4L2_DEBUG_FATAL(FMT, ...) TSK_DEBUG_FATAL("[V4L2 Producer] " FMT, ##__VA_ARGS__)
typedef enum v4l2_io_method_e {
- V4L2_IO_METHOD_NONE = 0,
- V4L2_IO_METHOD_READ,
- V4L2_IO_METHOD_MMAP,
- V4L2_IO_METHOD_USERPTR,
+ V4L2_IO_METHOD_NONE = 0,
+ V4L2_IO_METHOD_READ,
+ V4L2_IO_METHOD_MMAP,
+ V4L2_IO_METHOD_USERPTR,
}
v4l2_io_method_t;
typedef struct v4l2_buffer_s {
- void *p_start;
- size_t n_length;
+ void *p_start;
+ size_t n_length;
}
v4l2_buffer_t;
// By preference order
static const v4l2_io_method_t io_method_prefs[] = {
- V4L2_IO_METHOD_MMAP,
- V4L2_IO_METHOD_USERPTR,
- V4L2_IO_METHOD_READ,
+ V4L2_IO_METHOD_MMAP,
+ V4L2_IO_METHOD_USERPTR,
+ V4L2_IO_METHOD_READ,
};
-static const unsigned int pix_format_prefs[] =
-{
- V4L2_PIX_FMT_YUV420,
- V4L2_PIX_FMT_NV12,
- V4L2_PIX_FMT_NV21,
- V4L2_PIX_FMT_YUYV,
- V4L2_PIX_FMT_UYVY, // SINCITY
- V4L2_PIX_FMT_RGB24,
- V4L2_PIX_FMT_RGB32,
- V4L2_PIX_FMT_MJPEG
+static const unsigned int pix_format_prefs[] = {
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_NV12,
+ V4L2_PIX_FMT_NV21,
+ V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_UYVY, // SINCITY
+ V4L2_PIX_FMT_RGB24,
+ V4L2_PIX_FMT_RGB32,
+ V4L2_PIX_FMT_MJPEG
};
-typedef struct tdav_producer_video_v4l2_s
-{
- TMEDIA_DECLARE_PRODUCER;
-
- tsk_bool_t b_muted;
- tsk_bool_t b_started;
- tsk_bool_t b_prepared;
- tsk_bool_t b_paused;
-
- int fd;
- v4l2_io_method_t io;
- struct v4l2_format fmt;
- struct v4l2_capability cap;
- struct v4l2_cropcap cropcap;
- struct v4l2_crop crop;
- unsigned int n_buffers;
- v4l2_buffer_t* p_buffers;
-
- tsk_timer_manager_handle_t *p_timer_mgr;
- tsk_timer_id_t id_timer_grab;
- uint64_t u_timout_grab;
-
- TSK_DECLARE_SAFEOBJ;
+typedef struct tdav_producer_video_v4l2_s {
+ TMEDIA_DECLARE_PRODUCER;
+
+ tsk_bool_t b_muted;
+ tsk_bool_t b_started;
+ tsk_bool_t b_prepared;
+ tsk_bool_t b_paused;
+
+ int fd;
+ v4l2_io_method_t io;
+ struct v4l2_format fmt;
+ struct v4l2_capability cap;
+ struct v4l2_cropcap cropcap;
+ struct v4l2_crop crop;
+ unsigned int n_buffers;
+ v4l2_buffer_t* p_buffers;
+
+ tsk_timer_manager_handle_t *p_timer_mgr;
+ tsk_timer_id_t id_timer_grab;
+ uint64_t u_timout_grab;
+
+ TSK_DECLARE_SAFEOBJ;
}
tdav_producer_video_v4l2_t;
@@ -128,956 +126,960 @@ static int _tdav_producer_video_v4l2_grab(tdav_producer_video_v4l2_t* p_self);
/* ============ Media Producer Interface ================= */
static int _tdav_producer_video_v4l2_set(tmedia_producer_t *p_self, const tmedia_param_t* pc_param)
{
- int ret = 0;
- tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
-
- if (!p_v4l2 || !pc_param) {
- V4L2_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (pc_param->value_type == tmedia_pvt_pchar) {
- if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd")) {
- V4L2_DEBUG_ERROR("Not implemented yet");
- }
- else if (tsk_striequals(pc_param->key, "src-hwnd")) {
- V4L2_DEBUG_ERROR("Not implemented yet");
- }
- }
- else if (pc_param->value_type == tmedia_pvt_int32) {
- if (tsk_striequals(pc_param->key, "mute")) {
- p_v4l2->b_muted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0);
- }
- }
-
- return ret;
+ int ret = 0;
+ tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
+
+ if (!p_v4l2 || !pc_param) {
+ V4L2_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (pc_param->value_type == tmedia_pvt_pchar) {
+ if (tsk_striequals(pc_param->key, "local-hwnd") || tsk_striequals(pc_param->key, "preview-hwnd")) {
+ V4L2_DEBUG_ERROR("Not implemented yet");
+ }
+ else if (tsk_striequals(pc_param->key, "src-hwnd")) {
+ V4L2_DEBUG_ERROR("Not implemented yet");
+ }
+ }
+ else if (pc_param->value_type == tmedia_pvt_int32) {
+ if (tsk_striequals(pc_param->key, "mute")) {
+ p_v4l2->b_muted = (TSK_TO_INT32((uint8_t*)pc_param->value) != 0);
+ }
+ }
+
+ return ret;
}
static int _tdav_producer_video_v4l2_prepare(tmedia_producer_t* p_self, const tmedia_codec_t* pc_codec)
{
- tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
- int ret = 0;
-
- if (!p_v4l2 || !pc_codec) {
- V4L2_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_v4l2);
-
- if (!p_v4l2->p_timer_mgr && !(p_v4l2->p_timer_mgr = tsk_timer_manager_create())) {
- V4L2_DEBUG_ERROR("Failed to create timer manager");
- ret = -2;
- goto bail;
- }
-
- TMEDIA_PRODUCER(p_v4l2)->video.fps = TMEDIA_CODEC_VIDEO(pc_codec)->out.fps;
- TMEDIA_PRODUCER(p_v4l2)->video.width = TMEDIA_CODEC_VIDEO(pc_codec)->out.width;
- TMEDIA_PRODUCER(p_v4l2)->video.height = TMEDIA_CODEC_VIDEO(pc_codec)->out.height;
-
- p_v4l2->u_timout_grab = (1000/TMEDIA_PRODUCER(p_v4l2)->video.fps);
-
- // prepare()
- if ((ret = _v4l2_prepare(p_v4l2))) {
- goto bail;
- }
-
- // update() - up to the "converter" to perform chroma conversion and scaling
- TMEDIA_PRODUCER(p_v4l2)->video.width = p_v4l2->fmt.fmt.pix.width;
- TMEDIA_PRODUCER(p_v4l2)->video.height = p_v4l2->fmt.fmt.pix.height;
+ tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
+ int ret = 0;
+
+ if (!p_v4l2 || !pc_codec) {
+ V4L2_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(p_v4l2);
+
+ if (!p_v4l2->p_timer_mgr && !(p_v4l2->p_timer_mgr = tsk_timer_manager_create())) {
+ V4L2_DEBUG_ERROR("Failed to create timer manager");
+ ret = -2;
+ goto bail;
+ }
+
+ TMEDIA_PRODUCER(p_v4l2)->video.fps = TMEDIA_CODEC_VIDEO(pc_codec)->out.fps;
+ TMEDIA_PRODUCER(p_v4l2)->video.width = TMEDIA_CODEC_VIDEO(pc_codec)->out.width;
+ TMEDIA_PRODUCER(p_v4l2)->video.height = TMEDIA_CODEC_VIDEO(pc_codec)->out.height;
+
+ p_v4l2->u_timout_grab = (1000/TMEDIA_PRODUCER(p_v4l2)->video.fps);
+
+ // prepare()
+ if ((ret = _v4l2_prepare(p_v4l2))) {
+ goto bail;
+ }
+
+ // update() - up to the "converter" to perform chroma conversion and scaling
+ TMEDIA_PRODUCER(p_v4l2)->video.width = p_v4l2->fmt.fmt.pix.width;
+ TMEDIA_PRODUCER(p_v4l2)->video.height = p_v4l2->fmt.fmt.pix.height;
#if V4L2_FAKE_UYVY
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_uyvy422;
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_uyvy422;
#else
- switch (p_v4l2->fmt.fmt.pix.pixelformat) {
- case V4L2_PIX_FMT_YUV420:
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_yuv420p;
- break;
- case V4L2_PIX_FMT_NV12:
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_nv12;
- break;
- case V4L2_PIX_FMT_NV21:
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_nv21;
- break;
- case V4L2_PIX_FMT_YUYV:
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_yuyv422;
- break;
- case V4L2_PIX_FMT_UYVY:
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_uyvy422; // SINCITY
- break;
- case V4L2_PIX_FMT_RGB24:
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_rgb24;
- break;
- case V4L2_PIX_FMT_RGB32:
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_rgb32;
- break;
- case V4L2_PIX_FMT_MJPEG:
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_mjpeg;
- break;
- default:
- V4L2_DEBUG_ERROR("Failed to match negotiated format: %d", p_v4l2->fmt.fmt.pix.pixelformat);
- ret = -1;
- goto bail;
- }
+ switch (p_v4l2->fmt.fmt.pix.pixelformat) {
+ case V4L2_PIX_FMT_YUV420:
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_yuv420p;
+ break;
+ case V4L2_PIX_FMT_NV12:
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_nv12;
+ break;
+ case V4L2_PIX_FMT_NV21:
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_nv21;
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_yuyv422;
+ break;
+ case V4L2_PIX_FMT_UYVY:
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_uyvy422; // SINCITY
+ break;
+ case V4L2_PIX_FMT_RGB24:
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_rgb24;
+ break;
+ case V4L2_PIX_FMT_RGB32:
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_rgb32;
+ break;
+ case V4L2_PIX_FMT_MJPEG:
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_mjpeg;
+ break;
+ default:
+ V4L2_DEBUG_ERROR("Failed to match negotiated format: %d", p_v4l2->fmt.fmt.pix.pixelformat);
+ ret = -1;
+ goto bail;
+ }
#endif /* V4L2_FAKE_UYVY */
- V4L2_DEBUG_INFO("Negotiated caps: fps=%d, width=%d, height=%d, chroma=%d",
- TMEDIA_PRODUCER(p_v4l2)->video.fps,
- TMEDIA_PRODUCER(p_v4l2)->video.width,
- TMEDIA_PRODUCER(p_v4l2)->video.height,
- TMEDIA_PRODUCER(p_v4l2)->video.chroma);
- p_v4l2->b_prepared = (ret == 0) ? tsk_true : tsk_false;
-
+ V4L2_DEBUG_INFO("Negotiated caps: fps=%d, width=%d, height=%d, chroma=%d",
+ TMEDIA_PRODUCER(p_v4l2)->video.fps,
+ TMEDIA_PRODUCER(p_v4l2)->video.width,
+ TMEDIA_PRODUCER(p_v4l2)->video.height,
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma);
+ p_v4l2->b_prepared = (ret == 0) ? tsk_true : tsk_false;
+
bail:
- tsk_safeobj_unlock(p_v4l2);
- return ret;
+ tsk_safeobj_unlock(p_v4l2);
+ return ret;
}
static int _tdav_producer_video_v4l2_start(tmedia_producer_t* p_self)
{
- tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
- int ret = 0;
-
- if (!p_v4l2) {
- V4L2_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_v4l2);
-
- if (!p_v4l2->b_prepared) {
- V4L2_DEBUG_INFO("Not prepared");
- ret = -1;
- goto bail;
- }
-
- p_v4l2->b_paused = tsk_false;
-
- if (p_v4l2->b_started) {
- V4L2_DEBUG_INFO("Already started");
- goto bail;
- }
-
- if ((ret = tsk_timer_manager_start(p_v4l2->p_timer_mgr))) {
- goto bail;
- }
-
- // start()
- if ((ret = _v4l2_start(p_v4l2))) {
- goto bail;
- }
-
- p_v4l2->b_started = tsk_true;
-
- // Schedule frame grabbing
- p_v4l2->id_timer_grab = tsk_timer_manager_schedule(p_v4l2->p_timer_mgr, p_v4l2->u_timout_grab, _tdav_producer_video_v4l2_timer_cb, p_v4l2);
- if (!TSK_TIMER_ID_IS_VALID(p_v4l2->id_timer_grab)) {
- V4L2_DEBUG_ERROR("Failed to schedule timer with timeout=%llu", p_v4l2->u_timout_grab);
- ret = -2;
- goto bail;
- }
+ tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
+ int ret = 0;
+
+ if (!p_v4l2) {
+ V4L2_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(p_v4l2);
+
+ if (!p_v4l2->b_prepared) {
+ V4L2_DEBUG_INFO("Not prepared");
+ ret = -1;
+ goto bail;
+ }
+
+ p_v4l2->b_paused = tsk_false;
+
+ if (p_v4l2->b_started) {
+ V4L2_DEBUG_INFO("Already started");
+ goto bail;
+ }
+
+ if ((ret = tsk_timer_manager_start(p_v4l2->p_timer_mgr))) {
+ goto bail;
+ }
+
+ // start()
+ if ((ret = _v4l2_start(p_v4l2))) {
+ goto bail;
+ }
+
+ p_v4l2->b_started = tsk_true;
+
+ // Schedule frame grabbing
+ p_v4l2->id_timer_grab = tsk_timer_manager_schedule(p_v4l2->p_timer_mgr, p_v4l2->u_timout_grab, _tdav_producer_video_v4l2_timer_cb, p_v4l2);
+ if (!TSK_TIMER_ID_IS_VALID(p_v4l2->id_timer_grab)) {
+ V4L2_DEBUG_ERROR("Failed to schedule timer with timeout=%llu", p_v4l2->u_timout_grab);
+ ret = -2;
+ goto bail;
+ }
bail:
- if (ret) {
- _v4l2_stop(p_v4l2);
- p_v4l2->b_started = tsk_false;
- if (p_v4l2->p_timer_mgr) {
- tsk_timer_manager_stop(p_v4l2->p_timer_mgr);
- }
- }
- else {
- V4L2_DEBUG_INFO("Started :)");
- }
- tsk_safeobj_unlock(p_v4l2);
-
- return ret;
+ if (ret) {
+ _v4l2_stop(p_v4l2);
+ p_v4l2->b_started = tsk_false;
+ if (p_v4l2->p_timer_mgr) {
+ tsk_timer_manager_stop(p_v4l2->p_timer_mgr);
+ }
+ }
+ else {
+ V4L2_DEBUG_INFO("Started :)");
+ }
+ tsk_safeobj_unlock(p_v4l2);
+
+ return ret;
}
static int _tdav_producer_video_v4l2_pause(tmedia_producer_t* p_self)
{
- tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
- int ret;
+ tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
+ int ret;
- if (!p_v4l2) {
- V4L2_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if (!p_v4l2) {
+ V4L2_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- tsk_safeobj_lock(p_v4l2);
+ tsk_safeobj_lock(p_v4l2);
- if ((ret = _v4l2_pause(p_v4l2))) {
- goto bail;
- }
+ if ((ret = _v4l2_pause(p_v4l2))) {
+ goto bail;
+ }
- p_v4l2->b_paused = tsk_true;
- goto bail;
+ p_v4l2->b_paused = tsk_true;
+ goto bail;
bail:
- tsk_safeobj_unlock(p_v4l2);
+ tsk_safeobj_unlock(p_v4l2);
- return ret;
+ return ret;
}
static int _tdav_producer_video_v4l2_stop(tmedia_producer_t* p_self)
{
- tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
-
- if (!p_v4l2) {
- V4L2_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_v4l2);
-
- if (!p_v4l2->b_started) {
- V4L2_DEBUG_INFO("Already stopped");
- goto bail;
- }
-
- if (p_v4l2->p_timer_mgr) {
- tsk_timer_manager_stop(p_v4l2->p_timer_mgr);
- }
-
- // next start will be called after prepare()
- _v4l2_unprepare(p_v4l2); // stop() then unprepare()
-
- p_v4l2->b_started = tsk_false;
- p_v4l2->b_paused = tsk_false;
- p_v4l2->b_prepared = tsk_false;
+ tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)p_self;
+
+ if (!p_v4l2) {
+ V4L2_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(p_v4l2);
+
+ if (!p_v4l2->b_started) {
+ V4L2_DEBUG_INFO("Already stopped");
+ goto bail;
+ }
+
+ if (p_v4l2->p_timer_mgr) {
+ tsk_timer_manager_stop(p_v4l2->p_timer_mgr);
+ }
+
+ // next start will be called after prepare()
+ _v4l2_unprepare(p_v4l2); // stop() then unprepare()
+
+ p_v4l2->b_started = tsk_false;
+ p_v4l2->b_paused = tsk_false;
+ p_v4l2->b_prepared = tsk_false;
bail:
- tsk_safeobj_unlock(p_v4l2);
- V4L2_DEBUG_INFO("Stopped");
+ tsk_safeobj_unlock(p_v4l2);
+ V4L2_DEBUG_INFO("Stopped");
- return 0;
+ return 0;
}
static int _v4l2_prepare(tdav_producer_video_v4l2_t* p_self)
{
-const char* device_names[] =
- {
- tmedia_producer_get_friendly_name(TMEDIA_PRODUCER(p_self)->plugin->type),
- "/dev/video0",
- }; // FIXME: VIDIOC_C_ENUM_INPUT and choose best one
- const char* device_name;
- int i, err = -1;
- struct stat st;
- unsigned int min;
-
- V4L2_DEBUG_INFO("--- PREPARE ---");
-
- if (p_self->fd > 0) {
- V4L2_DEBUG_WARN("Producer already prepared");
- return 0;
- }
- for (i = 0; i < sizeof(device_names)/sizeof(device_names[0]); ++i) {
- if ((device_name = device_names[i])) {
- V4L2_DEBUG_INFO("Preparing '%s'...", device_name);
- if (stat(device_name, &st) == -1) {
- V4L2_DEBUG_WARN("stat('%s'): %d, %s", device_name, errno, strerror(errno));
- continue;
- }
- if (!S_ISCHR(st.st_mode)) {
- V4L2_DEBUG_WARN("'%s' not a valid device", device_name);
- continue;
- }
- if ((p_self->fd = open(device_name, O_RDWR /* required */ | O_NONBLOCK, 0)) == -1) {
- V4L2_DEBUG_WARN("Failed to open '%s': %d, %s\n", device_name, errno, strerror(errno));
- continue;
- }
- V4L2_DEBUG_INFO("'%s' successfully opened", device_name);
- }
- }
- if (p_self->fd == -1) {
- V4L2_DEBUG_ERROR("No valid device found");
- goto bail;
- }
-
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QUERYCAP, &p_self->cap)) {
- if (EINVAL == errno) {
- V4L2_DEBUG_ERROR("%s is no V4L2 device", device_name);
- goto bail;
- } else {
- V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_QUERYCAP) failed: %s error %d", device_name, strerror(errno), errno);
- goto bail;
- }
- }
-
- if (!(p_self->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
- V4L2_DEBUG_ERROR("%s is no video capture device", device_name);
- goto bail;
- }
-
- // Get best io method
- p_self->io = V4L2_IO_METHOD_NONE;
- for (i = 0; i < sizeof(io_method_prefs)/sizeof(io_method_prefs[0]) && p_self->io == V4L2_IO_METHOD_NONE; ++i) {
- V4L2_DEBUG_INFO("Trying with io method=%d", io_method_prefs[i]);
- switch (io_method_prefs[i]) {
- case V4L2_IO_METHOD_READ:
- if (!(p_self->cap.capabilities & V4L2_CAP_READWRITE)) {
- V4L2_DEBUG_WARN("%s does not support read i/o", device_name);
- continue;
- }
- p_self->io = io_method_prefs[i];
- break;
-
- case V4L2_IO_METHOD_MMAP:
- case V4L2_IO_METHOD_USERPTR:
- if (!(p_self->cap.capabilities & V4L2_CAP_STREAMING)) {
- V4L2_DEBUG_WARN("%s does not support streaming i/o", device_name);
- continue;
- }
- p_self->io = io_method_prefs[i];
- break;
- }
- }
- if (p_self->io == V4L2_IO_METHOD_NONE) {
- V4L2_DEBUG_ERROR("Failed to peek an i/o method for '%s' device", device_name);
- goto bail;
- }
- V4L2_DEBUG_INFO("i/o method for '%s' device is %d", device_name, p_self->io);
-
- /* Select video input, video standard and tune here. */
-
- V4L2_CLEAR(p_self->cropcap);
-
- p_self->cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-
- if (0 == _v4l2_xioctl(p_self->fd, VIDIOC_CROPCAP, &p_self->cropcap)) {
- p_self->crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- p_self->crop.c = p_self->cropcap.defrect; /* reset to default */
-
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_S_CROP, &p_self->crop)) {
- switch (errno) {
- case EINVAL:
- default:
- V4L2_DEBUG_INFO("'%s' device doesn't support cropping", device_name);
- break;
- }
- }
- else {
- V4L2_DEBUG_INFO("'%s' device supports cropping with type = %d", device_name, p_self->crop.type);
- }
- } else {
- V4L2_DEBUG_INFO("'%s' device doesn't support cropping", device_name);
- }
-
- /* Best format */
- V4L2_CLEAR(p_self->fmt);
- // get()
- if (_v4l2_get_best_format(p_self, device_name, &p_self->fmt) != 0) {
- V4L2_DEBUG_ERROR("Failed to peek best format for '%s' device", device_name);
- goto bail;
- }
- // set()
- if (_v4l2_xioctl(p_self->fd, VIDIOC_S_FMT, &p_self->fmt) == -1) {
- goto bail;
- }
- V4L2_DEBUG_INFO("device '%s' best format: width:%d, height:%d, field:%d, pixelformat:%d",
- device_name, p_self->fmt.fmt.pix.width, p_self->fmt.fmt.pix.height, p_self->fmt.fmt.pix.field, p_self->fmt.fmt.pix.pixelformat);
-
- /* Buggy driver paranoia. */
+ const char* device_names[] = {
+ tmedia_producer_get_friendly_name(TMEDIA_PRODUCER(p_self)->plugin->type),
+ "/dev/video0",
+    }; // FIXME: use VIDIOC_ENUMINPUT to enumerate inputs and choose the best one
+ const char* device_name;
+ int i, err = -1;
+ struct stat st;
+ unsigned int min;
+
+ V4L2_DEBUG_INFO("--- PREPARE ---");
+
+ if (p_self->fd > 0) {
+ V4L2_DEBUG_WARN("Producer already prepared");
+ return 0;
+ }
+ for (i = 0; i < sizeof(device_names)/sizeof(device_names[0]); ++i) {
+ if ((device_name = device_names[i])) {
+ V4L2_DEBUG_INFO("Preparing '%s'...", device_name);
+ if (stat(device_name, &st) == -1) {
+ V4L2_DEBUG_WARN("stat('%s'): %d, %s", device_name, errno, strerror(errno));
+ continue;
+ }
+ if (!S_ISCHR(st.st_mode)) {
+ V4L2_DEBUG_WARN("'%s' not a valid device", device_name);
+ continue;
+ }
+ if ((p_self->fd = open(device_name, O_RDWR /* required */ | O_NONBLOCK, 0)) == -1) {
+ V4L2_DEBUG_WARN("Failed to open '%s': %d, %s\n", device_name, errno, strerror(errno));
+ continue;
+ }
+ V4L2_DEBUG_INFO("'%s' successfully opened", device_name);
+ }
+ }
+ if (p_self->fd == -1) {
+ V4L2_DEBUG_ERROR("No valid device found");
+ goto bail;
+ }
+
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QUERYCAP, &p_self->cap)) {
+ if (EINVAL == errno) {
+ V4L2_DEBUG_ERROR("%s is no V4L2 device", device_name);
+ goto bail;
+ }
+ else {
+ V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_QUERYCAP) failed: %s error %d", device_name, strerror(errno), errno);
+ goto bail;
+ }
+ }
+
+ if (!(p_self->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+ V4L2_DEBUG_ERROR("%s is no video capture device", device_name);
+ goto bail;
+ }
+
+ // Get best io method
+ p_self->io = V4L2_IO_METHOD_NONE;
+ for (i = 0; i < sizeof(io_method_prefs)/sizeof(io_method_prefs[0]) && p_self->io == V4L2_IO_METHOD_NONE; ++i) {
+ V4L2_DEBUG_INFO("Trying with io method=%d", io_method_prefs[i]);
+ switch (io_method_prefs[i]) {
+ case V4L2_IO_METHOD_READ:
+ if (!(p_self->cap.capabilities & V4L2_CAP_READWRITE)) {
+ V4L2_DEBUG_WARN("%s does not support read i/o", device_name);
+ continue;
+ }
+ p_self->io = io_method_prefs[i];
+ break;
+
+ case V4L2_IO_METHOD_MMAP:
+ case V4L2_IO_METHOD_USERPTR:
+ if (!(p_self->cap.capabilities & V4L2_CAP_STREAMING)) {
+ V4L2_DEBUG_WARN("%s does not support streaming i/o", device_name);
+ continue;
+ }
+ p_self->io = io_method_prefs[i];
+ break;
+ }
+ }
+ if (p_self->io == V4L2_IO_METHOD_NONE) {
+ V4L2_DEBUG_ERROR("Failed to peek an i/o method for '%s' device", device_name);
+ goto bail;
+ }
+ V4L2_DEBUG_INFO("i/o method for '%s' device is %d", device_name, p_self->io);
+
+ /* Select video input, video standard and tune here. */
+
+ V4L2_CLEAR(p_self->cropcap);
+
+ p_self->cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (0 == _v4l2_xioctl(p_self->fd, VIDIOC_CROPCAP, &p_self->cropcap)) {
+ p_self->crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ p_self->crop.c = p_self->cropcap.defrect; /* reset to default */
+
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_S_CROP, &p_self->crop)) {
+ switch (errno) {
+ case EINVAL:
+ default:
+ V4L2_DEBUG_INFO("'%s' device doesn't support cropping", device_name);
+ break;
+ }
+ }
+ else {
+ V4L2_DEBUG_INFO("'%s' device supports cropping with type = %d", device_name, p_self->crop.type);
+ }
+ }
+ else {
+ V4L2_DEBUG_INFO("'%s' device doesn't support cropping", device_name);
+ }
+
+ /* Best format */
+ V4L2_CLEAR(p_self->fmt);
+ // get()
+ if (_v4l2_get_best_format(p_self, device_name, &p_self->fmt) != 0) {
+ V4L2_DEBUG_ERROR("Failed to peek best format for '%s' device", device_name);
+ goto bail;
+ }
+ // set()
+ if (_v4l2_xioctl(p_self->fd, VIDIOC_S_FMT, &p_self->fmt) == -1) {
+ goto bail;
+ }
+ V4L2_DEBUG_INFO("device '%s' best format: width:%d, height:%d, field:%d, pixelformat:%d",
+ device_name, p_self->fmt.fmt.pix.width, p_self->fmt.fmt.pix.height, p_self->fmt.fmt.pix.field, p_self->fmt.fmt.pix.pixelformat);
+
+ /* Buggy driver paranoia. */
#if 1
- min = p_self->fmt.fmt.pix.width * 2;
- if (p_self->fmt.fmt.pix.bytesperline < min) {
- p_self->fmt.fmt.pix.bytesperline = min;
- }
- min = p_self->fmt.fmt.pix.bytesperline * p_self->fmt.fmt.pix.height;
- if (p_self->fmt.fmt.pix.sizeimage < min) {
- p_self->fmt.fmt.pix.sizeimage = min;
- }
+ min = p_self->fmt.fmt.pix.width * 2;
+ if (p_self->fmt.fmt.pix.bytesperline < min) {
+ p_self->fmt.fmt.pix.bytesperline = min;
+ }
+ min = p_self->fmt.fmt.pix.bytesperline * p_self->fmt.fmt.pix.height;
+ if (p_self->fmt.fmt.pix.sizeimage < min) {
+ p_self->fmt.fmt.pix.sizeimage = min;
+ }
#endif
- switch (p_self->io) {
- case V4L2_IO_METHOD_READ:
- if (_v4l2_init_read(p_self, p_self->fmt.fmt.pix.sizeimage) != 0) {
- goto bail;
- }
- break;
-
- case V4L2_IO_METHOD_MMAP:
- if (_v4l2_init_mmap(p_self, device_name) != 0) {
- goto bail;
- }
- break;
-
- case V4L2_IO_METHOD_USERPTR:
- if (_v4l2_init_userp(p_self, p_self->fmt.fmt.pix.sizeimage, device_name) != 0) {
- goto bail;
- }
- break;
- }
- V4L2_DEBUG_INFO("'%s' device initialized using i/o method=%d", device_name, p_self->io);
-
- // all is OK
- err = 0;
+ switch (p_self->io) {
+ case V4L2_IO_METHOD_READ:
+ if (_v4l2_init_read(p_self, p_self->fmt.fmt.pix.sizeimage) != 0) {
+ goto bail;
+ }
+ break;
+
+ case V4L2_IO_METHOD_MMAP:
+ if (_v4l2_init_mmap(p_self, device_name) != 0) {
+ goto bail;
+ }
+ break;
+
+ case V4L2_IO_METHOD_USERPTR:
+ if (_v4l2_init_userp(p_self, p_self->fmt.fmt.pix.sizeimage, device_name) != 0) {
+ goto bail;
+ }
+ break;
+ }
+ V4L2_DEBUG_INFO("'%s' device initialized using i/o method=%d", device_name, p_self->io);
+
+ // all is OK
+ err = 0;
bail:
- if (err) {
- _v4l2_unprepare(p_self);
- }
- else {
- V4L2_DEBUG_INFO("Prepared :)");
- }
- return err;
+ if (err) {
+ _v4l2_unprepare(p_self);
+ }
+ else {
+ V4L2_DEBUG_INFO("Prepared :)");
+ }
+ return err;
}
static int _v4l2_start(tdav_producer_video_v4l2_t* p_self)
{
- unsigned int i;
- enum v4l2_buf_type type;
-
- V4L2_DEBUG_INFO("--- START ---");
-
- if (p_self->b_started) {
- V4L2_DEBUG_WARN("Already started");
- return 0;
- }
-
- switch (p_self->io) {
- case V4L2_IO_METHOD_READ:
- /* Nothing to do. */
- break;
-
- case V4L2_IO_METHOD_MMAP:
- for (i = 0; i < p_self->n_buffers; ++i) {
- struct v4l2_buffer buf;
-
- V4L2_CLEAR(buf);
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_MMAP;
- buf.index = i;
-
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QBUF, &buf)) {
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_QBUF) failed: %s error %d", strerror(errno), errno);
- return -1;
- }
- }
- type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_STREAMON, &type)) {
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_STREAMON) failed: %s error %d", strerror(errno), errno);
- return -1;
- }
- break;
-
- case V4L2_IO_METHOD_USERPTR:
- for (i = 0; i < p_self->n_buffers; ++i) {
- struct v4l2_buffer buf;
-
- V4L2_CLEAR(buf);
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_USERPTR;
- buf.index = i;
- buf.m.userptr = (unsigned long)p_self->p_buffers[i].p_start;
- buf.length = p_self->p_buffers[i].n_length;
-
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QBUF, &buf)) {
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_QBUF) failed: %s error %d", strerror(errno), errno);
- return -1;
- }
- }
- type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_STREAMON, &type)) {
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_STREAMON) failed: %s error %d", strerror(errno), errno);
- return -1;
- }
- break;
- }
- return 0;
+ unsigned int i;
+ enum v4l2_buf_type type;
+
+ V4L2_DEBUG_INFO("--- START ---");
+
+ if (p_self->b_started) {
+ V4L2_DEBUG_WARN("Already started");
+ return 0;
+ }
+
+ switch (p_self->io) {
+ case V4L2_IO_METHOD_READ:
+ /* Nothing to do. */
+ break;
+
+ case V4L2_IO_METHOD_MMAP:
+ for (i = 0; i < p_self->n_buffers; ++i) {
+ struct v4l2_buffer buf;
+
+ V4L2_CLEAR(buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = i;
+
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QBUF, &buf)) {
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_QBUF) failed: %s error %d", strerror(errno), errno);
+ return -1;
+ }
+ }
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_STREAMON, &type)) {
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_STREAMON) failed: %s error %d", strerror(errno), errno);
+ return -1;
+ }
+ break;
+
+ case V4L2_IO_METHOD_USERPTR:
+ for (i = 0; i < p_self->n_buffers; ++i) {
+ struct v4l2_buffer buf;
+
+ V4L2_CLEAR(buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_USERPTR;
+ buf.index = i;
+ buf.m.userptr = (unsigned long)p_self->p_buffers[i].p_start;
+ buf.length = p_self->p_buffers[i].n_length;
+
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QBUF, &buf)) {
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_QBUF) failed: %s error %d", strerror(errno), errno);
+ return -1;
+ }
+ }
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_STREAMON, &type)) {
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_STREAMON) failed: %s error %d", strerror(errno), errno);
+ return -1;
+ }
+ break;
+ }
+ return 0;
}
static int _v4l2_pause(tdav_producer_video_v4l2_t* p_self)
{
- V4L2_DEBUG_INFO("--- PAUSE ---");
+ V4L2_DEBUG_INFO("--- PAUSE ---");
- return 0;
+ return 0;
}
static int _v4l2_stop(tdav_producer_video_v4l2_t* p_self)
{
- enum v4l2_buf_type type;
-
- V4L2_DEBUG_INFO("--- STOP ---");
-
- switch (p_self->io) {
- case V4L2_IO_METHOD_READ:
- /* Nothing to do. */
- break;
-
- case V4L2_IO_METHOD_MMAP:
- case V4L2_IO_METHOD_USERPTR:
- type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (p_self->fd != -1 && -1 == _v4l2_xioctl(p_self->fd, VIDIOC_STREAMOFF, &type)) {
- if (p_self->b_started) { // display error only if the device is marked as "started"
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_STREAMOFF) failed: %s error %d", strerror(errno), errno);
- return -1;
- }
- }
- break;
- }
-
- return 0;
+ enum v4l2_buf_type type;
+
+ V4L2_DEBUG_INFO("--- STOP ---");
+
+ switch (p_self->io) {
+ case V4L2_IO_METHOD_READ:
+ /* Nothing to do. */
+ break;
+
+ case V4L2_IO_METHOD_MMAP:
+ case V4L2_IO_METHOD_USERPTR:
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (p_self->fd != -1 && -1 == _v4l2_xioctl(p_self->fd, VIDIOC_STREAMOFF, &type)) {
+ if (p_self->b_started) { // display error only if the device is marked as "started"
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_STREAMOFF) failed: %s error %d", strerror(errno), errno);
+ return -1;
+ }
+ }
+ break;
+ }
+
+ return 0;
}
static int _v4l2_unprepare(tdav_producer_video_v4l2_t* p_self)
{
- unsigned int i;
- V4L2_DEBUG_INFO("--- UNPREPARE ---");
-
- _v4l2_stop(p_self);
-
- switch (p_self->io) {
- case V4L2_IO_METHOD_READ:
- if (p_self->p_buffers && p_self->p_buffers[0].p_start) {
- free(p_self->p_buffers[0].p_start);
- p_self->p_buffers[0].p_start = NULL;
- }
- break;
- case V4L2_IO_METHOD_MMAP:
- for (i = 0; i < p_self->n_buffers; ++i) {
- if (p_self->p_buffers && p_self->p_buffers[i].p_start) {
- if (-1 == munmap(p_self->p_buffers[i].p_start, p_self->p_buffers[i].n_length)) {
- V4L2_DEBUG_ERROR("munmap(%d) failed", i);
- }
- }
- }
- break;
-
- case V4L2_IO_METHOD_USERPTR:
- for (i = 0; i < p_self->n_buffers; ++i) {
- if (p_self->p_buffers && p_self->p_buffers[i].p_start) {
- free(p_self->p_buffers[i].p_start);
- p_self->p_buffers[i].p_start = NULL;
- }
- }
- break;
- }
-
- if (p_self->p_buffers) {
- free(p_self->p_buffers);
- p_self->p_buffers = NULL;
- }
- p_self->n_buffers = 0;
-
- if (p_self->fd > 0) {
- close(p_self->fd);
- }
- p_self->fd = -1;
-
- return 0;
+ unsigned int i;
+ V4L2_DEBUG_INFO("--- UNPREPARE ---");
+
+ _v4l2_stop(p_self);
+
+ switch (p_self->io) {
+ case V4L2_IO_METHOD_READ:
+ if (p_self->p_buffers && p_self->p_buffers[0].p_start) {
+ free(p_self->p_buffers[0].p_start);
+ p_self->p_buffers[0].p_start = NULL;
+ }
+ break;
+ case V4L2_IO_METHOD_MMAP:
+ for (i = 0; i < p_self->n_buffers; ++i) {
+ if (p_self->p_buffers && p_self->p_buffers[i].p_start) {
+ if (-1 == munmap(p_self->p_buffers[i].p_start, p_self->p_buffers[i].n_length)) {
+ V4L2_DEBUG_ERROR("munmap(%d) failed", i);
+ }
+ }
+ }
+ break;
+
+ case V4L2_IO_METHOD_USERPTR:
+ for (i = 0; i < p_self->n_buffers; ++i) {
+ if (p_self->p_buffers && p_self->p_buffers[i].p_start) {
+ free(p_self->p_buffers[i].p_start);
+ p_self->p_buffers[i].p_start = NULL;
+ }
+ }
+ break;
+ }
+
+ if (p_self->p_buffers) {
+ free(p_self->p_buffers);
+ p_self->p_buffers = NULL;
+ }
+ p_self->n_buffers = 0;
+
+ if (p_self->fd > 0) {
+ close(p_self->fd);
+ }
+ p_self->fd = -1;
+
+ return 0;
}
static int _v4l2_xioctl(int fh, int request, void *arg)
{
- int r;
- do {
- r = ioctl(fh, request, arg);
- } while (-1 == r && EINTR == errno);
- return r;
+ int r;
+ do {
+ r = ioctl(fh, request, arg);
+ }
+ while (-1 == r && EINTR == errno);
+ return r;
}
static int _v4l2_get_best_format(tdav_producer_video_v4l2_t* p_self, const char* device_name, struct v4l2_format* fmt_ret)
{
- struct v4l2_format fmt, fmt_default, fmt_best;
- struct v4l2_fmtdesc fmtdesc;
- int i, j, field, size;
- int ok = 0;
-
- if (!fmt_ret) {
- V4L2_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- // get default format
- V4L2_CLEAR(fmt_default);
- fmt_default.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (_v4l2_xioctl(p_self->fd, VIDIOC_G_FMT, &fmt_default) == -1) {
- V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_G_FMT) failed: %s error %d", device_name, strerror(errno), errno);
- return -1;
- }
- V4L2_DEBUG_INFO("device '%s' default format: width:%d, height:%d, field:%d, pixelformat:%d",
- device_name, fmt_default.fmt.pix.width, fmt_default.fmt.pix.height, fmt_default.fmt.pix.field, fmt_default.fmt.pix.pixelformat);
-
- /* Best format (using preference) */
- V4L2_CLEAR(fmt);
- fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- for (i = 0; i < sizeof(pix_format_prefs)/sizeof(pix_format_prefs[0]); ++i) {
- for (size = 0; size < 2; ++size) {
- for (field = 0; field < 2; ++field) {
- fmt.fmt.pix.width = (size == 0) ? TMEDIA_PRODUCER(p_self)->video.width : fmt_default.fmt.pix.width;
- fmt.fmt.pix.height = (size == 0) ? TMEDIA_PRODUCER(p_self)->video.height : fmt_default.fmt.pix.height;
- fmt.fmt.pix.pixelformat = pix_format_prefs[i];
- fmt.fmt.pix.field = (field == 0) ? V4L2_FIELD_NONE : V4L2_FIELD_INTERLACED;
- if ((ok = (_v4l2_xioctl(p_self->fd, VIDIOC_TRY_FMT, &fmt) != -1))) {
- goto bail;
- }
- }
- }
- }
-
- /* Best format (using caps) */
- for (i = 0; ; ++i) {
- V4L2_CLEAR(fmtdesc);
- fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- fmtdesc.index = i;
-
- if (_v4l2_xioctl(p_self->fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1) {
- break;
- }
- V4L2_DEBUG_INFO("CAPS: device name=%s, fmtdesc index=%d, type=%d, description=%s, pixelformat=%d",
- device_name, fmtdesc.index, fmtdesc.type, fmtdesc.description, fmtdesc.pixelformat);
- for (j = 0; j < sizeof(pix_format_prefs)/sizeof(pix_format_prefs[0]); ++j) {
- if (fmtdesc.pixelformat == pix_format_prefs[j]) {
- for (size = 0; size < 2; ++size) {
- for (field = 0; field < 2; ++field) {
- fmt.fmt.pix.width = (size == 0) ? TMEDIA_PRODUCER(p_self)->video.width : fmt_default.fmt.pix.width;
- fmt.fmt.pix.height = (size == 0) ? TMEDIA_PRODUCER(p_self)->video.height : fmt_default.fmt.pix.height;
- fmt.fmt.pix.pixelformat = pix_format_prefs[i];
- fmt.fmt.pix.field = (field == 0) ? V4L2_FIELD_NONE : V4L2_FIELD_INTERLACED;
- if ((ok = (_v4l2_xioctl(p_self->fd, VIDIOC_TRY_FMT, &fmt) != -1))) {
- goto bail;
- }
- }
- }
- }
- }
- }
+ struct v4l2_format fmt, fmt_default, fmt_best;
+ struct v4l2_fmtdesc fmtdesc;
+ int i, j, field, size;
+ int ok = 0;
+
+ if (!fmt_ret) {
+ V4L2_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ // get default format
+ V4L2_CLEAR(fmt_default);
+ fmt_default.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (_v4l2_xioctl(p_self->fd, VIDIOC_G_FMT, &fmt_default) == -1) {
+ V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_G_FMT) failed: %s error %d", device_name, strerror(errno), errno);
+ return -1;
+ }
+ V4L2_DEBUG_INFO("device '%s' default format: width:%d, height:%d, field:%d, pixelformat:%d",
+ device_name, fmt_default.fmt.pix.width, fmt_default.fmt.pix.height, fmt_default.fmt.pix.field, fmt_default.fmt.pix.pixelformat);
+
+ /* Best format (using preference) */
+ V4L2_CLEAR(fmt);
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ for (i = 0; i < sizeof(pix_format_prefs)/sizeof(pix_format_prefs[0]); ++i) {
+ for (size = 0; size < 2; ++size) {
+ for (field = 0; field < 2; ++field) {
+ fmt.fmt.pix.width = (size == 0) ? TMEDIA_PRODUCER(p_self)->video.width : fmt_default.fmt.pix.width;
+ fmt.fmt.pix.height = (size == 0) ? TMEDIA_PRODUCER(p_self)->video.height : fmt_default.fmt.pix.height;
+ fmt.fmt.pix.pixelformat = pix_format_prefs[i];
+ fmt.fmt.pix.field = (field == 0) ? V4L2_FIELD_NONE : V4L2_FIELD_INTERLACED;
+ if ((ok = (_v4l2_xioctl(p_self->fd, VIDIOC_TRY_FMT, &fmt) != -1))) {
+ goto bail;
+ }
+ }
+ }
+ }
+
+ /* Best format (using caps) */
+ for (i = 0; ; ++i) {
+ V4L2_CLEAR(fmtdesc);
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ fmtdesc.index = i;
+
+ if (_v4l2_xioctl(p_self->fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1) {
+ break;
+ }
+ V4L2_DEBUG_INFO("CAPS: device name=%s, fmtdesc index=%d, type=%d, description=%s, pixelformat=%d",
+ device_name, fmtdesc.index, fmtdesc.type, fmtdesc.description, fmtdesc.pixelformat);
+ for (j = 0; j < sizeof(pix_format_prefs)/sizeof(pix_format_prefs[0]); ++j) {
+ if (fmtdesc.pixelformat == pix_format_prefs[j]) {
+ for (size = 0; size < 2; ++size) {
+ for (field = 0; field < 2; ++field) {
+ fmt.fmt.pix.width = (size == 0) ? TMEDIA_PRODUCER(p_self)->video.width : fmt_default.fmt.pix.width;
+ fmt.fmt.pix.height = (size == 0) ? TMEDIA_PRODUCER(p_self)->video.height : fmt_default.fmt.pix.height;
+ fmt.fmt.pix.pixelformat = pix_format_prefs[i];
+ fmt.fmt.pix.field = (field == 0) ? V4L2_FIELD_NONE : V4L2_FIELD_INTERLACED;
+ if ((ok = (_v4l2_xioctl(p_self->fd, VIDIOC_TRY_FMT, &fmt) != -1))) {
+ goto bail;
+ }
+ }
+ }
+ }
+ }
+ }
bail:
- if (ok) {
- memcpy(fmt_ret, &fmt, sizeof(fmt));
- }
- return ok ? 0 : -1;
+ if (ok) {
+ memcpy(fmt_ret, &fmt, sizeof(fmt));
+ }
+ return ok ? 0 : -1;
}
static int _v4l2_init_read(tdav_producer_video_v4l2_t* p_self, unsigned int buffer_size)
{
- if (p_self->p_buffers) {
- V4L2_DEBUG_ERROR("Buffers already initialized");
- return -1;
- }
- if (!buffer_size) {
- V4L2_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if (!(p_self->p_buffers = calloc(1, sizeof(*p_self->p_buffers)))) {
- V4L2_DEBUG_ERROR("Out of memory");
- return -1;
- }
-
- p_self->p_buffers[0].n_length = buffer_size;
- p_self->p_buffers[0].p_start = tsk_malloc(buffer_size);
-
- if (!p_self->p_buffers[0].p_start) {
- V4L2_DEBUG_ERROR("Out of memory");
- return -1;
- }
-
- return 0;
+ if (p_self->p_buffers) {
+ V4L2_DEBUG_ERROR("Buffers already initialized");
+ return -1;
+ }
+ if (!buffer_size) {
+ V4L2_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if (!(p_self->p_buffers = calloc(1, sizeof(*p_self->p_buffers)))) {
+ V4L2_DEBUG_ERROR("Out of memory");
+ return -1;
+ }
+
+ p_self->p_buffers[0].n_length = buffer_size;
+ p_self->p_buffers[0].p_start = tsk_malloc(buffer_size);
+
+ if (!p_self->p_buffers[0].p_start) {
+ V4L2_DEBUG_ERROR("Out of memory");
+ return -1;
+ }
+
+ return 0;
}
static int _v4l2_init_mmap(tdav_producer_video_v4l2_t* p_self, const char* device_name)
{
- struct v4l2_requestbuffers req;
-
- V4L2_CLEAR(req);
-
- req.count = 4;
- req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- req.memory = V4L2_MEMORY_MMAP;
-
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_REQBUFS, &req)) {
- if (EINVAL == errno) {
- V4L2_DEBUG_ERROR("%s does not support memory mapping", device_name);
- return -1;
- } else {
- V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_REQBUFS) failed: %s error %d", device_name, strerror(errno), errno);
- return -1;
- }
- }
-
- if (req.count < 2) {
- V4L2_DEBUG_ERROR("Insufficient buffer memory on %s", device_name);
- return -1;
- }
-
- if (!(p_self->p_buffers = tsk_calloc(req.count, sizeof(*p_self->p_buffers)))) {
- V4L2_DEBUG_ERROR("Out of memory");
- return -1;
- }
-
- for (p_self->n_buffers = 0; p_self->n_buffers < req.count; ++p_self->n_buffers) {
- struct v4l2_buffer buf;
-
- V4L2_CLEAR(buf);
-
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_MMAP;
- buf.index = p_self->n_buffers;
-
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QUERYBUF, &buf)) {
- V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_REQBUFS) failed: %s error %d", device_name, strerror(errno), errno);
- return -1;
- }
-
- p_self->p_buffers[p_self->n_buffers].n_length = buf.length;
- p_self->p_buffers[p_self->n_buffers].p_start = mmap(NULL /* start anywhere */,
- buf.length,
- PROT_READ | PROT_WRITE /* required */,
- MAP_SHARED /* recommended */,
- p_self->fd, buf.m.offset);
-
- if (MAP_FAILED == p_self->p_buffers[p_self->n_buffers].p_start) {
- V4L2_DEBUG_ERROR("mmap(%s) failed: %s error %d", device_name, strerror(errno), errno);
- return -1;
- }
- }
-
- return 0;
+ struct v4l2_requestbuffers req;
+
+ V4L2_CLEAR(req);
+
+ req.count = 4;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_MMAP;
+
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_REQBUFS, &req)) {
+ if (EINVAL == errno) {
+ V4L2_DEBUG_ERROR("%s does not support memory mapping", device_name);
+ return -1;
+ }
+ else {
+ V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_REQBUFS) failed: %s error %d", device_name, strerror(errno), errno);
+ return -1;
+ }
+ }
+
+ if (req.count < 2) {
+ V4L2_DEBUG_ERROR("Insufficient buffer memory on %s", device_name);
+ return -1;
+ }
+
+ if (!(p_self->p_buffers = tsk_calloc(req.count, sizeof(*p_self->p_buffers)))) {
+ V4L2_DEBUG_ERROR("Out of memory");
+ return -1;
+ }
+
+ for (p_self->n_buffers = 0; p_self->n_buffers < req.count; ++p_self->n_buffers) {
+ struct v4l2_buffer buf;
+
+ V4L2_CLEAR(buf);
+
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = p_self->n_buffers;
+
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QUERYBUF, &buf)) {
+ V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_REQBUFS) failed: %s error %d", device_name, strerror(errno), errno);
+ return -1;
+ }
+
+ p_self->p_buffers[p_self->n_buffers].n_length = buf.length;
+ p_self->p_buffers[p_self->n_buffers].p_start = mmap(NULL /* start anywhere */,
+ buf.length,
+ PROT_READ | PROT_WRITE /* required */,
+ MAP_SHARED /* recommended */,
+ p_self->fd, buf.m.offset);
+
+ if (MAP_FAILED == p_self->p_buffers[p_self->n_buffers].p_start) {
+ V4L2_DEBUG_ERROR("mmap(%s) failed: %s error %d", device_name, strerror(errno), errno);
+ return -1;
+ }
+ }
+
+ return 0;
}
static int _v4l2_init_userp(tdav_producer_video_v4l2_t* p_self, unsigned int buffer_size, const char* device_name)
{
- struct v4l2_requestbuffers req;
-
- V4L2_CLEAR(req);
-
- req.count = 4;
- req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- req.memory = V4L2_MEMORY_USERPTR;
-
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_REQBUFS, &req)) {
- if (EINVAL == errno) {
- V4L2_DEBUG_ERROR("%s does not support user pointer i/o", device_name);
- return -1;
- } else {
- V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_REQBUFS) failed: %s error %d", device_name, strerror(errno), errno);
- return -1;
- }
- }
-
- if (!(p_self->p_buffers = tsk_calloc(4, sizeof(*p_self->p_buffers)))) {
- V4L2_DEBUG_ERROR("Out of memory");
- return -1;
- }
-
- for (p_self->n_buffers = 0; p_self->n_buffers < 4; ++p_self->n_buffers) {
- p_self->p_buffers[p_self->n_buffers].n_length = buffer_size;
- p_self->p_buffers[p_self->n_buffers].p_start = tsk_malloc(buffer_size);
-
- if (!p_self->p_buffers[p_self->n_buffers].p_start) {
- V4L2_DEBUG_ERROR("Out of memory");
- return -1;
- }
- }
-
- return 0;
+ struct v4l2_requestbuffers req;
+
+ V4L2_CLEAR(req);
+
+ req.count = 4;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_USERPTR;
+
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_REQBUFS, &req)) {
+ if (EINVAL == errno) {
+ V4L2_DEBUG_ERROR("%s does not support user pointer i/o", device_name);
+ return -1;
+ }
+ else {
+ V4L2_DEBUG_ERROR("xioctl(%s, VIDIOC_REQBUFS) failed: %s error %d", device_name, strerror(errno), errno);
+ return -1;
+ }
+ }
+
+ if (!(p_self->p_buffers = tsk_calloc(4, sizeof(*p_self->p_buffers)))) {
+ V4L2_DEBUG_ERROR("Out of memory");
+ return -1;
+ }
+
+ for (p_self->n_buffers = 0; p_self->n_buffers < 4; ++p_self->n_buffers) {
+ p_self->p_buffers[p_self->n_buffers].n_length = buffer_size;
+ p_self->p_buffers[p_self->n_buffers].p_start = tsk_malloc(buffer_size);
+
+ if (!p_self->p_buffers[p_self->n_buffers].p_start) {
+ V4L2_DEBUG_ERROR("Out of memory");
+ return -1;
+ }
+ }
+
+ return 0;
}
static int _v4l2_send_frame(tdav_producer_video_v4l2_t* p_self)
{
- struct v4l2_buffer buf;
- unsigned int i;
+ struct v4l2_buffer buf;
+ unsigned int i;
#define V4L2_SEND_BUFF(_buff, _size) \
TMEDIA_PRODUCER(p_self)->enc_cb.callback(TMEDIA_PRODUCER(p_self)->enc_cb.callback_data, (_buff), (_size));
#if V4L2_FAKE_UYVY
- {
- tsk_size_t size = (TMEDIA_PRODUCER(p_self)->video.width * TMEDIA_PRODUCER(p_self)->video.height) << 1;
- uint8_t* buff = (uint8_t*)tsk_malloc(size);
- if (buff) {
- tsk_size_t i;
- for (i = 0; i < size; ++i) {
- buff[i] = rand() & 254;
- }
- V4L2_SEND_BUFF(buff, size);
- tsk_free((void**)&buff);
- }
- return 0;
- }
+ {
+ tsk_size_t size = (TMEDIA_PRODUCER(p_self)->video.width * TMEDIA_PRODUCER(p_self)->video.height) << 1;
+ uint8_t* buff = (uint8_t*)tsk_malloc(size);
+ if (buff) {
+ tsk_size_t i;
+ for (i = 0; i < size; ++i) {
+ buff[i] = rand() & 254;
+ }
+ V4L2_SEND_BUFF(buff, size);
+ tsk_free((void**)&buff);
+ }
+ return 0;
+ }
#endif
- switch (p_self->io) {
- case V4L2_IO_METHOD_READ:
- if (-1 == read(p_self->fd, p_self->p_buffers[0].p_start, p_self->p_buffers[0].n_length)) {
- switch (errno) {
- case EAGAIN:
- return 0;
+ switch (p_self->io) {
+ case V4L2_IO_METHOD_READ:
+ if (-1 == read(p_self->fd, p_self->p_buffers[0].p_start, p_self->p_buffers[0].n_length)) {
+ switch (errno) {
+ case EAGAIN:
+ return 0;
- case EIO:
- /* Could ignore EIO, see spec. */
+ case EIO:
+ /* Could ignore EIO, see spec. */
- /* fall through */
+ /* fall through */
- default:
- V4L2_DEBUG_ERROR("read() failed: %s error %d", strerror(errno), errno);
- break;
- }
- }
+ default:
+ V4L2_DEBUG_ERROR("read() failed: %s error %d", strerror(errno), errno);
+ break;
+ }
+ }
- V4L2_SEND_BUFF(p_self->p_buffers[0].p_start, p_self->p_buffers[0].n_length);
- return 0;
+ V4L2_SEND_BUFF(p_self->p_buffers[0].p_start, p_self->p_buffers[0].n_length);
+ return 0;
- case V4L2_IO_METHOD_MMAP:
- V4L2_CLEAR(buf);
+ case V4L2_IO_METHOD_MMAP:
+ V4L2_CLEAR(buf);
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_MMAP;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_DQBUF, &buf)) {
- switch (errno) {
- case EAGAIN:
- V4L2_DEBUG_INFO("EAGAIN");
- return 0;
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_DQBUF, &buf)) {
+ switch (errno) {
+ case EAGAIN:
+ V4L2_DEBUG_INFO("EAGAIN");
+ return 0;
- case EIO:
- /* Could ignore EIO, see spec. */
+ case EIO:
+ /* Could ignore EIO, see spec. */
- /* fall through */
+ /* fall through */
- default:
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_DQBUF) failed: %s error %d", strerror(errno), errno);
- break;
- }
- }
+ default:
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_DQBUF) failed: %s error %d", strerror(errno), errno);
+ break;
+ }
+ }
- assert(buf.index < p_self->n_buffers);
+ assert(buf.index < p_self->n_buffers);
- V4L2_SEND_BUFF(p_self->p_buffers[buf.index].p_start, buf.bytesused);
+ V4L2_SEND_BUFF(p_self->p_buffers[buf.index].p_start, buf.bytesused);
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QBUF, &buf)) {
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_DQBUF) failed: %s error %d", strerror(errno), errno);
- break;
- }
- return 0;
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QBUF, &buf)) {
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_DQBUF) failed: %s error %d", strerror(errno), errno);
+ break;
+ }
+ return 0;
- case V4L2_IO_METHOD_USERPTR:
- V4L2_CLEAR(buf);
+ case V4L2_IO_METHOD_USERPTR:
+ V4L2_CLEAR(buf);
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_USERPTR;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_USERPTR;
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_DQBUF, &buf)) {
- switch (errno) {
- case EAGAIN:
- V4L2_DEBUG_INFO("EAGAIN");
- return 0;
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_DQBUF, &buf)) {
+ switch (errno) {
+ case EAGAIN:
+ V4L2_DEBUG_INFO("EAGAIN");
+ return 0;
- case EIO:
- /* Could ignore EIO, see spec. */
+ case EIO:
+ /* Could ignore EIO, see spec. */
- /* fall through */
+ /* fall through */
- default:
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_DQBUF) failed: %s error %d", strerror(errno), errno);
- break;
- }
- }
+ default:
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_DQBUF) failed: %s error %d", strerror(errno), errno);
+ break;
+ }
+ }
- for (i = 0; i < p_self->n_buffers; ++i) {
- if (buf.m.userptr == (unsigned long)p_self->p_buffers[i].p_start && buf.length == p_self->p_buffers[i].n_length) {
- break;
- }
- }
+ for (i = 0; i < p_self->n_buffers; ++i) {
+ if (buf.m.userptr == (unsigned long)p_self->p_buffers[i].p_start && buf.length == p_self->p_buffers[i].n_length) {
+ break;
+ }
+ }
- V4L2_SEND_BUFF((void *)buf.m.userptr, buf.bytesused);
+ V4L2_SEND_BUFF((void *)buf.m.userptr, buf.bytesused);
- if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QBUF, &buf)) {
- V4L2_DEBUG_ERROR("xioctl(VIDIOC_DQBUF) failed: %s error %d", strerror(errno), errno);
- break;
- }
- return 0;
- }
+ if (-1 == _v4l2_xioctl(p_self->fd, VIDIOC_QBUF, &buf)) {
+ V4L2_DEBUG_ERROR("xioctl(VIDIOC_DQBUF) failed: %s error %d", strerror(errno), errno);
+ break;
+ }
+ return 0;
+ }
- return -1;
+ return -1;
}
static int _tdav_producer_video_v4l2_timer_cb(const void* arg, tsk_timer_id_t timer_id)
{
- tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)arg;
- int ret = 0;
-
- tsk_safeobj_lock(p_v4l2);
-
- if (p_v4l2->id_timer_grab == timer_id) {
- if (ret = _tdav_producer_video_v4l2_grab(p_v4l2)) {
- // goto bail;
- }
- if (p_v4l2->b_started) {
- p_v4l2->id_timer_grab = tsk_timer_manager_schedule(p_v4l2->p_timer_mgr, p_v4l2->u_timout_grab, _tdav_producer_video_v4l2_timer_cb, p_v4l2);
- if (!TSK_TIMER_ID_IS_VALID(p_v4l2->id_timer_grab)) {
- V4L2_DEBUG_ERROR("Failed to schedule timer with timeout=%llu", p_v4l2->u_timout_grab);
- ret = -2;
- goto bail;
- }
- }
- }
+ tdav_producer_video_v4l2_t* p_v4l2 = (tdav_producer_video_v4l2_t*)arg;
+ int ret = 0;
+
+ tsk_safeobj_lock(p_v4l2);
+
+ if (p_v4l2->id_timer_grab == timer_id) {
+ if (ret = _tdav_producer_video_v4l2_grab(p_v4l2)) {
+ // goto bail;
+ }
+ if (p_v4l2->b_started) {
+ p_v4l2->id_timer_grab = tsk_timer_manager_schedule(p_v4l2->p_timer_mgr, p_v4l2->u_timout_grab, _tdav_producer_video_v4l2_timer_cb, p_v4l2);
+ if (!TSK_TIMER_ID_IS_VALID(p_v4l2->id_timer_grab)) {
+ V4L2_DEBUG_ERROR("Failed to schedule timer with timeout=%llu", p_v4l2->u_timout_grab);
+ ret = -2;
+ goto bail;
+ }
+ }
+ }
bail:
- tsk_safeobj_unlock(p_v4l2);
- return ret;
+ tsk_safeobj_unlock(p_v4l2);
+ return ret;
}
static int _tdav_producer_video_v4l2_grab(tdav_producer_video_v4l2_t* p_self)
{
- int ret = 0, r;
- fd_set fds;
- struct timeval tv;
-
- if (!p_self) {
- V4L2_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- tsk_safeobj_lock(p_self);
-
- if (!p_self->b_started) {
- V4L2_DEBUG_ERROR("producer not started yet");
- ret = -2;
- goto bail;
- }
-
- if (!TMEDIA_PRODUCER(p_self)->enc_cb.callback) {
- goto bail;
- }
-
- FD_ZERO(&fds);
- FD_SET(p_self->fd, &fds);
-
- /* Timeout. */
- tv.tv_sec = 0;
- tv.tv_usec = (p_self->id_timer_grab * 1000);
- while (tv.tv_usec >= 1000000) {
- tv.tv_usec -= 1000000;
- tv.tv_sec++;
- }
-
- r = select(p_self->fd + 1, &fds, NULL, NULL, &tv);
-
- if (-1 == r) {
- if (EINTR == errno) {
- V4L2_DEBUG_INFO("select() returned EINTR");
- }
- else {
- V4L2_DEBUG_ERROR("select() failed: %s error %d", strerror(errno), errno);
- }
- goto bail;
- }
-
- if (0 == r) {
- V4L2_DEBUG_INFO("select() timeout: %s error %d", strerror(errno), errno);
- goto bail;
- }
- // Grab a frame
- if ((ret = _v4l2_send_frame(p_self))) {
- goto bail;
- }
+ int ret = 0, r;
+ fd_set fds;
+ struct timeval tv;
+
+ if (!p_self) {
+ V4L2_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ tsk_safeobj_lock(p_self);
+
+ if (!p_self->b_started) {
+ V4L2_DEBUG_ERROR("producer not started yet");
+ ret = -2;
+ goto bail;
+ }
+
+ if (!TMEDIA_PRODUCER(p_self)->enc_cb.callback) {
+ goto bail;
+ }
+
+ FD_ZERO(&fds);
+ FD_SET(p_self->fd, &fds);
+
+ /* Timeout. */
+ tv.tv_sec = 0;
+ tv.tv_usec = (p_self->id_timer_grab * 1000);
+ while (tv.tv_usec >= 1000000) {
+ tv.tv_usec -= 1000000;
+ tv.tv_sec++;
+ }
+
+ r = select(p_self->fd + 1, &fds, NULL, NULL, &tv);
+
+ if (-1 == r) {
+ if (EINTR == errno) {
+ V4L2_DEBUG_INFO("select() returned EINTR");
+ }
+ else {
+ V4L2_DEBUG_ERROR("select() failed: %s error %d", strerror(errno), errno);
+ }
+ goto bail;
+ }
+
+ if (0 == r) {
+ V4L2_DEBUG_INFO("select() timeout: %s error %d", strerror(errno), errno);
+ goto bail;
+ }
+ // Grab a frame
+ if ((ret = _v4l2_send_frame(p_self))) {
+ goto bail;
+ }
bail:
- tsk_safeobj_unlock(p_self);
+ tsk_safeobj_unlock(p_self);
- return ret;
+ return ret;
}
//
@@ -1086,78 +1088,75 @@ bail:
/* constructor */
static tsk_object_t* _tdav_producer_video_v4l2_ctor(tsk_object_t *self, va_list * app)
{
- tdav_producer_video_v4l2_t *p_v4l2 = (tdav_producer_video_v4l2_t *)self;
- if (p_v4l2) {
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(p_v4l2));
- TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_yuv420p;
- /* init self with default values*/
- p_v4l2->fd = -1;
- TMEDIA_PRODUCER(p_v4l2)->video.fps = 15;
- TMEDIA_PRODUCER(p_v4l2)->video.width = 352;
- TMEDIA_PRODUCER(p_v4l2)->video.height = 288;
-
- tsk_safeobj_init(p_v4l2);
- }
- return self;
+ tdav_producer_video_v4l2_t *p_v4l2 = (tdav_producer_video_v4l2_t *)self;
+ if (p_v4l2) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(p_v4l2));
+ TMEDIA_PRODUCER(p_v4l2)->video.chroma = tmedia_chroma_yuv420p;
+ /* init self with default values*/
+ p_v4l2->fd = -1;
+ TMEDIA_PRODUCER(p_v4l2)->video.fps = 15;
+ TMEDIA_PRODUCER(p_v4l2)->video.width = 352;
+ TMEDIA_PRODUCER(p_v4l2)->video.height = 288;
+
+ tsk_safeobj_init(p_v4l2);
+ }
+ return self;
}
/* destructor */
static tsk_object_t* _tdav_producer_video_v4l2_dtor(tsk_object_t * self)
-{
- tdav_producer_video_v4l2_t *p_v4l2 = (tdav_producer_video_v4l2_t *)self;
- if (p_v4l2) {
- /* stop */
- if (p_v4l2->b_started) {
- _tdav_producer_video_v4l2_stop((tmedia_producer_t*)p_v4l2);
- }
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(p_v4l2));
- /* deinit self */
- _v4l2_unprepare(p_v4l2);
- TSK_OBJECT_SAFE_FREE(p_v4l2->p_timer_mgr);
- tsk_safeobj_deinit(p_v4l2);
-
- V4L2_DEBUG_INFO("*** destroyed ***");
- }
-
- return self;
+{
+ tdav_producer_video_v4l2_t *p_v4l2 = (tdav_producer_video_v4l2_t *)self;
+ if (p_v4l2) {
+ /* stop */
+ if (p_v4l2->b_started) {
+ _tdav_producer_video_v4l2_stop((tmedia_producer_t*)p_v4l2);
+ }
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(p_v4l2));
+ /* deinit self */
+ _v4l2_unprepare(p_v4l2);
+ TSK_OBJECT_SAFE_FREE(p_v4l2->p_timer_mgr);
+ tsk_safeobj_deinit(p_v4l2);
+
+ V4L2_DEBUG_INFO("*** destroyed ***");
+ }
+
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_producer_video_v4l2_def_s =
-{
- sizeof(tdav_producer_video_v4l2_t),
- _tdav_producer_video_v4l2_ctor,
- _tdav_producer_video_v4l2_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_producer_video_v4l2_def_s = {
+ sizeof(tdav_producer_video_v4l2_t),
+ _tdav_producer_video_v4l2_ctor,
+ _tdav_producer_video_v4l2_dtor,
+ tsk_null,
};
/* plugin definition*/
// Video
-static const tmedia_producer_plugin_def_t tdav_producer_video_v4l2_plugin_def_s =
-{
- &tdav_producer_video_v4l2_def_s,
- tmedia_video,
- "V4L2 video producer",
-
- _tdav_producer_video_v4l2_set,
- _tdav_producer_video_v4l2_prepare,
- _tdav_producer_video_v4l2_start,
- _tdav_producer_video_v4l2_pause,
- _tdav_producer_video_v4l2_stop
+static const tmedia_producer_plugin_def_t tdav_producer_video_v4l2_plugin_def_s = {
+ &tdav_producer_video_v4l2_def_s,
+ tmedia_video,
+ "V4L2 video producer",
+
+ _tdav_producer_video_v4l2_set,
+ _tdav_producer_video_v4l2_prepare,
+ _tdav_producer_video_v4l2_start,
+ _tdav_producer_video_v4l2_pause,
+ _tdav_producer_video_v4l2_stop
};
const tmedia_producer_plugin_def_t *tdav_producer_video_v4l2_plugin_def_t = &tdav_producer_video_v4l2_plugin_def_s;
// Screencast
-static const tmedia_producer_plugin_def_t tdav_producer_screencast_v4l2_plugin_def_s =
-{
- &tdav_producer_video_v4l2_def_s,
- tmedia_bfcp_video,
- "V4L2 screencast producer",
-
- _tdav_producer_video_v4l2_set,
- _tdav_producer_video_v4l2_prepare,
- _tdav_producer_video_v4l2_start,
- _tdav_producer_video_v4l2_pause,
- _tdav_producer_video_v4l2_stop
+static const tmedia_producer_plugin_def_t tdav_producer_screencast_v4l2_plugin_def_s = {
+ &tdav_producer_video_v4l2_def_s,
+ tmedia_bfcp_video,
+ "V4L2 screencast producer",
+
+ _tdav_producer_video_v4l2_set,
+ _tdav_producer_video_v4l2_prepare,
+ _tdav_producer_video_v4l2_start,
+ _tdav_producer_video_v4l2_pause,
+ _tdav_producer_video_v4l2_stop
};
const tmedia_producer_plugin_def_t *tdav_producer_screencast_v4l2_plugin_def_t = &tdav_producer_screencast_v4l2_plugin_def_s;
#endif /* HAVE_LINUX_VIDEODEV2_H */
diff --git a/tinyDAV/src/video/winm/tdav_consumer_winm.cxx b/tinyDAV/src/video/winm/tdav_consumer_winm.cxx
index b608a72..6bae661 100755
--- a/tinyDAV/src/video/winm/tdav_consumer_winm.cxx
+++ b/tinyDAV/src/video/winm/tdav_consumer_winm.cxx
@@ -1,17 +1,17 @@
/*Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -33,9 +33,8 @@ using namespace doubango_rt::BackEnd;
#endif
-typedef struct tdav_consumer_winm_s
-{
- TMEDIA_DECLARE_CONSUMER;
+typedef struct tdav_consumer_winm_s {
+ TMEDIA_DECLARE_CONSUMER;
}
tdav_consumer_winm_t;
@@ -44,111 +43,109 @@ tdav_consumer_winm_t;
/* ============ Media Producer Interface ================= */
int tdav_consumer_winm_set(tmedia_consumer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
+ int ret = 0;
- if(!self || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return ret;
+ return ret;
}
int tdav_consumer_winm_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
- tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
-
- if(!consumer || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) {
- TMEDIA_CONSUMER(consumer)->decoder.codec_id = codec->id;
- }
- else {
- TMEDIA_CONSUMER(consumer)->decoder.codec_id = tmedia_codec_id_none;
- }
-
- TMEDIA_CONSUMER(consumer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
- TMEDIA_CONSUMER(consumer)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
- TMEDIA_CONSUMER(consumer)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
-
- if(!TMEDIA_CONSUMER(consumer)->video.display.width){
- TMEDIA_CONSUMER(consumer)->video.display.width = TMEDIA_CONSUMER(consumer)->video.in.width;
- }
- if(!TMEDIA_CONSUMER(consumer)->video.display.height){
- TMEDIA_CONSUMER(consumer)->video.display.height = TMEDIA_CONSUMER(consumer)->video.in.height;
- }
-
- return 0;
+ tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
+
+ if(!consumer || !codec && codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) {
+ TMEDIA_CONSUMER(consumer)->decoder.codec_id = codec->id;
+ }
+ else {
+ TMEDIA_CONSUMER(consumer)->decoder.codec_id = tmedia_codec_id_none;
+ }
+
+ TMEDIA_CONSUMER(consumer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
+ TMEDIA_CONSUMER(consumer)->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
+ TMEDIA_CONSUMER(consumer)->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
+
+ if(!TMEDIA_CONSUMER(consumer)->video.display.width) {
+ TMEDIA_CONSUMER(consumer)->video.display.width = TMEDIA_CONSUMER(consumer)->video.in.width;
+ }
+ if(!TMEDIA_CONSUMER(consumer)->video.display.height) {
+ TMEDIA_CONSUMER(consumer)->video.display.height = TMEDIA_CONSUMER(consumer)->video.in.height;
+ }
+
+ return 0;
}
int tdav_consumer_winm_start(tmedia_consumer_t* self)
{
- tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
+ tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
- if(!consumer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!consumer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
#if TDAV_UNDER_WINDOWS_PHONE
- if(Globals::Instance->VideoRenderer != nullptr)
- {
- Globals::Instance->VideoRenderer->Start();
- }
+ if(Globals::Instance->VideoRenderer != nullptr) {
+ Globals::Instance->VideoRenderer->Start();
+ }
#endif
- return 0;
+ return 0;
}
int tdav_consumer_winm_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
- tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
+ tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
#if TDAV_UNDER_WINDOWS_PHONE
- static const UINT64 hnsPresenationTime = 0;
- static const UINT64 hnsSampleDuration = 0;
+ static const UINT64 hnsPresenationTime = 0;
+ static const UINT64 hnsSampleDuration = 0;
- Globals::Instance->ReceiveVideoFrame((BYTE*)buffer, size, hnsPresenationTime, hnsSampleDuration);
+ Globals::Instance->ReceiveVideoFrame((BYTE*)buffer, size, hnsPresenationTime, hnsSampleDuration);
#endif
-
- return 0;
+
+ return 0;
}
int tdav_consumer_winm_pause(tmedia_consumer_t* self)
{
- tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
+ tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
- if(!consumer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!consumer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
- return 0;
+ return 0;
}
int tdav_consumer_winm_stop(tmedia_consumer_t* self)
{
- tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
+ tdav_consumer_winm_t* consumer = (tdav_consumer_winm_t*)self;
- TSK_DEBUG_INFO("tdav_consumer_winm_stop");
+ TSK_DEBUG_INFO("tdav_consumer_winm_stop");
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
#if TDAV_UNDER_WINDOWS_PHONE
- if(Globals::Instance->VideoRenderer != nullptr)
- {
- Globals::Instance->VideoRenderer->Stop();
- }
+ if(Globals::Instance->VideoRenderer != nullptr) {
+ Globals::Instance->VideoRenderer->Stop();
+ }
#endif
- return 0;
+ return 0;
}
@@ -158,60 +155,58 @@ int tdav_consumer_winm_stop(tmedia_consumer_t* self)
/* constructor */
static tsk_object_t* tdav_consumer_winm_ctor(tsk_object_t * self, va_list * app)
{
- tdav_consumer_winm_t *consumer = (tdav_consumer_winm_t *)self;
- if(consumer){
- /* init base */
- tmedia_consumer_init(TMEDIA_CONSUMER(consumer));
- TMEDIA_CONSUMER(consumer)->video.display.chroma = tmedia_chroma_yuv420p; // To avoid chroma conversion
-
- /* init self */
- TMEDIA_CONSUMER(consumer)->video.fps = 15;
- TMEDIA_CONSUMER(consumer)->video.display.width = 352;
- TMEDIA_CONSUMER(consumer)->video.display.height = 288;
- TMEDIA_CONSUMER(consumer)->video.display.auto_resize = tsk_true;
- }
- return self;
+ tdav_consumer_winm_t *consumer = (tdav_consumer_winm_t *)self;
+ if(consumer) {
+ /* init base */
+ tmedia_consumer_init(TMEDIA_CONSUMER(consumer));
+ TMEDIA_CONSUMER(consumer)->video.display.chroma = tmedia_chroma_yuv420p; // To avoid chroma conversion
+
+ /* init self */
+ TMEDIA_CONSUMER(consumer)->video.fps = 15;
+ TMEDIA_CONSUMER(consumer)->video.display.width = 352;
+ TMEDIA_CONSUMER(consumer)->video.display.height = 288;
+ TMEDIA_CONSUMER(consumer)->video.display.auto_resize = tsk_true;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* tdav_consumer_winm_dtor(tsk_object_t * self)
-{
- tdav_consumer_winm_t *consumer = (tdav_consumer_winm_t *)self;
- if(consumer){
+{
+ tdav_consumer_winm_t *consumer = (tdav_consumer_winm_t *)self;
+ if(consumer) {
- /* stop */
- //if(consumer->started){
- tdav_consumer_winm_stop((tmedia_consumer_t*)self);
- //}
+ /* stop */
+ //if(consumer->started){
+ tdav_consumer_winm_stop((tmedia_consumer_t*)self);
+ //}
- /* deinit base */
- tmedia_consumer_deinit(TMEDIA_CONSUMER(consumer));
- /* deinit self */
- }
+ /* deinit base */
+ tmedia_consumer_deinit(TMEDIA_CONSUMER(consumer));
+ /* deinit self */
+ }
- return self;
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_consumer_winm_def_s =
-{
- sizeof(tdav_consumer_winm_t),
- tdav_consumer_winm_ctor,
- tdav_consumer_winm_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_consumer_winm_def_s = {
+ sizeof(tdav_consumer_winm_t),
+ tdav_consumer_winm_ctor,
+ tdav_consumer_winm_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_consumer_plugin_def_t tdav_consumer_winm_plugin_def_s =
-{
- &tdav_consumer_winm_def_s,
-
- tmedia_video,
- "Microsoft Windows Media consumer (Video)",
-
- tdav_consumer_winm_set,
- tdav_consumer_winm_prepare,
- tdav_consumer_winm_start,
- tdav_consumer_winm_consume,
- tdav_consumer_winm_pause,
- tdav_consumer_winm_stop
+static const tmedia_consumer_plugin_def_t tdav_consumer_winm_plugin_def_s = {
+ &tdav_consumer_winm_def_s,
+
+ tmedia_video,
+ "Microsoft Windows Media consumer (Video)",
+
+ tdav_consumer_winm_set,
+ tdav_consumer_winm_prepare,
+ tdav_consumer_winm_start,
+ tdav_consumer_winm_consume,
+ tdav_consumer_winm_pause,
+ tdav_consumer_winm_stop
};
extern const tmedia_consumer_plugin_def_t *tdav_consumer_winm_plugin_def_t = &tdav_consumer_winm_plugin_def_s;
diff --git a/tinyDAV/src/video/winm/tdav_producer_winm.cxx b/tinyDAV/src/video/winm/tdav_producer_winm.cxx
index 398340a..3854cdc 100755
--- a/tinyDAV/src/video/winm/tdav_producer_winm.cxx
+++ b/tinyDAV/src/video/winm/tdav_producer_winm.cxx
@@ -1,17 +1,17 @@
/*Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
-*
+*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
-*
+*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
-*
+*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*/
@@ -44,88 +44,84 @@ struct tdav_producer_winm_s;
namespace Doubango
{
- namespace VoIP
- {
- class CaptureSampleSink :
- public Microsoft::WRL::RuntimeClass<
- Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::RuntimeClassType::ClassicCom>,
- ICameraCaptureSampleSink>
- {
- DWORD m_dwSampleCount;
- const struct tdav_producer_winm_s* m_pProducer;
-
- public:
-
- STDMETHODIMP RuntimeClassInitialize(const struct tdav_producer_winm_s* pProducer)
- {
- m_dwSampleCount = 0;
- m_pProducer = pProducer;
- return S_OK;
- }
+namespace VoIP
+{
+class CaptureSampleSink :
+ public Microsoft::WRL::RuntimeClass<
+ Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::RuntimeClassType::ClassicCom>,
+ ICameraCaptureSampleSink>
+{
+ DWORD m_dwSampleCount;
+ const struct tdav_producer_winm_s* m_pProducer;
- DWORD GetSampleCount()
- {
- return m_dwSampleCount;
- }
+public:
- IFACEMETHODIMP_(void)
- OnSampleAvailable(
- ULONGLONG hnsPresentationTime,
- ULONGLONG hnsSampleDuration,
- DWORD cbSample,
- BYTE* pSample)
- {
- m_dwSampleCount++;
- if(m_pProducer && TMEDIA_PRODUCER(m_pProducer)->enc_cb.callback)
- {
- TMEDIA_PRODUCER(m_pProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pProducer)->enc_cb.callback_data, pSample, cbSample);
- }
- }
- };
+ STDMETHODIMP RuntimeClassInitialize(const struct tdav_producer_winm_s* pProducer) {
+ m_dwSampleCount = 0;
+ m_pProducer = pProducer;
+ return S_OK;
+ }
- ref class VideoCapturePhone sealed
- {
- public:
- virtual ~VideoCapturePhone();
- internal:
- VideoCapturePhone();
+ DWORD GetSampleCount() {
+ return m_dwSampleCount;
+ }
- int Prepare(const struct tdav_producer_winm_s* winm);
- int Start();
- int Pause();
- int Stop();
- void SetCameraLocation(Windows::Phone::Media::Capture::CameraSensorLocation cameraLocation);
- void ToggleCamera();
+ IFACEMETHODIMP_(void)
+ OnSampleAvailable(
+ ULONGLONG hnsPresentationTime,
+ ULONGLONG hnsSampleDuration,
+ DWORD cbSample,
+ BYTE* pSample) {
+ m_dwSampleCount++;
+ if(m_pProducer && TMEDIA_PRODUCER(m_pProducer)->enc_cb.callback) {
+ TMEDIA_PRODUCER(m_pProducer)->enc_cb.callback(TMEDIA_PRODUCER(m_pProducer)->enc_cb.callback_data, pSample, cbSample);
+ }
+ }
+};
+
+ref class VideoCapturePhone sealed
+{
+public:
+ virtual ~VideoCapturePhone();
+internal:
+ VideoCapturePhone();
+
+ int Prepare(const struct tdav_producer_winm_s* winm);
+ int Start();
+ int Pause();
+ int Stop();
+ void SetCameraLocation(Windows::Phone::Media::Capture::CameraSensorLocation cameraLocation);
+ void ToggleCamera();
- private:
- int UnPrepare();
- void ToggleCameraThread(Windows::Foundation::IAsyncAction^ operation);
+private:
+ int UnPrepare();
+ void ToggleCameraThread(Windows::Foundation::IAsyncAction^ operation);
- tsk_mutex_handle_t* m_hMutex;
+ tsk_mutex_handle_t* m_hMutex;
- const tdav_producer_winm_s* m_pWrappedPlugin;
+ const tdav_producer_winm_s* m_pWrappedPlugin;
- // Has capture started?
- bool m_bStarted, m_bPrepared;
+ // Has capture started?
+ bool m_bStarted, m_bPrepared;
- // Events to signal whether capture has stopped/started
- HANDLE m_hStopCompleted;
- HANDLE m_hStartCompleted;
+ // Events to signal whether capture has stopped/started
+ HANDLE m_hStopCompleted;
+ HANDLE m_hStartCompleted;
- IAsyncOperation<AudioVideoCaptureDevice^> ^m_pOpenOperation;
+ IAsyncOperation<AudioVideoCaptureDevice^> ^m_pOpenOperation;
- Windows::Foundation::IAsyncAction^ m_ToggleThread;
+ Windows::Foundation::IAsyncAction^ m_ToggleThread;
- // Native sink and video device
- CaptureSampleSink *m_pVideoSink;
- IAudioVideoCaptureDeviceNative *m_pVideoDevice;
+ // Native sink and video device
+ CaptureSampleSink *m_pVideoSink;
+ IAudioVideoCaptureDeviceNative *m_pVideoDevice;
- Windows::Phone::Media::Capture::CameraSensorLocation m_eCameraLocation;
+ Windows::Phone::Media::Capture::CameraSensorLocation m_eCameraLocation;
- Windows::Phone::Media::Capture::AudioVideoCaptureDevice ^m_pVideoOnlyDevice;
- Windows::Foundation::IAsyncAction ^m_pVideoCaptureAction;
- };
- }
+ Windows::Phone::Media::Capture::AudioVideoCaptureDevice ^m_pVideoOnlyDevice;
+ Windows::Foundation::IAsyncAction ^m_pVideoCaptureAction;
+};
+}
}
using namespace Doubango::VoIP;
@@ -133,11 +129,10 @@ using namespace Doubango::VoIP;
#endif
-typedef struct tdav_producer_winm_s
-{
- TMEDIA_DECLARE_PRODUCER;
+typedef struct tdav_producer_winm_s {
+ TMEDIA_DECLARE_PRODUCER;
#if TDAV_UNDER_WINDOWS_PHONE
- VideoCapturePhone^ videoCapturePhone;
+ VideoCapturePhone^ videoCapturePhone;
#endif
}
tdav_producer_winm_t;
@@ -146,104 +141,103 @@ tdav_producer_winm_t;
/* ============ Media Producer Interface ================= */
static int tdav_producer_winm_set(tmedia_producer_t *self, const tmedia_param_t* param)
{
- int ret = 0;
- tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
-
- if(!producer || !param){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(param->value_type == tmedia_pvt_int32){
- if(tsk_striequals(param->key, "camera-location")){
- Windows::Phone::Media::Capture::CameraSensorLocation cameraLocation = (Windows::Phone::Media::Capture::CameraSensorLocation)*((int32_t*)param->value);
- if(producer->videoCapturePhone)
- {
- producer->videoCapturePhone->SetCameraLocation(cameraLocation);
- return 0;
- }
- }
- }
-
- return ret;
+ int ret = 0;
+ tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
+
+ if(!producer || !param) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(param->value_type == tmedia_pvt_int32) {
+ if(tsk_striequals(param->key, "camera-location")) {
+ Windows::Phone::Media::Capture::CameraSensorLocation cameraLocation = (Windows::Phone::Media::Capture::CameraSensorLocation)*((int32_t*)param->value);
+ if(producer->videoCapturePhone) {
+ producer->videoCapturePhone->SetCameraLocation(cameraLocation);
+ return 0;
+ }
+ }
+ }
+
+ return ret;
}
static int tdav_producer_winm_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
- tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
-
- if(!producer || !codec && codec->plugin){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
-
- if(codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) {
- TMEDIA_PRODUCER(producer)->encoder.codec_id = codec->id;
- }
- else {
- TMEDIA_PRODUCER(producer)->encoder.codec_id = tmedia_codec_id_none;
- }
- TMEDIA_PRODUCER(producer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
- TMEDIA_PRODUCER(producer)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
- TMEDIA_PRODUCER(producer)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
+ tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
+
+ if(!producer || !codec && codec->plugin) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
+
+ if(codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) {
+ TMEDIA_PRODUCER(producer)->encoder.codec_id = codec->id;
+ }
+ else {
+ TMEDIA_PRODUCER(producer)->encoder.codec_id = tmedia_codec_id_none;
+ }
+ TMEDIA_PRODUCER(producer)->video.fps = TMEDIA_CODEC_VIDEO(codec)->out.fps;
+ TMEDIA_PRODUCER(producer)->video.width = TMEDIA_CODEC_VIDEO(codec)->out.width;
+ TMEDIA_PRODUCER(producer)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
#if TDAV_UNDER_WINDOWS_PHONE
- return producer->videoCapturePhone->Prepare(producer);
+ return producer->videoCapturePhone->Prepare(producer);
#else
- TSK_DEBUG_ERROR("Unexpected code called");
- return -1;
+ TSK_DEBUG_ERROR("Unexpected code called");
+ return -1;
#endif
}
static int tdav_producer_winm_start(tmedia_producer_t* self)
{
- tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
+ tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
- if(!producer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!producer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
#if TDAV_UNDER_WINDOWS_PHONE
- return producer->videoCapturePhone->Start();
+ return producer->videoCapturePhone->Start();
#else
- TSK_DEBUG_ERROR("Unexpected code called");
- return -1;
+ TSK_DEBUG_ERROR("Unexpected code called");
+ return -1;
#endif
}
static int tdav_producer_winm_pause(tmedia_producer_t* self)
{
- tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
+ tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
- if(!producer){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!producer) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
#if TDAV_UNDER_WINDOWS_PHONE
- return producer->videoCapturePhone->Pause();
+ return producer->videoCapturePhone->Pause();
#else
- TSK_DEBUG_ERROR("Unexpected code called");
- return -1;
+ TSK_DEBUG_ERROR("Unexpected code called");
+ return -1;
#endif
}
static int tdav_producer_winm_stop(tmedia_producer_t* self)
{
- tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
+ tdav_producer_winm_t* producer = (tdav_producer_winm_t*)self;
- if(!self){
- TSK_DEBUG_ERROR("Invalid parameter");
- return -1;
- }
+ if(!self) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ return -1;
+ }
#if TDAV_UNDER_WINDOWS_PHONE
- return producer->videoCapturePhone->Stop();
+ return producer->videoCapturePhone->Stop();
#else
- TSK_DEBUG_ERROR("Unexpected code called");
- return -1;
+ TSK_DEBUG_ERROR("Unexpected code called");
+ return -1;
#endif
}
@@ -252,273 +246,242 @@ static int tdav_producer_winm_stop(tmedia_producer_t* self)
VideoCapturePhone::VideoCapturePhone() :
m_bStarted(false),
- m_bPrepared(false),
+ m_bPrepared(false),
m_pVideoOnlyDevice(nullptr),
m_pVideoSink(NULL),
m_pVideoDevice(NULL),
- m_pWrappedPlugin(NULL),
- m_pOpenOperation(nullptr),
+ m_pWrappedPlugin(NULL),
+ m_pOpenOperation(nullptr),
m_eCameraLocation(CameraSensorLocation::Front)
{
- if(!(m_hMutex = tsk_mutex_create())){
- throw ref new Platform::FailureException(L"Failed to create mutex");
- }
+ if(!(m_hMutex = tsk_mutex_create())) {
+ throw ref new Platform::FailureException(L"Failed to create mutex");
+ }
m_hStopCompleted = CreateEventEx(NULL, NULL, CREATE_EVENT_MANUAL_RESET, EVENT_ALL_ACCESS);
- if (!m_hStopCompleted)
- {
+ if (!m_hStopCompleted) {
throw ref new Platform::Exception(HRESULT_FROM_WIN32(GetLastError()), L"Could not create shutdown event");
}
m_hStartCompleted = CreateEventEx(NULL, NULL, CREATE_EVENT_MANUAL_RESET, EVENT_ALL_ACCESS);
- if (!m_hStartCompleted)
- {
+ if (!m_hStartCompleted) {
throw ref new Platform::Exception(HRESULT_FROM_WIN32(GetLastError()), L"Could not create start event");
}
}
VideoCapturePhone::~VideoCapturePhone()
{
- Stop();
+ Stop();
- if(m_ToggleThread)
- {
+ if(m_ToggleThread) {
m_ToggleThread->Cancel();
m_ToggleThread->Close();
m_ToggleThread = nullptr;
}
- tsk_mutex_destroy(&m_hMutex);
+ tsk_mutex_destroy(&m_hMutex);
}
int VideoCapturePhone::Prepare(const struct tdav_producer_winm_s* winm)
{
- HRESULT hr = E_FAIL;
- int ret = 0;
- Windows::Foundation::Size dimensionsRequested, dimensionsClosest;
- Collections::IVectorView<Size> ^availableSizes;
- Collections::IIterator<Windows::Foundation::Size> ^availableSizesIterator;
- bool bClosestFound = false;
-
- #define WINM_SET_ERROR(code) ret = (code); goto bail;
-
- tsk_mutex_lock(m_hMutex);
-
- if(m_bPrepared)
- {
- TSK_DEBUG_INFO("#WASAPI: Audio producer already prepared");
- goto bail;
- }
-
- if(!winm)
- {
- TSK_DEBUG_ERROR("Invalid parameter");
- WINM_SET_ERROR(-1);
- }
-
- if(m_pVideoCaptureAction || m_pVideoDevice || m_pVideoOnlyDevice || m_pVideoSink || m_pOpenOperation){
- TSK_DEBUG_ERROR("Producer already prepared");
- WINM_SET_ERROR(-2);
- }
-
- dimensionsClosest.Width = dimensionsRequested.Width = (float)TMEDIA_PRODUCER(winm)->video.width;
- dimensionsClosest.Height = dimensionsRequested.Height = (float)TMEDIA_PRODUCER(winm)->video.height;
+ HRESULT hr = E_FAIL;
+ int ret = 0;
+ Windows::Foundation::Size dimensionsRequested, dimensionsClosest;
+ Collections::IVectorView<Size> ^availableSizes;
+ Collections::IIterator<Windows::Foundation::Size> ^availableSizesIterator;
+ bool bClosestFound = false;
+
+#define WINM_SET_ERROR(code) ret = (code); goto bail;
+
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bPrepared) {
+ TSK_DEBUG_INFO("#WASAPI: Audio producer already prepared");
+ goto bail;
+ }
+
+ if(!winm) {
+ TSK_DEBUG_ERROR("Invalid parameter");
+ WINM_SET_ERROR(-1);
+ }
+
+ if(m_pVideoCaptureAction || m_pVideoDevice || m_pVideoOnlyDevice || m_pVideoSink || m_pOpenOperation) {
+ TSK_DEBUG_ERROR("Producer already prepared");
+ WINM_SET_ERROR(-2);
+ }
+
+ dimensionsClosest.Width = dimensionsRequested.Width = (float)TMEDIA_PRODUCER(winm)->video.width;
+ dimensionsClosest.Height = dimensionsRequested.Height = (float)TMEDIA_PRODUCER(winm)->video.height;
availableSizes = AudioVideoCaptureDevice::GetAvailableCaptureResolutions(m_eCameraLocation);
availableSizesIterator = availableSizes->First();
-
- while(!m_pOpenOperation && availableSizesIterator->HasCurrent)
- {
- TSK_DEBUG_INFO("Camera Supported size: (%f, %f)", availableSizesIterator->Current.Width, availableSizesIterator->Current.Height);
- if(availableSizesIterator->Current.Height == dimensionsRequested.Width && availableSizesIterator->Current.Width == dimensionsRequested.Height)
- {
+
+ while(!m_pOpenOperation && availableSizesIterator->HasCurrent) {
+ TSK_DEBUG_INFO("Camera Supported size: (%f, %f)", availableSizesIterator->Current.Width, availableSizesIterator->Current.Height);
+ if(availableSizesIterator->Current.Height == dimensionsRequested.Width && availableSizesIterator->Current.Width == dimensionsRequested.Height) {
m_pOpenOperation = AudioVideoCaptureDevice::OpenForVideoOnlyAsync(m_eCameraLocation, dimensionsRequested);
- TSK_DEBUG_INFO("Camera::Open(%d, %d)", dimensionsRequested.Width, dimensionsRequested.Height);
- break;
+ TSK_DEBUG_INFO("Camera::Open(%d, %d)", dimensionsRequested.Width, dimensionsRequested.Height);
+ break;
+ }
+ else if(!bClosestFound && (availableSizesIterator->Current.Height <= dimensionsRequested.Height && availableSizesIterator->Current.Width <= dimensionsRequested.Width)) {
+ dimensionsClosest.Height = availableSizesIterator->Current.Height;
+ dimensionsClosest.Width = availableSizesIterator->Current.Width;
+ bClosestFound = true;
}
- else if(!bClosestFound && (availableSizesIterator->Current.Height <= dimensionsRequested.Height && availableSizesIterator->Current.Width <= dimensionsRequested.Width))
- {
- dimensionsClosest.Height = availableSizesIterator->Current.Height;
- dimensionsClosest.Width = availableSizesIterator->Current.Width;
- bClosestFound = true;
- }
availableSizesIterator->MoveNext();
}
- if(!m_pOpenOperation)
- {
- m_pOpenOperation = AudioVideoCaptureDevice::OpenForVideoOnlyAsync(m_eCameraLocation, dimensionsClosest);
- TSK_DEBUG_INFO("Camera::Open(%f, %f)", dimensionsClosest.Width, dimensionsClosest.Height);
- }
+ if(!m_pOpenOperation) {
+ m_pOpenOperation = AudioVideoCaptureDevice::OpenForVideoOnlyAsync(m_eCameraLocation, dimensionsClosest);
+ TSK_DEBUG_INFO("Camera::Open(%f, %f)", dimensionsClosest.Width, dimensionsClosest.Height);
+ }
bail:
- if(ret != 0){
- UnPrepare();
- }
- if((m_bPrepared = (ret == 0)))
- {
- m_pWrappedPlugin = winm;
- }
+ if(ret != 0) {
+ UnPrepare();
+ }
+ if((m_bPrepared = (ret == 0))) {
+ m_pWrappedPlugin = winm;
+ }
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return ret;
+ return ret;
}
int VideoCapturePhone::Start()
{
- tsk_mutex_lock(m_hMutex);
-
- if(m_bStarted)
- {
- TSK_DEBUG_INFO("#WINM: Video producer already started");
- goto bail;
- }
- if(!m_bPrepared)
- {
- TSK_DEBUG_ERROR("#WINM: Video producer not prepared");
- goto bail;
- }
-
- m_bStarted = true;
-
- m_pOpenOperation->Completed = ref new AsyncOperationCompletedHandler<AudioVideoCaptureDevice^>([this] (IAsyncOperation<AudioVideoCaptureDevice^> ^operation, Windows::Foundation::AsyncStatus status)
- {
- tsk_mutex_lock(m_hMutex);
- if(m_bStarted)
- {
-
- if(status == Windows::Foundation::AsyncStatus::Completed)
- {
-
- TSK_DEBUG_INFO("+[VideoCapturePhone::Prepare] => OpenAsyncOperation started");
-
- auto videoDevice = operation->GetResults();
-
- m_pVideoOnlyDevice = videoDevice;
- IAudioVideoCaptureDeviceNative *pNativeDevice = NULL;
- HRESULT hr = reinterpret_cast<IUnknown*>(videoDevice)->QueryInterface(__uuidof(IAudioVideoCaptureDeviceNative), (void**) &pNativeDevice);
-
- if (NULL == pNativeDevice || FAILED(hr))
- {
- throw ref new FailureException("Unable to QI IAudioVideoCaptureDeviceNative");
- }
-
- // Save off the native device
- m_pVideoDevice = pNativeDevice;
-
- // Set Fps
- CameraCapturePropertyRange^ cameraCapturePropertyRange = m_pVideoOnlyDevice->GetSupportedPropertyRange(m_eCameraLocation, KnownCameraAudioVideoProperties::VideoFrameRate);
- if(cameraCapturePropertyRange)
- {
- try
- {
- Windows::Foundation::IPropertyValue^ vMin = dynamic_cast<Windows::Foundation::IPropertyValue^>(cameraCapturePropertyRange->Min);
- Windows::Foundation::IPropertyValue^ vMax = dynamic_cast<Windows::Foundation::IPropertyValue^>(cameraCapturePropertyRange->Max);
- UINT32 nFps = TSK_CLAMP(vMin->GetUInt32(), (UINT32)TMEDIA_PRODUCER(m_pWrappedPlugin)->video.fps, vMax->GetUInt32());
- m_pVideoOnlyDevice->SetProperty(KnownCameraAudioVideoProperties::VideoFrameRate, nFps);
- }
- catch(...){ }
- }
-
- // Set Camera Rotation
- try
- {
- m_pVideoOnlyDevice->SetProperty(
- KnownCameraGeneralProperties::EncodeWithOrientation,
- m_eCameraLocation == Windows::Phone::Media::Capture::CameraSensorLocation::Back ? 90 : -90
- );
- }
- catch(...){ }
-
- // Create the sink
- MakeAndInitialize<CaptureSampleSink>(&(m_pVideoSink), m_pWrappedPlugin);
- pNativeDevice->SetVideoSampleSink(m_pVideoSink);
-
- // Use the same encoding format as in VideoMediaStreamSource.cs
- videoDevice->VideoEncodingFormat = CameraCaptureVideoFormat::H264;
-
- SetEvent(m_hStartCompleted);
-
- // Start recording to our sink
- m_pVideoCaptureAction = videoDevice->StartRecordingToSinkAsync();
- m_pVideoCaptureAction->Completed = ref new AsyncActionCompletedHandler([this] (IAsyncAction ^asyncInfo, Windows::Foundation::AsyncStatus status)
- {
- if(status == Windows::Foundation::AsyncStatus::Completed)
- {
- TSK_DEBUG_INFO("[VideoCapturePhone::Prepare] => StartRecordingToSinkAsync completed");
- }
- else if(status == Windows::Foundation::AsyncStatus::Error || status == Windows::Foundation::AsyncStatus::Canceled)
- {
- TSK_DEBUG_INFO("[VideoCapturePhone::Prepare] => StartRecordingToSinkAsync did not complete");
- }
- });
-
- TSK_DEBUG_INFO("-[VideoCapturePhone::Prepare] => OpenAsyncOperation Completed");
- }
- else if(status == Windows::Foundation::AsyncStatus::Canceled)
- {
- TSK_DEBUG_INFO("[VideoCapturePhone::Prepare] => OpenAsyncOperation Canceled");
- }
- else if(status == Windows::Foundation::AsyncStatus::Error)
- {
- TSK_DEBUG_INFO("[VideoCapturePhone::Prepare] => OpenAsyncOperation encountered an error");
- }
- }
-
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bStarted) {
+ TSK_DEBUG_INFO("#WINM: Video producer already started");
+ goto bail;
+ }
+ if(!m_bPrepared) {
+ TSK_DEBUG_ERROR("#WINM: Video producer not prepared");
+ goto bail;
+ }
+
+ m_bStarted = true;
+
+ m_pOpenOperation->Completed = ref new AsyncOperationCompletedHandler<AudioVideoCaptureDevice^>([this] (IAsyncOperation<AudioVideoCaptureDevice^> ^operation, Windows::Foundation::AsyncStatus status) {
+ tsk_mutex_lock(m_hMutex);
+ if(m_bStarted) {
+
+ if(status == Windows::Foundation::AsyncStatus::Completed) {
+
+ TSK_DEBUG_INFO("+[VideoCapturePhone::Prepare] => OpenAsyncOperation started");
+
+ auto videoDevice = operation->GetResults();
+
+ m_pVideoOnlyDevice = videoDevice;
+ IAudioVideoCaptureDeviceNative *pNativeDevice = NULL;
+ HRESULT hr = reinterpret_cast<IUnknown*>(videoDevice)->QueryInterface(__uuidof(IAudioVideoCaptureDeviceNative), (void**) &pNativeDevice);
+
+ if (NULL == pNativeDevice || FAILED(hr)) {
+ throw ref new FailureException("Unable to QI IAudioVideoCaptureDeviceNative");
+ }
+
+ // Save off the native device
+ m_pVideoDevice = pNativeDevice;
+
+ // Set Fps
+ CameraCapturePropertyRange^ cameraCapturePropertyRange = m_pVideoOnlyDevice->GetSupportedPropertyRange(m_eCameraLocation, KnownCameraAudioVideoProperties::VideoFrameRate);
+ if(cameraCapturePropertyRange) {
+ try {
+ Windows::Foundation::IPropertyValue^ vMin = dynamic_cast<Windows::Foundation::IPropertyValue^>(cameraCapturePropertyRange->Min);
+ Windows::Foundation::IPropertyValue^ vMax = dynamic_cast<Windows::Foundation::IPropertyValue^>(cameraCapturePropertyRange->Max);
+ UINT32 nFps = TSK_CLAMP(vMin->GetUInt32(), (UINT32)TMEDIA_PRODUCER(m_pWrappedPlugin)->video.fps, vMax->GetUInt32());
+ m_pVideoOnlyDevice->SetProperty(KnownCameraAudioVideoProperties::VideoFrameRate, nFps);
+ }
+ catch(...) { }
+ }
+
+ // Set Camera Rotation
+ try {
+ m_pVideoOnlyDevice->SetProperty(
+ KnownCameraGeneralProperties::EncodeWithOrientation,
+ m_eCameraLocation == Windows::Phone::Media::Capture::CameraSensorLocation::Back ? 90 : -90
+ );
+ }
+ catch(...) { }
+
+ // Create the sink
+ MakeAndInitialize<CaptureSampleSink>(&(m_pVideoSink), m_pWrappedPlugin);
+ pNativeDevice->SetVideoSampleSink(m_pVideoSink);
+
+ // Use the same encoding format as in VideoMediaStreamSource.cs
+ videoDevice->VideoEncodingFormat = CameraCaptureVideoFormat::H264;
+
+ SetEvent(m_hStartCompleted);
+
+ // Start recording to our sink
+ m_pVideoCaptureAction = videoDevice->StartRecordingToSinkAsync();
+ m_pVideoCaptureAction->Completed = ref new AsyncActionCompletedHandler([this] (IAsyncAction ^asyncInfo, Windows::Foundation::AsyncStatus status) {
+ if(status == Windows::Foundation::AsyncStatus::Completed) {
+ TSK_DEBUG_INFO("[VideoCapturePhone::Prepare] => StartRecordingToSinkAsync completed");
+ }
+ else if(status == Windows::Foundation::AsyncStatus::Error || status == Windows::Foundation::AsyncStatus::Canceled) {
+ TSK_DEBUG_INFO("[VideoCapturePhone::Prepare] => StartRecordingToSinkAsync did not complete");
+ }
+ });
+
+ TSK_DEBUG_INFO("-[VideoCapturePhone::Prepare] => OpenAsyncOperation Completed");
+ }
+ else if(status == Windows::Foundation::AsyncStatus::Canceled) {
+ TSK_DEBUG_INFO("[VideoCapturePhone::Prepare] => OpenAsyncOperation Canceled");
+ }
+ else if(status == Windows::Foundation::AsyncStatus::Error) {
+ TSK_DEBUG_INFO("[VideoCapturePhone::Prepare] => OpenAsyncOperation encountered an error");
+ }
+ }
+
+ tsk_mutex_unlock(m_hMutex);
});
-
+
bail:
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
return (m_bStarted ? 0 : -2);
}
int VideoCapturePhone::Pause()
{
- tsk_mutex_lock(m_hMutex);
+ tsk_mutex_lock(m_hMutex);
- if(m_bStarted)
- {
+ if(m_bStarted) {
- }
+ }
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return 0;
+ return 0;
}
int VideoCapturePhone::Stop()
{
- tsk_mutex_lock(m_hMutex);
+ tsk_mutex_lock(m_hMutex);
- TSK_DEBUG_INFO("+[VideoCapturePhone::Stop] => Trying to stop capture");
- if (m_pVideoOnlyDevice)
- {
+ TSK_DEBUG_INFO("+[VideoCapturePhone::Stop] => Trying to stop capture");
+ if (m_pVideoOnlyDevice) {
TSK_DEBUG_INFO("Destroying VideoCaptureDevice");
- try
- {
- if(m_bStarted)
- {
- m_pVideoOnlyDevice->StopRecordingAsync()->Completed = ref new AsyncActionCompletedHandler([this] (IAsyncAction ^action, Windows::Foundation::AsyncStatus status){
- if(status == Windows::Foundation::AsyncStatus::Completed)
- {
- TSK_DEBUG_INFO("[VideoCapturePhone::StopRecordingAsync] Video successfully stopped");
- }
- else
- {
- TSK_DEBUG_INFO("[VideoCapturePhone::StopRecordingAsync] Error occurred while stopping recording");
- }
- m_pVideoCaptureAction = nullptr;
- m_pVideoOnlyDevice = nullptr;
- m_bStarted = false;
- SetEvent(m_hStopCompleted);
- });
- }
+ try {
+ if(m_bStarted) {
+ m_pVideoOnlyDevice->StopRecordingAsync()->Completed = ref new AsyncActionCompletedHandler([this] (IAsyncAction ^action, Windows::Foundation::AsyncStatus status) {
+ if(status == Windows::Foundation::AsyncStatus::Completed) {
+ TSK_DEBUG_INFO("[VideoCapturePhone::StopRecordingAsync] Video successfully stopped");
+ }
+ else {
+ TSK_DEBUG_INFO("[VideoCapturePhone::StopRecordingAsync] Error occurred while stopping recording");
+ }
+ m_pVideoCaptureAction = nullptr;
+ m_pVideoOnlyDevice = nullptr;
+ m_bStarted = false;
+ SetEvent(m_hStopCompleted);
+ });
+ }
}
- catch(...)
- {
+ catch(...) {
// A Platform::ObjectDisposedException can be raised if the app has had its access
// to video revoked (most commonly when the app is going out of the foreground)
TSK_DEBUG_ERROR("Exception caught while destroying video capture");
@@ -528,90 +491,79 @@ int VideoCapturePhone::Stop()
SetEvent(m_hStopCompleted);
}
- if (m_pVideoDevice)
- {
+ if (m_pVideoDevice) {
m_pVideoDevice->Release();
m_pVideoDevice = NULL;
}
- if (m_pVideoSink)
- {
+ if (m_pVideoSink) {
m_pVideoSink->Release();
m_pVideoSink = NULL;
}
}
- else
- {
- m_bStarted = false;
- }
+ else {
+ m_bStarted = false;
+ }
- TSK_DEBUG_INFO("-[VideoCapturePhone::Stop] => finished stopping capture\n");
+ TSK_DEBUG_INFO("-[VideoCapturePhone::Stop] => finished stopping capture\n");
- // will be prepared again before next start()
- UnPrepare();
+ // will be prepared again before next start()
+ UnPrepare();
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return 0;
+ return 0;
}
void VideoCapturePhone::SetCameraLocation(Windows::Phone::Media::Capture::CameraSensorLocation cameraLocation)
{
- if(m_eCameraLocation != cameraLocation)
- {
- if(m_bStarted)
- {
- ToggleCamera();
- }
- else
- {
- m_eCameraLocation = cameraLocation;
- }
- }
+ if(m_eCameraLocation != cameraLocation) {
+ if(m_bStarted) {
+ ToggleCamera();
+ }
+ else {
+ m_eCameraLocation = cameraLocation;
+ }
+ }
}
int VideoCapturePhone::UnPrepare()
{
- tsk_mutex_lock(m_hMutex);
-
- if(m_bStarted)
- {
- ResetEvent(m_hStopCompleted);
- Stop();
- DWORD waitResult = WaitForSingleObjectEx(m_hStopCompleted, 5000, FALSE);
- if(waitResult != WAIT_OBJECT_0)
- {
- TSK_DEBUG_ERROR("Failed to stop video producer");
- }
- }
-
- if (m_pVideoDevice)
- {
+ tsk_mutex_lock(m_hMutex);
+
+ if(m_bStarted) {
+ ResetEvent(m_hStopCompleted);
+ Stop();
+ DWORD waitResult = WaitForSingleObjectEx(m_hStopCompleted, 5000, FALSE);
+ if(waitResult != WAIT_OBJECT_0) {
+ TSK_DEBUG_ERROR("Failed to stop video producer");
+ }
+ }
+
+ if (m_pVideoDevice) {
m_pVideoDevice->Release();
m_pVideoDevice = NULL;
}
- if (m_pVideoSink)
- {
+ if (m_pVideoSink) {
m_pVideoSink->Release();
m_pVideoSink = NULL;
}
- m_pOpenOperation = nullptr;
+ m_pOpenOperation = nullptr;
- m_bPrepared = false;
+ m_bPrepared = false;
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
- return 0;
+ return 0;
}
void VideoCapturePhone::ToggleCamera()
{
tsk_mutex_lock(m_hMutex);
- if(m_ToggleThread)
- {
+ if(m_ToggleThread) {
m_ToggleThread->Cancel();
m_ToggleThread->Close();
m_ToggleThread = nullptr;
@@ -619,7 +571,7 @@ void VideoCapturePhone::ToggleCamera()
m_ToggleThread = ThreadPool::RunAsync(ref new WorkItemHandler(this, &VideoCapturePhone::ToggleCameraThread), WorkItemPriority::High, WorkItemOptions::TimeSliced);
- tsk_mutex_unlock(m_hMutex);
+ tsk_mutex_unlock(m_hMutex);
}
@@ -630,35 +582,29 @@ void VideoCapturePhone::ToggleCameraThread(Windows::Foundation::IAsyncAction^ op
ResetEvent(m_hStopCompleted);
Stop();
DWORD waitResult = WaitForSingleObjectEx(m_hStopCompleted, INFINITE, FALSE);
- if(waitResult == WAIT_OBJECT_0)
- {
+ if(waitResult == WAIT_OBJECT_0) {
ResetEvent(m_hStartCompleted);
- if(m_eCameraLocation == Windows::Phone::Media::Capture::CameraSensorLocation::Back)
- {
+ if(m_eCameraLocation == Windows::Phone::Media::Capture::CameraSensorLocation::Back) {
m_eCameraLocation = Windows::Phone::Media::Capture::CameraSensorLocation::Front;
}
- else
- {
+ else {
m_eCameraLocation = Windows::Phone::Media::Capture::CameraSensorLocation::Back;
}
- Prepare(m_pWrappedPlugin);
- Start();
+ Prepare(m_pWrappedPlugin);
+ Start();
}
- else
- {
+ else {
throw ref new Platform::Exception(HRESULT_FROM_WIN32(waitResult), L"Error waiting for capture to stop when toggling cameras");
}
waitResult = WaitForSingleObjectEx(m_hStartCompleted, INFINITE, FALSE);
- if(waitResult == WAIT_OBJECT_0)
- {
+ if(waitResult == WAIT_OBJECT_0) {
// CameraLocationChanged(newCameraLocation);
}
- else
- {
+ else {
throw ref new Platform::Exception(HRESULT_FROM_WIN32(waitResult), L"Error waiting for capture to start when toggling cameras");
}
- TSK_DEBUG_INFO("-[VideoCapturePhone::ToggleCamera] => Toggling camera");
+ TSK_DEBUG_INFO("-[VideoCapturePhone::ToggleCamera] => Toggling camera");
}
#endif /* TDAV_UNDER_WINDOWS_PHONE */
@@ -670,67 +616,64 @@ void VideoCapturePhone::ToggleCameraThread(Windows::Foundation::IAsyncAction^ op
/* constructor */
static tsk_object_t* tdav_producer_winm_ctor(tsk_object_t * self, va_list * app)
{
- tdav_producer_winm_t *producer = (tdav_producer_winm_t *)self;
- if(producer){
- /* init base */
- tmedia_producer_init(TMEDIA_PRODUCER(producer));
- TMEDIA_PRODUCER(producer)->video.chroma = tmedia_chroma_yuv420p; // To avoid chroma conversion
- /* init self with default values*/
+ tdav_producer_winm_t *producer = (tdav_producer_winm_t *)self;
+ if(producer) {
+ /* init base */
+ tmedia_producer_init(TMEDIA_PRODUCER(producer));
+ TMEDIA_PRODUCER(producer)->video.chroma = tmedia_chroma_yuv420p; // To avoid chroma conversion
+ /* init self with default values*/
- TMEDIA_PRODUCER(producer)->video.fps = 15;
- TMEDIA_PRODUCER(producer)->video.width = 352;
- TMEDIA_PRODUCER(producer)->video.height = 288;
+ TMEDIA_PRODUCER(producer)->video.fps = 15;
+ TMEDIA_PRODUCER(producer)->video.width = 352;
+ TMEDIA_PRODUCER(producer)->video.height = 288;
#if TDAV_UNDER_WINDOWS_PHONE
- producer->videoCapturePhone = ref new VideoCapturePhone();
+ producer->videoCapturePhone = ref new VideoCapturePhone();
#endif
- }
- return self;
+ }
+ return self;
}
/* destructor */
static tsk_object_t* tdav_producer_winm_dtor(tsk_object_t * self)
-{
- tdav_producer_winm_t *producer = (tdav_producer_winm_t *)self;
- if(producer){
- /* stop */
- //if(producer->started){
- tdav_producer_winm_stop((tmedia_producer_t*)self);
- //}
-
- /* deinit base */
- tmedia_producer_deinit(TMEDIA_PRODUCER(producer));
- /* deinit self */
+{
+ tdav_producer_winm_t *producer = (tdav_producer_winm_t *)self;
+ if(producer) {
+ /* stop */
+ //if(producer->started){
+ tdav_producer_winm_stop((tmedia_producer_t*)self);
+ //}
+
+ /* deinit base */
+ tmedia_producer_deinit(TMEDIA_PRODUCER(producer));
+ /* deinit self */
#if TDAV_UNDER_WINDOWS_PHONE
- if(producer->videoCapturePhone)
- {
- delete producer->videoCapturePhone;
- }
+ if(producer->videoCapturePhone) {
+ delete producer->videoCapturePhone;
+ }
#endif
- }
+ }
- return self;
+ return self;
}
/* object definition */
-static const tsk_object_def_t tdav_producer_winm_def_s =
-{
- sizeof(tdav_producer_winm_t),
- tdav_producer_winm_ctor,
- tdav_producer_winm_dtor,
- tsk_null,
+static const tsk_object_def_t tdav_producer_winm_def_s = {
+ sizeof(tdav_producer_winm_t),
+ tdav_producer_winm_ctor,
+ tdav_producer_winm_dtor,
+ tsk_null,
};
/* plugin definition*/
-static const tmedia_producer_plugin_def_t tdav_producer_winm_plugin_def_s =
-{
- &tdav_producer_winm_def_s,
+static const tmedia_producer_plugin_def_t tdav_producer_winm_plugin_def_s = {
+ &tdav_producer_winm_def_s,
- tmedia_video,
- "Microsoft Windows Media producer (Video)",
+ tmedia_video,
+ "Microsoft Windows Media producer (Video)",
- tdav_producer_winm_set,
- tdav_producer_winm_prepare,
- tdav_producer_winm_start,
- tdav_producer_winm_pause,
- tdav_producer_winm_stop
+ tdav_producer_winm_set,
+ tdav_producer_winm_prepare,
+ tdav_producer_winm_start,
+ tdav_producer_winm_pause,
+ tdav_producer_winm_stop
};
const tmedia_producer_plugin_def_t *tdav_producer_winm_plugin_def_t = &tdav_producer_winm_plugin_def_s;
OpenPOWER on IntegriCloud