path: root/legacy/emotion/src/modules/xine/emotion_xine_vo_out.c
author    Gustavo Sverzut Barbieri <barbieri@gmail.com>  2013-01-10 03:43:32 +0000
committer Gustavo Sverzut Barbieri <barbieri@gmail.com>  2013-01-10 03:43:32 +0000
commit    dfb84c1657bfb14a5236b881193b81f4c0b8a69b (patch)
tree      b51b210fc88a21eec8e5907b8bbfe12ebc669f90 /legacy/emotion/src/modules/xine/emotion_xine_vo_out.c
parent    532284dbbe4259a9f2291f44d3eff376849e8031 (diff)
efl: merge emotion.
This one was quite a lot of work, but hopefully it's correct.

NOTES:
* removed the vlc generic module; it should go into a separate package.
* gstreamer is enabled by default (see --disable-gstreamer)
* xine is disabled by default (see --enable-xine)
* generic is always built statically if supported
* gstreamer and xine can't be configured as static (this just lacks command line options; the build system supports it)
* v4l2 is enabled by default on linux if eeze is built (see --disable-v4l2)
* emotion_test moved to src/tests/emotion and depends on EFL_ENABLE_TESTS (--with-tests), but is still installed if enabled.

TODO (need your help!):
* fix warnings with the gstreamer and xine engines
* call engine shutdown functions if building as static
* remove direct usage of PACKAGE_*_DIR and use eina_prefix
* add an eina_prefix checkme file as evas and others do
* add support for $EFL_RUN_IN_TREE
* create a separate package for emotion_generic_modules
* check docs hierarchy (doxygen is segv'ing here)

SVN revision: 82501
Diffstat
-rw-r--r--  src/modules/emotion/xine/emotion_xine_vo_out.c (renamed from legacy/emotion/src/modules/xine/emotion_xine_vo_out.c)  26
1 file changed, 13 insertions(+), 13 deletions(-)
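The change is mechanical: every callback parameter previously annotated with __UNUSED__ is now annotated with Eina's EINA_UNUSED. As a rough, hypothetical sketch (illustrative names only, not the actual Eina header), such an unused-parameter macro is typically a thin wrapper over a compiler attribute:

/* Hypothetical sketch of an unused-parameter macro in the spirit of
 * EINA_UNUSED; the real definition lives in Eina's headers. */
#ifdef __GNUC__
# define EXAMPLE_UNUSED __attribute__((unused))
#else
# define EXAMPLE_UNUSED
#endif

/* Usage mirrors the callbacks touched in this diff: the parameter is
 * required by the video-out vtable signature but never read in the body. */
static int
example_redraw(void *vo_driver EXAMPLE_UNUSED)
{
   return 0;
}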
diff --git a/legacy/emotion/src/modules/xine/emotion_xine_vo_out.c b/src/modules/emotion/xine/emotion_xine_vo_out.c
index f777196246..e6370279fc 100644
--- a/legacy/emotion/src/modules/xine/emotion_xine_vo_out.c
+++ b/src/modules/emotion/xine/emotion_xine_vo_out.c
@@ -123,7 +123,7 @@ plugin_info_t emotion_xine_plugin_info[] =
 
 /***************************************************************************/
 static void *
-_emotion_class_init(xine_t *xine, void *visual __UNUSED__)
+_emotion_class_init(xine_t *xine, void *visual EINA_UNUSED)
 {
    Emotion_Class *cl;
 
@@ -155,13 +155,13 @@ _emotion_class_dispose(video_driver_class_t *driver_class)
 }
 
 static char *
-_emotion_class_identifier_get(video_driver_class_t *driver_class __UNUSED__)
+_emotion_class_identifier_get(video_driver_class_t *driver_class EINA_UNUSED)
 {
    return "emotion";
 }
 
 static char *
-_emotion_class_description_get(video_driver_class_t *driver_class __UNUSED__)
+_emotion_class_description_get(video_driver_class_t *driver_class EINA_UNUSED)
 {
    return "Emotion xine video output plugin";
 }
@@ -214,7 +214,7 @@ _emotion_dispose(vo_driver_t *vo_driver)
 
 /***************************************************************************/
 static int
-_emotion_redraw(vo_driver_t *vo_driver __UNUSED__)
+_emotion_redraw(vo_driver_t *vo_driver EINA_UNUSED)
 {
 // DBG("");
    return 0;
@@ -222,7 +222,7 @@ _emotion_redraw(vo_driver_t *vo_driver __UNUSED__)
 
 /***************************************************************************/
 static uint32_t
-_emotion_capabilities_get(vo_driver_t *vo_driver __UNUSED__)
+_emotion_capabilities_get(vo_driver_t *vo_driver EINA_UNUSED)
 {
 // DBG("");
    return VO_CAP_YV12 | VO_CAP_YUY2;
@@ -230,7 +230,7 @@ _emotion_capabilities_get(vo_driver_t *vo_driver __UNUSED__)
 
 /***************************************************************************/
 static int
-_emotion_gui_data_exchange(vo_driver_t *vo_driver __UNUSED__, int data_type, void *data __UNUSED__)
+_emotion_gui_data_exchange(vo_driver_t *vo_driver EINA_UNUSED, int data_type, void *data EINA_UNUSED)
 {
 // DBG("");
    switch (data_type)
@@ -294,7 +294,7 @@ _emotion_property_get(vo_driver_t *vo_driver, int property)
 }
 
 static void
-_emotion_property_min_max_get(vo_driver_t *vo_driver __UNUSED__, int property __UNUSED__, int *min, int *max)
+_emotion_property_min_max_get(vo_driver_t *vo_driver EINA_UNUSED, int property EINA_UNUSED, int *min, int *max)
 {
 // DBG("");
    *min = 0;
@@ -303,7 +303,7 @@ _emotion_property_min_max_get(vo_driver_t *vo_driver __UNUSED__, int property __
 
 /***************************************************************************/
 static vo_frame_t *
-_emotion_frame_alloc(vo_driver_t *vo_driver __UNUSED__)
+_emotion_frame_alloc(vo_driver_t *vo_driver EINA_UNUSED)
 {
    Emotion_Frame *fr;
 
@@ -336,7 +336,7 @@ _emotion_frame_dispose(vo_frame_t *vo_frame)
 }
 
 static void
-_emotion_frame_format_update(vo_driver_t *vo_driver, vo_frame_t *vo_frame, uint32_t width, uint32_t height, double ratio, int format, int flags __UNUSED__)
+_emotion_frame_format_update(vo_driver_t *vo_driver, vo_frame_t *vo_frame, uint32_t width, uint32_t height, double ratio, int format, int flags EINA_UNUSED)
 {
    Emotion_Driver *dv;
    Emotion_Frame *fr;
@@ -463,7 +463,7 @@ _emotion_frame_display(vo_driver_t *vo_driver, vo_frame_t *vo_frame)
 }
 
 static void
-_emotion_frame_field(vo_frame_t *vo_frame __UNUSED__, int which_field __UNUSED__)
+_emotion_frame_field(vo_frame_t *vo_frame EINA_UNUSED, int which_field EINA_UNUSED)
 {
 // DBG("");
 }
@@ -502,19 +502,19 @@ _emotion_frame_data_unlock(Emotion_Frame *fr)
 
 /***************************************************************************/
 static void
-_emotion_overlay_begin(vo_driver_t *vo_driver __UNUSED__, vo_frame_t *vo_frame __UNUSED__, int changed __UNUSED__)
+_emotion_overlay_begin(vo_driver_t *vo_driver EINA_UNUSED, vo_frame_t *vo_frame EINA_UNUSED, int changed EINA_UNUSED)
 {
 // DBG("");
 }
 
 static void
-_emotion_overlay_end(vo_driver_t *vo_driver __UNUSED__, vo_frame_t *vo_frame __UNUSED__)
+_emotion_overlay_end(vo_driver_t *vo_driver EINA_UNUSED, vo_frame_t *vo_frame EINA_UNUSED)
 {
 // DBG("");
 }
 
 static void
-_emotion_overlay_blend(vo_driver_t *vo_driver __UNUSED__, vo_frame_t *vo_frame, vo_overlay_t *vo_overlay __UNUSED__)
+_emotion_overlay_blend(vo_driver_t *vo_driver EINA_UNUSED, vo_frame_t *vo_frame, vo_overlay_t *vo_overlay EINA_UNUSED)
 {
    Emotion_Frame *fr;
 