summaryrefslogtreecommitdiff
path: root/legacy/emotion/src
diff options
context:
space:
mode:
authorGustavo Sverzut Barbieri <barbieri@gmail.com>2013-01-10 03:43:32 +0000
committerGustavo Sverzut Barbieri <barbieri@gmail.com>2013-01-10 03:43:32 +0000
commitdfb84c1657bfb14a5236b881193b81f4c0b8a69b (patch)
treeb51b210fc88a21eec8e5907b8bbfe12ebc669f90 /legacy/emotion/src
parent532284dbbe4259a9f2291f44d3eff376849e8031 (diff)
efl: merge emotion.
this one was quite a huge work, but hopefully it's correct. NOTES: * removed vlc generic module, it should go into a separate package. * gstreamer is enabled by default (see --disable-gstreamer) * xine is disabled by default (see --enable-gstreamer) * generic is always built statically if supported * gstreamer and xine can't be configured as static (just lacks command line options, build system supports it) * v4l2 is enabled by default on linux if eeze is built (see --disable-v4l2) * emotion_test moved to src/tests/emotion and depends on EFL_ENABLE_TESTS (--with-tests), but is still installed if enabled. TODO (need your help!): * fix warnings with gstreamer and xine engine * call engine shutdown functions if building as static * remove direct usage of PACKAGE_*_DIR and use eina_prefix * add eina_prefix checkme file as evas and others * add support for $EFL_RUN_IN_TREE * create separate package for emotion_generic_modules * check docs hierarchy (doxygen is segv'in here) SVN revision: 82501
Diffstat (limited to 'legacy/emotion/src')
-rw-r--r--legacy/emotion/src/bin/emotion_test_main.c748
-rw-r--r--legacy/emotion/src/edje_external/emotion.c516
-rw-r--r--legacy/emotion/src/examples/emotion_basic_example.c81
-rw-r--r--legacy/emotion/src/examples/emotion_border_example.c238
-rw-r--r--legacy/emotion/src/examples/emotion_generic_example.c233
-rw-r--r--legacy/emotion/src/examples/emotion_generic_subtitle_example.c97
-rw-r--r--legacy/emotion/src/examples/emotion_signals_example.c173
-rw-r--r--legacy/emotion/src/lib/Emotion.h1314
-rw-r--r--legacy/emotion/src/lib/emotion_main.c481
-rw-r--r--legacy/emotion/src/lib/emotion_private.h137
-rw-r--r--legacy/emotion/src/lib/emotion_smart.c2133
-rw-r--r--legacy/emotion/src/modules/generic/Emotion_Generic_Plugin.h145
-rw-r--r--legacy/emotion/src/modules/generic/README79
-rw-r--r--legacy/emotion/src/modules/generic/emotion_generic.c1820
-rw-r--r--legacy/emotion/src/modules/generic/emotion_generic.h113
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_alloc.c91
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_convert.c252
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_fakeeos.c71
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c2157
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h330
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_sink.c1391
-rw-r--r--legacy/emotion/src/modules/xine/emotion_xine.c1723
-rw-r--r--legacy/emotion/src/modules/xine/emotion_xine.h98
-rw-r--r--legacy/emotion/src/modules/xine/emotion_xine_vo_out.c767
24 files changed, 0 insertions, 15188 deletions
diff --git a/legacy/emotion/src/bin/emotion_test_main.c b/legacy/emotion/src/bin/emotion_test_main.c
deleted file mode 100644
index 13380d6700..0000000000
--- a/legacy/emotion/src/bin/emotion_test_main.c
+++ /dev/null
@@ -1,748 +0,0 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <stdlib.h>
6#include <stdio.h>
7#include <string.h>
8
9#include <Evas.h>
10#include <Ecore.h>
11#include <Ecore_Getopt.h>
12#include <Ecore_Evas.h>
13#include <Edje.h>
14
15#include "Emotion.h"
16
17static const Ecore_Getopt options = {
18 "emotion_test",
19 "%prog [options] <filename>",
20 "1.0.0",
21 "(C) 2011 Enlightenment",
22 "BSD\nThis is a 3 clause bsd bla bla",
23 "a simple test program for emotion.",
24 1,
25 {
26 ECORE_GETOPT_STORE_STR('e', "engine", "ecore-evas engine to use"),
27 ECORE_GETOPT_CALLBACK_NOARGS('E', "list-engines", "list ecore-evas engines",
28 ecore_getopt_callback_ecore_evas_list_engines, NULL),
29 ECORE_GETOPT_CALLBACK_ARGS('g', "geometry", "geometry to use in x:y:w:h form.", "X:Y:W:H",
30 ecore_getopt_callback_geometry_parse, NULL),
31 ECORE_GETOPT_STORE_STR('b', "backend", "backend to use"),
32 ECORE_GETOPT_STORE_INT('v', "vis", "visualization type"),
33 ECORE_GETOPT_COUNT('v', "verbose", "be more verbose"),
34 ECORE_GETOPT_STORE_TRUE('R', "reflex", "show video reflex effect"),
35 ECORE_GETOPT_VERSION('V', "version"),
36 ECORE_GETOPT_COPYRIGHT('R', "copyright"),
37 ECORE_GETOPT_LICENSE('L', "license"),
38 ECORE_GETOPT_HELP('h', "help"),
39 ECORE_GETOPT_SENTINEL
40 }
41};
42
typedef struct _Frame_Data Frame_Data;

/* Per-controller drag state, attached to each video controller edje
 * under the "frame_data" data key (see init_video_object()).  Tracks
 * whether a move or resize drag is in progress and the last pointer
 * position seen, so mouse,move deltas can be applied incrementally. */
struct _Frame_Data
{
   unsigned char moving : 1;   /* a frame-move drag is in progress */
   unsigned char resizing : 1; /* a frame-resize drag is in progress */
   int button;                 /* mouse button of the drag (set but unused here) */
   Evas_Coord x, y;            /* last pointer canvas position */
};
52
/* forward declarations */
static void main_resize(Ecore_Evas *ee);
static Eina_Bool main_signal_exit(void *data, int ev_type, void *ev);
static void main_delete_request(Ecore_Evas *ee);

static void bg_setup(void);
static void bg_resize(Evas_Coord w, Evas_Coord h);
static void bg_key_down(void *data, Evas * e, Evas_Object * obj, void *event_info);

/* background edje object created by bg_setup() */
static Evas_Object *o_bg = NULL;

static double start_time = 0.0;       /* ecore_time_get() at startup */
static Ecore_Evas *ecore_evas = NULL; /* the single program window */
static Evas *evas = NULL;             /* canvas of that window */
static int startw = 800;              /* initial window width */
static int starth = 600;              /* initial window height */

/* all live emotion (video) objects */
static Eina_List *video_objs = NULL;
/* current visualization mode, cycled at runtime with the 'z' key */
static Emotion_Vis vis = EMOTION_VIS_NONE;
/* non-zero when the reflex controller theme was requested */
static unsigned char reflex = 0;
72
73static void
74main_resize(Ecore_Evas *ee)
75{
76 Evas_Coord w, h;
77
78 evas_output_viewport_get(ecore_evas_get(ee), NULL, NULL, &w, &h);
79 bg_resize(w, h);
80}
81
82static Eina_Bool
83main_signal_exit(void *data __UNUSED__, int ev_type __UNUSED__, void *ev __UNUSED__)
84{
85 Evas_Object *o;
86
87 ecore_main_loop_quit();
88 EINA_LIST_FREE(video_objs, o)
89 {
90 emotion_object_last_position_save(o);
91 evas_object_del(o);
92 }
93 return EINA_TRUE;
94}
95
96static void
97main_delete_request(Ecore_Evas *ee __UNUSED__)
98{
99 ecore_main_loop_quit();
100}
101
102void
103bg_setup(void)
104{
105 Evas_Object *o;
106
107 o = edje_object_add(evas);
108 edje_object_file_set(o, PACKAGE_DATA_DIR"/data/theme.edj", "background");
109 evas_object_move(o, 0, 0);
110 evas_object_resize(o, startw, starth);
111 evas_object_layer_set(o, -999);
112 evas_object_show(o);
113 evas_object_focus_set(o, 1);
114 evas_object_event_callback_add(o, EVAS_CALLBACK_KEY_DOWN, bg_key_down, NULL);
115 o_bg = o;
116}
117
118void
119bg_resize(Evas_Coord w, Evas_Coord h)
120{
121 evas_object_resize(o_bg, w, h);
122}
123
124static void
125broadcast_event(Emotion_Event ev)
126{
127 Eina_List *l;
128 Evas_Object *obj;
129
130 EINA_LIST_FOREACH(video_objs, l, obj)
131 emotion_object_event_simple_send(obj, ev);
132}
133
134static void
135bg_key_down(void *data __UNUSED__, Evas *e __UNUSED__, Evas_Object *obj __UNUSED__, void *event_info)
136{
137 Evas_Event_Key_Down *ev = event_info;
138 Eina_List *l;
139 Evas_Object *o;
140
141 if (!strcmp(ev->keyname, "Escape"))
142 ecore_main_loop_quit();
143 else if (!strcmp(ev->keyname, "Up"))
144 broadcast_event(EMOTION_EVENT_UP);
145 else if (!strcmp(ev->keyname, "Down"))
146 broadcast_event(EMOTION_EVENT_DOWN);
147 else if (!strcmp(ev->keyname, "Left"))
148 broadcast_event(EMOTION_EVENT_LEFT);
149 else if (!strcmp(ev->keyname, "Right"))
150 broadcast_event(EMOTION_EVENT_RIGHT);
151 else if (!strcmp(ev->keyname, "Return"))
152 broadcast_event(EMOTION_EVENT_SELECT);
153 else if (!strcmp(ev->keyname, "m"))
154 broadcast_event(EMOTION_EVENT_MENU1);
155 else if (!strcmp(ev->keyname, "Prior"))
156 broadcast_event(EMOTION_EVENT_PREV);
157 else if (!strcmp(ev->keyname, "Next"))
158 broadcast_event(EMOTION_EVENT_NEXT);
159 else if (!strcmp(ev->keyname, "0"))
160 broadcast_event(EMOTION_EVENT_0);
161 else if (!strcmp(ev->keyname, "1"))
162 broadcast_event(EMOTION_EVENT_1);
163 else if (!strcmp(ev->keyname, "2"))
164 broadcast_event(EMOTION_EVENT_2);
165 else if (!strcmp(ev->keyname, "3"))
166 broadcast_event(EMOTION_EVENT_3);
167 else if (!strcmp(ev->keyname, "4"))
168 broadcast_event(EMOTION_EVENT_4);
169 else if (!strcmp(ev->keyname, "5"))
170 broadcast_event(EMOTION_EVENT_5);
171 else if (!strcmp(ev->keyname, "6"))
172 broadcast_event(EMOTION_EVENT_6);
173 else if (!strcmp(ev->keyname, "7"))
174 broadcast_event(EMOTION_EVENT_7);
175 else if (!strcmp(ev->keyname, "8"))
176 broadcast_event(EMOTION_EVENT_8);
177 else if (!strcmp(ev->keyname, "9"))
178 broadcast_event(EMOTION_EVENT_9);
179 else if (!strcmp(ev->keyname, "-"))
180 broadcast_event(EMOTION_EVENT_10);
181 else if (!strcmp(ev->keyname, "bracketleft"))
182 {
183 EINA_LIST_FOREACH(video_objs, l, o)
184 emotion_object_audio_volume_set(o, emotion_object_audio_volume_get(o) - 0.1);
185 }
186 else if (!strcmp(ev->keyname, "bracketright"))
187 {
188 EINA_LIST_FOREACH(video_objs, l, o)
189 emotion_object_audio_volume_set(o, emotion_object_audio_volume_get(o) + 0.1);
190 }
191 else if (!strcmp(ev->keyname, "v"))
192 {
193 EINA_LIST_FOREACH(video_objs, l, o)
194 {
195 if (emotion_object_video_mute_get(o))
196 emotion_object_video_mute_set(o, 0);
197 else
198 emotion_object_video_mute_set(o, 1);
199 }
200 }
201 else if (!strcmp(ev->keyname, "a"))
202 {
203 EINA_LIST_FOREACH(video_objs, l, o)
204 {
205 if (emotion_object_audio_mute_get(o))
206 {
207 emotion_object_audio_mute_set(o, 0);
208 printf("unmute\n");
209 }
210 else
211 {
212 emotion_object_audio_mute_set(o, 1);
213 printf("mute\n");
214 }
215 }
216 }
217 else if (!strcmp(ev->keyname, "i"))
218 {
219 EINA_LIST_FOREACH(video_objs, l, o)
220 {
221 printf("audio channels: %i\n", emotion_object_audio_channel_count(o));
222 printf("video channels: %i\n", emotion_object_video_channel_count(o));
223 printf("spu channels: %i\n", emotion_object_spu_channel_count(o));
224 printf("seekable: %i\n", emotion_object_seekable_get(o));
225 }
226 }
227 else if (!strcmp(ev->keyname, "f"))
228 {
229 if (!ecore_evas_fullscreen_get(ecore_evas))
230 ecore_evas_fullscreen_set(ecore_evas, 1);
231 else
232 ecore_evas_fullscreen_set(ecore_evas, 0);
233 }
234 else if (!strcmp(ev->keyname, "d"))
235 {
236 if (!ecore_evas_avoid_damage_get(ecore_evas))
237 ecore_evas_avoid_damage_set(ecore_evas, 1);
238 else
239 ecore_evas_avoid_damage_set(ecore_evas, 0);
240 }
241 else if (!strcmp(ev->keyname, "s"))
242 {
243 if (!ecore_evas_shaped_get(ecore_evas))
244 {
245 ecore_evas_shaped_set(ecore_evas, 1);
246 evas_object_hide(o_bg);
247 }
248 else
249 {
250 ecore_evas_shaped_set(ecore_evas, 0);
251 evas_object_show(o_bg);
252 }
253 }
254 else if (!strcmp(ev->keyname, "b"))
255 {
256 if (!ecore_evas_borderless_get(ecore_evas))
257 ecore_evas_borderless_set(ecore_evas, 1);
258 else
259 ecore_evas_borderless_set(ecore_evas, 0);
260 }
261 else if (!strcmp(ev->keyname, "q"))
262 {
263 ecore_main_loop_quit();
264 while (video_objs)
265 {
266 printf("del obj!\n");
267 evas_object_del(video_objs->data);
268 video_objs = eina_list_remove_list(video_objs, video_objs);
269 printf("done\n");
270 }
271 }
272 else if (!strcmp(ev->keyname, "z"))
273 {
274 vis = (vis + 1) % EMOTION_VIS_LAST;
275 printf("new visualization: %d\n", vis);
276
277 EINA_LIST_FOREACH(video_objs, l, o)
278 {
279 Eina_Bool supported;
280
281 supported = emotion_object_vis_supported(o, vis);
282 if (supported)
283 emotion_object_vis_set(o, vis);
284 else
285 {
286 const char *file;
287
288 file = emotion_object_file_get(o);
289 printf("object %p (%s) does not support visualization %d\n",
290 o, file, vis);
291 }
292 }
293 }
294 else
295 {
296 printf("UNHANDLED: %s\n", ev->keyname);
297 }
298}
299
300static void
301video_obj_time_changed(Evas_Object *obj, Evas_Object *edje)
302{
303 double pos, len, scale;
304 char buf[256];
305 int ph, pm, ps, pf, lh, lm, ls;
306
307 pos = emotion_object_position_get(obj);
308 len = emotion_object_play_length_get(obj);
309 scale = (len > 0.0) ? pos / len : 0.0;
310 edje_object_part_drag_value_set(edje, "video_progress", scale, 0.0);
311 lh = len / 3600;
312 lm = len / 60 - (lh * 60);
313 ls = len - (lm * 60);
314 ph = pos / 3600;
315 pm = pos / 60 - (ph * 60);
316 ps = pos - (pm * 60);
317 pf = pos * 100 - (ps * 100) - (pm * 60 * 100) - (ph * 60 * 60 * 100);
318 snprintf(buf, sizeof(buf), "%i:%02i:%02i.%02i / %i:%02i:%02i",
319 ph, pm, ps, pf, lh, lm, ls);
320 edje_object_part_text_set(edje, "video_progress_txt", buf);
321}
322
323static void
324video_obj_frame_decode_cb(void *data, Evas_Object *obj, void *event_info __UNUSED__)
325{
326 video_obj_time_changed(obj, data);
327
328 if (0)
329 {
330 double t;
331 static double pt = 0.0;
332 t = ecore_time_get();
333 printf("FPS: %3.3f\n", 1.0 / (t - pt));
334 pt = t;
335 }
336}
337
338static void
339video_obj_frame_resize_cb(void *data, Evas_Object *obj, void *event_info __UNUSED__)
340{
341 Evas_Object *oe;
342 int iw, ih;
343 Evas_Coord w, h;
344 double ratio;
345
346 oe = data;
347 emotion_object_size_get(obj, &iw, &ih);
348 ratio = emotion_object_ratio_get(obj);
349 printf("HANDLE %ix%i @ %3.3f\n", iw, ih, ratio);
350 if (ratio > 0.0) iw = (ih * ratio) + 0.5;
351 edje_extern_object_min_size_set(obj, iw, ih);
352 edje_object_part_swallow(oe, "video_swallow", obj);
353 edje_object_size_min_calc(oe, &w, &h);
354 evas_object_resize(oe, w, h);
355 edje_extern_object_min_size_set(obj, 0, 0);
356 edje_object_part_swallow(oe, "video_swallow", obj);
357}
358
359static void
360video_obj_length_change_cb(void *data, Evas_Object *obj, void *event_info __UNUSED__)
361{
362 video_obj_time_changed(obj, data);
363}
364
365static void
366video_obj_position_update_cb(void *data, Evas_Object *obj, void *event_info __UNUSED__)
367{
368 video_obj_time_changed(obj, data);
369}
370
371static void
372video_obj_stopped_cb(void *data __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
373{
374 printf("video stopped!\n");
375 emotion_object_position_set(obj, 0.0);
376 emotion_object_play_set(obj, 1);
377}
378
379static void
380video_obj_channels_cb(void *data __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
381{
382 printf("channels changed: [AUD %i][VID %i][SPU %i]\n",
383 emotion_object_audio_channel_count(obj),
384 emotion_object_video_channel_count(obj),
385 emotion_object_spu_channel_count(obj));
386}
387
388static void
389video_obj_title_cb(void *data __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
390{
391 printf("video title to: \"%s\"\n", emotion_object_title_get(obj));
392}
393
394static void
395video_obj_progress_cb(void *data __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
396{
397 printf("progress: \"%s\" %3.3f\n",
398 emotion_object_progress_info_get(obj),
399 emotion_object_progress_status_get(obj));
400}
401
402static void
403video_obj_ref_cb(void *data __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
404{
405 printf("video ref to: \"%s\" %i\n",
406 emotion_object_ref_file_get(obj),
407 emotion_object_ref_num_get(obj));
408}
409
410static void
411video_obj_button_num_cb(void *data __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
412{
413 printf("video spu buttons to: %i\n",
414 emotion_object_spu_button_count_get(obj));
415}
416
417static void
418video_obj_button_cb(void *data __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
419{
420 printf("video selected spu button: %i\n",
421 emotion_object_spu_button_get(obj));
422}
423
424
425
426static void
427video_obj_signal_play_cb(void *data, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
428{
429 Evas_Object *ov = data;
430 emotion_object_play_set(ov, 1);
431 edje_object_signal_emit(o, "video_state", "play");
432}
433
434static void
435video_obj_signal_pause_cb(void *data, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
436{
437 Evas_Object *ov = data;
438 emotion_object_play_set(ov, 0);
439 edje_object_signal_emit(o, "video_state", "pause");
440}
441
442static void
443video_obj_signal_stop_cb(void *data, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
444{
445 Evas_Object *ov = data;
446 emotion_object_play_set(ov, 0);
447 emotion_object_position_set(ov, 0);
448 edje_object_signal_emit(o, "video_state", "stop");
449}
450
451static void
452video_obj_signal_jump_cb(void *data, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
453{
454 Evas_Object *ov = data;
455 double len;
456 double x, y;
457
458 edje_object_part_drag_value_get(o, source, &x, &y);
459 len = emotion_object_play_length_get(ov);
460 emotion_object_position_set(ov, x * len);
461}
462
463static void
464video_obj_signal_speed_cb(void *data, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
465{
466 Evas_Object *ov = data;
467 double spd;
468 double x, y;
469 char buf[256];
470
471 edje_object_part_drag_value_get(o, source, &x, &y);
472 spd = 255 * y;
473 evas_object_color_set(ov, spd, spd, spd, spd);
474 snprintf(buf, sizeof(buf), "%.0f", spd);
475 edje_object_part_text_set(o, "video_speed_txt", buf);
476}
477
478static void
479video_obj_signal_frame_move_start_cb(void *data __UNUSED__, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
480{
481 Frame_Data *fd;
482 Evas_Coord x, y;
483
484 fd = evas_object_data_get(o, "frame_data");
485 fd->moving = 1;
486 evas_pointer_canvas_xy_get(evas_object_evas_get(o), &x, &y);
487 fd->x = x;
488 fd->y = y;
489 evas_object_raise(o);
490}
491
492static void
493video_obj_signal_frame_move_stop_cb(void *data __UNUSED__, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
494{
495 Frame_Data *fd;
496
497 fd = evas_object_data_get(o, "frame_data");
498 fd->moving = 0;
499}
500
501static void
502video_obj_signal_frame_resize_start_cb(void *data __UNUSED__, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
503{
504 Frame_Data *fd;
505 Evas_Coord x, y;
506
507 fd = evas_object_data_get(o, "frame_data");
508 fd->resizing = 1;
509 evas_pointer_canvas_xy_get(evas_object_evas_get(o), &x, &y);
510 fd->x = x;
511 fd->y = y;
512 evas_object_raise(o);
513}
514
515static void
516video_obj_signal_frame_resize_stop_cb(void *data __UNUSED__, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
517{
518 Frame_Data *fd;
519
520 fd = evas_object_data_get(o, "frame_data");
521 fd->resizing = 0;
522}
523
524static void
525video_obj_signal_frame_move_cb(void *data __UNUSED__, Evas_Object *o, const char *emission __UNUSED__, const char *source __UNUSED__)
526{
527 Frame_Data *fd;
528
529 fd = evas_object_data_get(o, "frame_data");
530 if (fd->moving)
531 {
532 Evas_Coord x, y, ox, oy;
533
534 evas_pointer_canvas_xy_get(evas_object_evas_get(o), &x, &y);
535 evas_object_geometry_get(o, &ox, &oy, NULL, NULL);
536 evas_object_move(o, ox + (x - fd->x), oy + (y - fd->y));
537 fd->x = x;
538 fd->y = y;
539 }
540 else if (fd->resizing)
541 {
542 Evas_Coord x, y, ow, oh;
543
544 evas_pointer_canvas_xy_get(evas_object_evas_get(o), &x, &y);
545 evas_object_geometry_get(o, NULL, NULL, &ow, &oh);
546 evas_object_resize(o, ow + (x - fd->x), oh + (y - fd->y));
547 fd->x = x;
548 fd->y = y;
549 }
550}
551
552
553static void
554init_video_object(const char *module_filename, const char *filename)
555{
556 Evas_Object *o, *oe;
557 int iw, ih;
558 Evas_Coord w, h;
559 Frame_Data *fd;
560
561
562/* basic video object setup */
563 o = emotion_object_add(evas);
564 if (!emotion_object_init(o, module_filename))
565 return;
566 emotion_object_vis_set(o, vis);
567 if (!emotion_object_file_set(o, filename))
568 {
569 return;
570 }
571 emotion_object_last_position_load(o);
572 emotion_object_play_set(o, 1);
573 evas_object_move(o, 0, 0);
574 evas_object_resize(o, 320, 240);
575 emotion_object_smooth_scale_set(o, 1);
576 evas_object_show(o);
577/* end basic video setup. all the rest here is just to be fancy */
578
579
580 video_objs = eina_list_append(video_objs, o);
581
582 emotion_object_size_get(o, &iw, &ih);
583 w = iw; h = ih;
584
585 fd = calloc(1, sizeof(Frame_Data));
586
587 oe = edje_object_add(evas);
588 evas_object_data_set(oe, "frame_data", fd);
589 if (reflex)
590 edje_object_file_set(oe, PACKAGE_DATA_DIR"/data/theme.edj", "video_controller/reflex");
591 else
592 edje_object_file_set(oe, PACKAGE_DATA_DIR"/data/theme.edj", "video_controller");
593 edje_extern_object_min_size_set(o, w, h);
594 edje_object_part_swallow(oe, "video_swallow", o);
595 edje_object_size_min_calc(oe, &w, &h);
596// evas_object_move(oe, rand() % (int)(startw - w), rand() % (int)(starth - h));
597 evas_object_move(oe, 0, 0);
598 evas_object_resize(oe, w, h);
599 edje_extern_object_min_size_set(o, 0, 0);
600 edje_object_part_swallow(oe, "video_swallow", o);
601
602 evas_object_smart_callback_add(o, "frame_decode", video_obj_frame_decode_cb, oe);
603 evas_object_smart_callback_add(o, "frame_resize", video_obj_frame_resize_cb, oe);
604 evas_object_smart_callback_add(o, "length_change", video_obj_length_change_cb, oe);
605 evas_object_smart_callback_add(o, "position_update", video_obj_position_update_cb, oe);
606
607 evas_object_smart_callback_add(o, "decode_stop", video_obj_stopped_cb, oe);
608 evas_object_smart_callback_add(o, "channels_change", video_obj_channels_cb, oe);
609 evas_object_smart_callback_add(o, "title_change", video_obj_title_cb, oe);
610 evas_object_smart_callback_add(o, "progress_change", video_obj_progress_cb, oe);
611 evas_object_smart_callback_add(o, "ref_change", video_obj_ref_cb, oe);
612 evas_object_smart_callback_add(o, "button_num_change", video_obj_button_num_cb, oe);
613 evas_object_smart_callback_add(o, "button_change", video_obj_button_cb, oe);
614
615 edje_object_signal_callback_add(oe, "video_control", "play", video_obj_signal_play_cb, o);
616 edje_object_signal_callback_add(oe, "video_control", "pause", video_obj_signal_pause_cb, o);
617 edje_object_signal_callback_add(oe, "video_control", "stop", video_obj_signal_stop_cb, o);
618 edje_object_signal_callback_add(oe, "drag", "video_progress", video_obj_signal_jump_cb, o);
619 edje_object_signal_callback_add(oe, "drag", "video_speed", video_obj_signal_speed_cb, o);
620
621 edje_object_signal_callback_add(oe, "frame_move", "start", video_obj_signal_frame_move_start_cb, oe);
622 edje_object_signal_callback_add(oe, "frame_move", "stop", video_obj_signal_frame_move_stop_cb, oe);
623 edje_object_signal_callback_add(oe, "frame_resize", "start", video_obj_signal_frame_resize_start_cb, oe);
624 edje_object_signal_callback_add(oe, "frame_resize", "stop", video_obj_signal_frame_resize_stop_cb, oe);
625 edje_object_signal_callback_add(oe, "mouse,move", "*", video_obj_signal_frame_move_cb, oe);
626
627 edje_object_part_drag_value_set(oe, "video_speed", 0.0, 1.0);
628 edje_object_part_text_set(oe, "video_speed_txt", "1.0");
629
630 edje_object_signal_emit(o, "video_state", "play");
631
632 evas_object_show(oe);
633}
634
635static Eina_Bool
636check_positions(void *data __UNUSED__)
637{
638 const Eina_List *lst;
639 Evas_Object *o;
640
641 EINA_LIST_FOREACH(video_objs, lst, o)
642 video_obj_time_changed(o, evas_object_smart_parent_get(o));
643
644 return !!video_objs;
645}
646
647int
648main(int argc, char **argv)
649{
650 int args;
651 Eina_Rectangle geometry = {0, 0, startw, starth};
652 char *engine = NULL;
653 char *backend = NULL;
654 int verbose = 0;
655 int visual = EMOTION_VIS_NONE;
656 unsigned char help = 0;
657 unsigned char engines_listed = 0;
658 Ecore_Getopt_Value values[] = {
659 ECORE_GETOPT_VALUE_STR(engine),
660 ECORE_GETOPT_VALUE_BOOL(engines_listed),
661 ECORE_GETOPT_VALUE_PTR_CAST(geometry),
662 ECORE_GETOPT_VALUE_STR(backend),
663 ECORE_GETOPT_VALUE_INT(visual),
664 ECORE_GETOPT_VALUE_INT(verbose),
665 ECORE_GETOPT_VALUE_BOOL(reflex),
666 ECORE_GETOPT_VALUE_NONE,
667 ECORE_GETOPT_VALUE_NONE,
668 ECORE_GETOPT_VALUE_NONE,
669 ECORE_GETOPT_VALUE_BOOL(help),
670 ECORE_GETOPT_VALUE_NONE
671 };
672
673
674 if (!ecore_evas_init())
675 return -1;
676 if (!edje_init())
677 goto shutdown_ecore_evas;
678
679 start_time = ecore_time_get();
680 ecore_event_handler_add(ECORE_EVENT_SIGNAL_EXIT, main_signal_exit, NULL);
681 edje_frametime_set(1.0 / 30.0);
682
683 ecore_app_args_set(argc, (const char **)argv);
684 args = ecore_getopt_parse(&options, values, argc, argv);
685 if (args < 0) goto shutdown_edje;
686 else if (help) goto shutdown_edje;
687 else if (engines_listed) goto shutdown_edje;
688 else if (args == argc)
689 {
690 printf("must provide at least one file to play!\n");
691 goto shutdown_edje;
692 }
693
694 if ((geometry.w == 0) || (geometry.h == 0))
695 {
696 if (geometry.w == 0) geometry.w = 320;
697 if (geometry.h == 0) geometry.h = 240;
698 }
699
700 printf("evas engine: %s\n", engine ? engine : "<auto>");
701 printf("emotion backend: %s\n", backend ? backend : "<auto>");
702 printf("vis: %d\n", vis);
703 printf("geometry: %d %d %dx%d\n", geometry.x, geometry.y, geometry.w, geometry.h);
704
705 ecore_evas = ecore_evas_new
706 (engine, geometry.x, geometry.y, geometry.w, geometry.h, NULL);
707 if (!ecore_evas)
708 goto shutdown_edje;
709
710// ecore_evas_alpha_set(ecore_evas, EINA_TRUE);
711
712 ecore_evas_callback_delete_request_set(ecore_evas, main_delete_request);
713 ecore_evas_callback_resize_set(ecore_evas, main_resize);
714 ecore_evas_title_set(ecore_evas, "Evas Media Test Program");
715 ecore_evas_name_class_set(ecore_evas, "evas_media_test", "main");
716 ecore_evas_show(ecore_evas);
717 evas = ecore_evas_get(ecore_evas);
718 evas_image_cache_set(evas, 8 * 1024 * 1024);
719 evas_font_cache_set(evas, 1 * 1024 * 1024);
720 evas_font_path_append(evas, PACKAGE_DATA_DIR"/data/fonts");
721
722 emotion_init();
723
724 bg_setup();
725
726 for (; args < argc; args++)
727 init_video_object(backend, argv[args]);
728
729 ecore_animator_add(check_positions, NULL);
730
731 ecore_main_loop_begin();
732
733 main_signal_exit(NULL, 0, NULL);
734
735 emotion_shutdown();
736 ecore_evas_free(ecore_evas);
737 ecore_evas_shutdown();
738 edje_shutdown();
739
740 return 0;
741
742 shutdown_edje:
743 edje_shutdown();
744 shutdown_ecore_evas:
745 ecore_evas_shutdown();
746
747 return -1;
748}
diff --git a/legacy/emotion/src/edje_external/emotion.c b/legacy/emotion/src/edje_external/emotion.c
deleted file mode 100644
index d7fc012906..0000000000
--- a/legacy/emotion/src/edje_external/emotion.c
+++ /dev/null
@@ -1,516 +0,0 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <Edje.h>
6
7#include "Emotion.h"
8
typedef struct _External_Emotion_Params External_Emotion_Params;
typedef struct _External_Emotion_Signals_Proxy_Context External_Emotion_Signals_Proxy_Context;

/* Cached copy of the edje-external parameters of one emotion object.
 * The X-macro list (_STR/_BOOL/_INT/_DOUBLE) is reused throughout this
 * file so the set of supported parameters is declared only once.  For
 * every value member an extra "<name>_exists" bit records whether the
 * parameter was actually present in the parsed params list. */
struct _External_Emotion_Params
{
#define _STR(M) const char *M
#define _BOOL(M) Eina_Bool M:1; Eina_Bool M##_exists:1
#define _INT(M) int M; Eina_Bool M##_exists:1
#define _DOUBLE(M) double M; Eina_Bool M##_exists:1
   _STR(file);
   _BOOL(play);
   _DOUBLE(position);
   _BOOL(smooth_scale);
   _DOUBLE(audio_volume);
   _BOOL(audio_mute);
   _INT(audio_channel);
   _BOOL(video_mute);
   _INT(video_channel);
   _BOOL(spu_mute);
   _INT(spu_channel);
   _INT(chapter);
   _DOUBLE(play_speed);
   _DOUBLE(play_length);
   //_INT(vis);
#undef _STR
#undef _BOOL
#undef _INT
#undef _DOUBLE
};

/* Context for re-emitting an emotion smart callback as an edje signal:
 * when the smart event named "emission" fires on the emotion object,
 * that name is emitted as a signal on "edje" with source "source". */
struct _External_Emotion_Signals_Proxy_Context
{
   const char *emission;
   const char *source;
   Evas_Object *edje;
};
45
/* Logging domain for this module; registered in external_emotion_mod_init(). */
static int _log_dom = -1;
#define CRITICAL(...) EINA_LOG_DOM_CRIT(_log_dom, __VA_ARGS__)
#define ERR(...) EINA_LOG_DOM_ERR(_log_dom, __VA_ARGS__)
#define WRN(...) EINA_LOG_DOM_WARN(_log_dom, __VA_ARGS__)
#define INF(...) EINA_LOG_DOM_INFO(_log_dom, __VA_ARGS__)
#define DBG(...) EINA_LOG_DOM_DBG(_log_dom, __VA_ARGS__)

/* NULL-terminated list of engine choices offered to edje, restricted to
 * the backends this build of emotion was configured with. */
static const char *_external_emotion_engines[] = {
#ifdef EMOTION_BUILD_XINE
   "xine",
#endif
#ifdef EMOTION_BUILD_GSTREAMER
   "gstreamer",
#endif
#ifdef EMOTION_BUILD_GENERIC
   "generic",
#endif
   NULL,
};

/* Default engine: the first backend enabled at configure time.
 * "impossible" is only reached when no backend at all was built. */
static const char _external_emotion_engine_def[] =
#if defined(EMOTION_BUILD_XINE)
   "xine";
#elif defined(EMOTION_BUILD_GSTREAMER)
   "gstreamer";
#elif defined(EMOTION_BUILD_GENERIC)
   "generic";
#else
   "impossible";
#endif
76
77static void
78_external_emotion_signal_proxy_free_cb(void *data, Evas *e __UNUSED__, Evas_Object *obj __UNUSED__, void *event_info __UNUSED__)
79{
80 External_Emotion_Signals_Proxy_Context *ctxt = data;
81 free(ctxt);
82}
83
/* Smart-callback proxy: forwards any emotion smart event to the embedding
 * edje object as a signal, using the names captured in the context. */
static void
_external_emotion_signal_proxy_cb(void *data, Evas_Object *obj __UNUSED__, void *event_info __UNUSED__)
{
   External_Emotion_Signals_Proxy_Context *ctxt = data;
   // TODO: Is it worth to check Evas_Smart_Cb_Description and do something
   // TODO: with event_info given its description?
   edje_object_signal_emit(ctxt->edje, ctxt->emission, ctxt->source);
}
92
93static Evas_Object *
94_external_emotion_add(void *data __UNUSED__, Evas *evas, Evas_Object *edje __UNUSED__, const Eina_List *params, const char *part_name)
95{
96 const Evas_Smart_Cb_Description **cls_descs, **inst_descs;
97 unsigned int cls_count, inst_count, total;
98 External_Emotion_Signals_Proxy_Context *ctxt;
99 Evas_Object *obj;
100 const char *engine;
101
102 if (!edje_external_param_choice_get(params, "engine", &engine))
103 engine = NULL;
104 if (!engine) engine = _external_emotion_engine_def;
105
106 obj = emotion_object_add(evas);
107 if (!emotion_object_init(obj, engine))
108 {
109 ERR("failed to initialize emotion with engine '%s'.", engine);
110 return NULL;
111 }
112
113 evas_object_smart_callbacks_descriptions_get
114 (obj, &cls_descs, &cls_count, &inst_descs, &inst_count);
115
116 total = cls_count + inst_count;
117 if (!total) goto end;
118 ctxt = malloc(sizeof(External_Emotion_Signals_Proxy_Context) * total);
119 if (!ctxt) goto end;
120 evas_object_event_callback_add
121 (obj, EVAS_CALLBACK_DEL, _external_emotion_signal_proxy_free_cb, ctxt);
122
123 for (; cls_count > 0; cls_count--, cls_descs++, ctxt++)
124 {
125 const Evas_Smart_Cb_Description *d = *cls_descs;
126 ctxt->emission = d->name;
127 ctxt->source = part_name;
128 ctxt->edje = edje;
129 evas_object_smart_callback_add
130 (obj, d->name, _external_emotion_signal_proxy_cb, ctxt);
131 }
132
133 for (; inst_count > 0; inst_count--, inst_descs++, ctxt++)
134 {
135 const Evas_Smart_Cb_Description *d = *inst_descs;
136 ctxt->emission = d->name;
137 ctxt->source = part_name;
138 ctxt->edje = edje;
139 evas_object_smart_callback_add
140 (obj, d->name, _external_emotion_signal_proxy_cb, ctxt);
141 }
142
143 end:
144 return obj;
145}
146
/* "signal_emit" hook: emotion objects accept no incoming edje signals,
 * so the signal is only logged for debugging. */
static void
_external_emotion_signal(void *data __UNUSED__, Evas_Object *obj __UNUSED__, const char *signal, const char *source)
{
   DBG("External Signal received: '%s' '%s'", signal, source);
}
152
/* "state_set" hook: applies the parameter snapshot of the destination
 * state (falling back to the source state) to the emotion object.
 * "position" and "play_length" are rejected: the former would seek on
 * every state change, the latter is read-only. */
static void
_external_emotion_state_set(void *data __UNUSED__, Evas_Object *obj, const void *from_params, const void *to_params, float pos __UNUSED__)
{
   const External_Emotion_Params *p;

   /* prefer the destination snapshot; interpolation (pos) is unsupported */
   if (to_params) p = to_params;
   else if (from_params) p = from_params;
   else return;

   /* each macro forwards one cached parameter to its emotion setter,
    * but only when the parameter was present in the state description */
#define _STR(M) if (p->M) emotion_object_##M##_set(obj, p->M)
#define _BOOL(M) if (p->M##_exists) emotion_object_##M##_set(obj, p->M)
#define _INT(M) if (p->M##_exists) emotion_object_##M##_set(obj, p->M)
#define _DOUBLE(M) if (p->M##_exists) emotion_object_##M##_set(obj, p->M)
   _STR(file);
   _BOOL(play);
   //_DOUBLE(position);
   if (p->position_exists)
     WRN("position should not be set from state description! Ignored.");
   _BOOL(smooth_scale);
   _DOUBLE(audio_volume);
   _BOOL(audio_mute);
   _INT(audio_channel);
   _BOOL(video_mute);
   _INT(video_channel);
   _BOOL(spu_mute);
   _INT(spu_channel);
   _INT(chapter);
   _DOUBLE(play_speed);
   if (p->play_length_exists) ERR("play_length is read-only");
   //_INT(vis);
#undef _STR
#undef _BOOL
#undef _INT
#undef _DOUBLE
}
188
/* "param_set" hook: applies one runtime parameter change to the emotion
 * object.  Each _STR/_BOOL/_INT/_DOUBLE macro expands to an "else if"
 * branch that matches the parameter by name and forwards the value to
 * the corresponding emotion_object_*_set() — but only when the incoming
 * value carries the expected edje-external type.
 * Returns EINA_FALSE for unknown, mistyped or read-only parameters. */
static Eina_Bool
_external_emotion_param_set(void *data __UNUSED__, Evas_Object *obj, const Edje_External_Param *param)
{
   if (!strcmp(param->name, "engine"))
     {
        // TODO
        WRN("engine is a property that can be set only at object creation!");
        return EINA_FALSE;
     }

#define _STR(M) \
   else if (!strcmp(param->name, #M)) \
     { \
        if (param->type == EDJE_EXTERNAL_PARAM_TYPE_STRING) \
          { \
             emotion_object_##M##_set(obj, param->s); \
             return EINA_TRUE; \
          } \
     }
#define _BOOL(M) \
   else if (!strcmp(param->name, #M)) \
     { \
        if (param->type == EDJE_EXTERNAL_PARAM_TYPE_BOOL) \
          { \
             emotion_object_##M##_set(obj, param->i); \
             return EINA_TRUE; \
          } \
     }
#define _INT(M) \
   else if (!strcmp(param->name, #M)) \
     { \
        if (param->type == EDJE_EXTERNAL_PARAM_TYPE_INT) \
          { \
             emotion_object_##M##_set(obj, param->i); \
             return EINA_TRUE; \
          } \
     }
#define _DOUBLE(M) \
   else if (!strcmp(param->name, #M)) \
     { \
        if (param->type == EDJE_EXTERNAL_PARAM_TYPE_DOUBLE) \
          { \
             emotion_object_##M##_set(obj, param->d); \
             return EINA_TRUE; \
          } \
     }

   /* the empty "if" lets every macro expansion start with "else if" */
   if (0) {} // so else if works...
   _STR(file)
   _BOOL(play)
   _DOUBLE(position)
   _BOOL(smooth_scale)
   _DOUBLE(audio_volume)
   _BOOL(audio_mute)
   _INT(audio_channel)
   _BOOL(video_mute)
   _INT(video_channel)
   _BOOL(spu_mute)
   _INT(spu_channel)
   _INT(chapter)
   _DOUBLE(play_speed)
   else if (!strcmp(param->name, "play_length"))
     {
        ERR("play_length is read-only");
        return EINA_FALSE;
     }
   //_INT(vis);
#undef _STR
#undef _BOOL
#undef _INT
#undef _DOUBLE

   ERR("unknown parameter '%s' of type '%s'",
       param->name, edje_external_param_type_str(param->type));

   return EINA_FALSE;
}
266
/* "param_get" hook: mirror of _external_emotion_param_set(), reading the
 * current value of one parameter from the emotion object into *param.
 * A branch succeeds only when the requested name and type match.
 * Returns EINA_FALSE for unknown or mistyped parameters. */
static Eina_Bool
_external_emotion_param_get(void *data __UNUSED__, const Evas_Object *obj, Edje_External_Param *param)
{
#define _STR(M) \
   else if (!strcmp(param->name, #M)) \
     { \
        if (param->type == EDJE_EXTERNAL_PARAM_TYPE_STRING) \
          { \
             param->s = emotion_object_##M##_get(obj); \
             return EINA_TRUE; \
          } \
     }
#define _BOOL(M) \
   else if (!strcmp(param->name, #M)) \
     { \
        if (param->type == EDJE_EXTERNAL_PARAM_TYPE_BOOL) \
          { \
             param->i = emotion_object_##M##_get(obj); \
             return EINA_TRUE; \
          } \
     }
#define _INT(M) \
   else if (!strcmp(param->name, #M)) \
     { \
        if (param->type == EDJE_EXTERNAL_PARAM_TYPE_INT) \
          { \
             param->i = emotion_object_##M##_get(obj); \
             return EINA_TRUE; \
          } \
     }
#define _DOUBLE(M) \
   else if (!strcmp(param->name, #M)) \
     { \
        if (param->type == EDJE_EXTERNAL_PARAM_TYPE_DOUBLE) \
          { \
             param->d = emotion_object_##M##_get(obj); \
             return EINA_TRUE; \
          } \
     }

   /* the empty "if" lets every macro expansion start with "else if" */
   if (0) {} // so else if works...
   _STR(file)
   _BOOL(play)
   _DOUBLE(position)
   _BOOL(smooth_scale)
   _DOUBLE(audio_volume)
   _BOOL(audio_mute)
   _INT(audio_channel)
   _BOOL(video_mute)
   _INT(video_channel)
   _BOOL(spu_mute)
   _INT(spu_channel)
   _INT(chapter)
   _DOUBLE(play_speed)
   _DOUBLE(play_length)
   //_INT(vis)
#undef _STR
#undef _BOOL
#undef _INT
#undef _DOUBLE

   ERR("unknown parameter '%s' of type '%s'",
       param->name, edje_external_param_type_str(param->type));

   return EINA_FALSE;
}
333
/* "params_parse" hook: snapshots the edje parameter list into a freshly
 * calloc()ed External_Emotion_Params, consumed by
 * _external_emotion_state_set() and released by
 * _external_emotion_params_free().  Note the macros here are independent
 * "if"s (not else-if), so every supported name is tested against each
 * incoming list entry in turn. */
static void *
_external_emotion_params_parse(void *data __UNUSED__, Evas_Object *obj __UNUSED__, const Eina_List *params)
{
   const Edje_External_Param *param;
   const Eina_List *l;
   External_Emotion_Params *p = calloc(1, sizeof(External_Emotion_Params));
   if (!p) return NULL;

   EINA_LIST_FOREACH(params, l, param)
     {
#define _STR(M) \
   if (!strcmp(param->name, #M)) p->M = eina_stringshare_add(param->s)
#define _BOOL(M) \
   if (!strcmp(param->name, #M)) \
     { \
        p->M = param->i; \
        p->M##_exists = EINA_TRUE; \
     }
#define _INT(M) \
   if (!strcmp(param->name, #M)) \
     { \
        p->M = param->i; \
        p->M##_exists = EINA_TRUE; \
     }
#define _DOUBLE(M) \
   if (!strcmp(param->name, #M)) \
     { \
        p->M = param->d; \
        p->M##_exists = EINA_TRUE; \
     }

        _STR(file);
        _BOOL(play);
        _DOUBLE(position);
        _BOOL(smooth_scale);
        _DOUBLE(audio_volume);
        _BOOL(audio_mute);
        _INT(audio_channel);
        _BOOL(video_mute);
        _INT(video_channel);
        _BOOL(spu_mute);
        _INT(spu_channel);
        _INT(chapter);
        _DOUBLE(play_speed);
        _DOUBLE(play_length);
        //_INT(vis);
#undef _STR
#undef _BOOL
#undef _INT
#undef _DOUBLE
     }

   return p;
}
388
/* "params_free" hook: releases a snapshot made by
 * _external_emotion_params_parse().  Only the stringshared "file" member
 * owns memory; the value macros expand to no-ops and are kept only so
 * the parameter list stays uniform with the rest of the file. */
static void
_external_emotion_params_free(void *params)
{
   External_Emotion_Params *p = params;

#define _STR(M) eina_stringshare_del(p->M)
#define _BOOL(M) do {} while (0)
#define _INT(M) do {} while (0)
#define _DOUBLE(M) do {} while (0)
   _STR(file);
   _BOOL(play);
   _DOUBLE(position);
   _BOOL(smooth_scale);
   _DOUBLE(audio_volume);
   _BOOL(audio_mute);
   _INT(audio_channel);
   _BOOL(video_mute);
   _INT(video_channel);
   _BOOL(spu_mute);
   _INT(spu_channel);
   _INT(chapter);
   _DOUBLE(play_speed);
   _DOUBLE(play_length);
   //_INT(vis);
#undef _STR
#undef _BOOL
#undef _INT
#undef _DOUBLE
   free(p);
}
419
420static const char *
421_external_emotion_label_get(void *data __UNUSED__)
422{
423 return "Emotion";
424}
425
/* "icon_add" hook: loads the emotion icon from the installed theme and
 * pins its size hints so edje editors lay it out at a fixed size. */
static Evas_Object *
_external_emotion_icon_add(void *data __UNUSED__, Evas *e)
{
   Evas_Object *ic;
   int w = 0, h = 0;

   ic = edje_object_add(e);
   edje_object_file_set(ic, PACKAGE_DATA_DIR"/data/icon.edj", "icon");
   edje_object_size_min_get(ic, &w, &h);
   /* fall back to a small default when the theme reports no minimum */
   if (w < 1) w = 20;
   if (h < 1) h = 10;
   evas_object_size_hint_min_set(ic, w, h);
   evas_object_size_hint_max_set(ic, w, h);

   return ic;
}
442
/* "translate" hook: translation is not implemented yet, so the string is
 * returned untouched. */
static const char *
_external_emotion_translate(void *data __UNUSED__, const char *orig)
{
   // in future, mark all params as translatable and use dgettext()
   // with "emotion" text domain here.
   return orig;
}
450
/* Parameter descriptions advertised to edje; must stay in sync with the
 * _STR/_BOOL/_INT/_DOUBLE lists used elsewhere in this file. */
static Edje_External_Param_Info _external_emotion_params[] = {
   /* engine can only be chosen at object creation time */
   EDJE_EXTERNAL_PARAM_INFO_CHOICE_FULL
     ("engine", _external_emotion_engine_def, _external_emotion_engines),
   EDJE_EXTERNAL_PARAM_INFO_STRING("file"),
   EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("play", EINA_FALSE),
   EDJE_EXTERNAL_PARAM_INFO_DOUBLE("position"),
   EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("smooth_scale", EINA_FALSE),
   EDJE_EXTERNAL_PARAM_INFO_DOUBLE_DEFAULT("audio_volume", 0.9),
   EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("audio_mute", EINA_FALSE),
   EDJE_EXTERNAL_PARAM_INFO_INT_DEFAULT("audio_channel", 0),
   EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("video_mute", EINA_FALSE),
   EDJE_EXTERNAL_PARAM_INFO_INT_DEFAULT("video_channel", 0),
   EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("spu_mute", EINA_FALSE),
   EDJE_EXTERNAL_PARAM_INFO_INT_DEFAULT("spu_channel", 0),
   EDJE_EXTERNAL_PARAM_INFO_INT("chapter"),
   EDJE_EXTERNAL_PARAM_INFO_DOUBLE_DEFAULT("play_speed", 1.0),
   /* play_length is read-only; see _external_emotion_param_set() */
   EDJE_EXTERNAL_PARAM_INFO_DOUBLE("play_length"),
   //EDJE_EXTERNAL_PARAM_INFO_CHOICE_FULL("vis", ...),
   EDJE_EXTERNAL_PARAM_INFO_SENTINEL
};
471
/* The edje-external type descriptor binding together all hooks above. */
static const Edje_External_Type _external_emotion_type = {
   .abi_version = EDJE_EXTERNAL_TYPE_ABI_VERSION,
   .module = "emotion",
   .module_name = "Emotion",
   .add = _external_emotion_add,
   .state_set = _external_emotion_state_set,
   .signal_emit = _external_emotion_signal,
   .param_set = _external_emotion_param_set,
   .param_get = _external_emotion_param_get,
   .params_parse = _external_emotion_params_parse,
   .params_free = _external_emotion_params_free,
   .label_get = _external_emotion_label_get,
   .description_get = NULL,
   .icon_add = _external_emotion_icon_add,
   .preview_add = NULL,
   .translate = _external_emotion_translate,
   .parameters_info = _external_emotion_params,
   .data = NULL
};
491
/* NULL-terminated type table registered with edje at module init. */
static Edje_External_Type_Info _external_emotion_types[] =
{
   {"emotion", &_external_emotion_type},
   {NULL, NULL}
};
497
/* Module entry point: registers the log domain and the external type.
 * NOTE(review): eina_log_domain_register() can fail (negative return);
 * the result is not checked here — the log macros would then fall back
 * to an invalid domain.  Confirm whether that is acceptable. */
static Eina_Bool
external_emotion_mod_init(void)
{
   _log_dom = eina_log_domain_register
     ("emotion-externals", EINA_COLOR_LIGHTBLUE);
   edje_external_type_array_register(_external_emotion_types);
   return EINA_TRUE;
}
506
/* Module exit point: undoes everything done by external_emotion_mod_init(). */
static void
external_emotion_mod_shutdown(void)
{
   edje_external_type_array_unregister(_external_emotion_types);
   eina_log_domain_unregister(_log_dom);
   _log_dom = -1;
}

/* hook init/shutdown into eina's module loader */
EINA_MODULE_INIT(external_emotion_mod_init);
EINA_MODULE_SHUTDOWN(external_emotion_mod_shutdown);
diff --git a/legacy/emotion/src/examples/emotion_basic_example.c b/legacy/emotion/src/examples/emotion_basic_example.c
deleted file mode 100644
index 7e3e4c2e4b..0000000000
--- a/legacy/emotion/src/examples/emotion_basic_example.c
+++ /dev/null
@@ -1,81 +0,0 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6
7#define WIDTH (320)
8#define HEIGHT (240)
9
/* Smart callback: emitted once the emotion backend actually starts
 * playing the file. */
static void
_playback_started_cb(void *data, Evas_Object *o, void *event_info)
{
   printf("Emotion object started playback.\n");
}
15
16int
17main(int argc, const char *argv[])
18{
19 Ecore_Evas *ee;
20 Evas *e;
21 Evas_Object *bg, *em;
22 const char *filename = NULL;
23
24 if (argc < 2)
25 {
26 printf("One argument is necessary. Usage:\n");
27 printf("\t%s <filename>\n", argv[0]);
28 }
29
30 filename = argv[1];
31
32 if (!ecore_evas_init())
33 return EXIT_FAILURE;
34
35 /* this will give you a window with an Evas canvas under the first
36 * engine available */
37 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
38 if (!ee)
39 goto error;
40
41 ecore_evas_show(ee);
42
43 /* the canvas pointer, de facto */
44 e = ecore_evas_get(ee);
45
46 /* adding a background to this example */
47 bg = evas_object_rectangle_add(e);
48 evas_object_name_set(bg, "our dear rectangle");
49 evas_object_color_set(bg, 255, 255, 255, 255); /* white bg */
50 evas_object_move(bg, 0, 0); /* at canvas' origin */
51 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
52 evas_object_show(bg);
53
54 /* Creating the emotion object */
55 em = emotion_object_add(e);
56 emotion_object_init(em, NULL);
57
58 evas_object_smart_callback_add(
59 em, "playback_started", _playback_started_cb, NULL);
60
61 emotion_object_file_set(em, filename);
62
63 evas_object_move(em, 0, 0);
64 evas_object_resize(em, WIDTH, HEIGHT);
65 evas_object_show(em);
66
67 emotion_object_play_set(em, EINA_TRUE);
68
69 ecore_main_loop_begin();
70
71 ecore_evas_free(ee);
72 ecore_evas_shutdown();
73 return 0;
74
75error:
76 fprintf(stderr, "you got to have at least one evas engine built and linked"
77 " up to ecore-evas for this example to run properly.\n");
78
79 ecore_evas_shutdown();
80 return -1;
81}
diff --git a/legacy/emotion/src/examples/emotion_border_example.c b/legacy/emotion/src/examples/emotion_border_example.c
deleted file mode 100644
index 9df53f4333..0000000000
--- a/legacy/emotion/src/examples/emotion_border_example.c
+++ /dev/null
@@ -1,238 +0,0 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6#include <string.h>
7
8#define WIDTH (320)
9#define HEIGHT (240)
10
/* Playlist handed in on the command line (stringshared entries) and the
 * node currently being played; NULL means "no current file yet". */
static Eina_List *filenames = NULL;
static Eina_List *curfile = NULL;
13
/* Smart callback: emitted once the emotion backend actually starts
 * playing the file. */
static void
_playback_started_cb(void *data, Evas_Object *o, void *event_info)
{
   printf("Emotion object started playback.\n");
}
19
/* Builds an emotion object bound to the "gstreamer" backend and hooks
 * the playback-started callback. */
static Evas_Object *
_create_emotion_object(Evas *e)
{
   Evas_Object *em = emotion_object_add(e);

   emotion_object_init(em, "gstreamer");

   evas_object_smart_callback_add(
       em, "playback_started", _playback_started_cb, NULL);

   return em;
}
32
33static void
34_on_key_down(void *data, Evas *e, Evas_Object *o, void *event_info)
35{
36 Evas_Event_Key_Down *ev = event_info;
37 Evas_Object *em = data;
38
39 if (!strcmp(ev->keyname, "Return"))
40 {
41 emotion_object_play_set(em, EINA_TRUE);
42 }
43 else if (!strcmp(ev->keyname, "space"))
44 {
45 emotion_object_play_set(em, EINA_FALSE);
46 }
47 else if (!strcmp(ev->keyname, "Escape"))
48 {
49 ecore_main_loop_quit();
50 }
51 else if (!strcmp(ev->keyname, "n"))
52 {
53 const char *file;
54 if (!curfile)
55 curfile = filenames;
56 else
57 curfile = eina_list_next(curfile);
58 file = eina_list_data_get(curfile);
59 fprintf(stderr, "playing next file: %s\n", file);
60 emotion_object_file_set(em, file);
61 }
62 else if (!strcmp(ev->keyname, "p"))
63 {
64 const char *file;
65 if (!curfile)
66 curfile = eina_list_last(filenames);
67 else
68 curfile = eina_list_prev(curfile);
69 file = eina_list_data_get(curfile);
70 fprintf(stderr, "playing next file: %s\n", file);
71 emotion_object_file_set(em, file);
72 }
73 else if (!strcmp(ev->keyname, "b"))
74 {
75 emotion_object_border_set(em, 0, 0, 50, 50);
76 }
77 else if (!strcmp(ev->keyname, "0"))
78 {
79 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_NONE);
80 }
81 else if (!strcmp(ev->keyname, "w"))
82 {
83 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_WIDTH);
84 }
85 else if (!strcmp(ev->keyname, "h"))
86 {
87 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_HEIGHT);
88 }
89 else if (!strcmp(ev->keyname, "2"))
90 {
91 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_BOTH);
92 }
93 else if (!strcmp(ev->keyname, "c"))
94 {
95 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_CROP);
96 }
97 else
98 {
99 fprintf(stderr, "unhandled key: %s\n", ev->keyname);
100 }
101}
102
/* Per-frame smart callback; too noisy to log by default. */
static void
_frame_decode_cb(void *data, Evas_Object *o, void *event_info)
{
   // fprintf(stderr, "smartcb: frame_decode\n");
}

/* Stream length became known or changed. */
static void
_length_change_cb(void *data, Evas_Object *o, void *event_info)
{
   fprintf(stderr, "smartcb: length_change: %0.3f\n", emotion_object_play_length_get(o));
}

/* Playback position advanced. */
static void
_position_update_cb(void *data, Evas_Object *o, void *event_info)
{
   fprintf(stderr, "smartcb: position_update: %0.3f\n", emotion_object_position_get(o));
}

/* Progress status/info reported by the backend changed. */
static void
_progress_change_cb(void *data, Evas_Object *o, void *event_info)
{
   fprintf(stderr, "smartcb: progress_change: %0.3f, %s\n",
           emotion_object_progress_status_get(o),
           emotion_object_progress_info_get(o));
}

/* The decoded video frame size changed; report the new geometry. */
static void
_frame_resize_cb(void *data, Evas_Object *o, void *event_info)
{
   int w, h;
   emotion_object_size_get(o, &w, &h);
   fprintf(stderr, "smartcb: frame_resize: %dx%d\n", w, h);
}
136
static void /* adjust canvas' contents on resizes */
_canvas_resize_cb(Ecore_Evas *ee)
{
   int w, h;
   Evas_Object *bg, *em;

   ecore_evas_geometry_get(ee, NULL, NULL, &w, &h);

   /* objects were stashed on the Ecore_Evas by main() under these keys */
   bg = ecore_evas_data_get(ee, "bg");
   em = ecore_evas_data_get(ee, "emotion");

   /* background fills the window; the video keeps a 10px margin */
   evas_object_resize(bg, w, h);
   evas_object_move(em, 10, 10);
   evas_object_resize(em, w - 20, h - 20);
}
152
153int
154main(int argc, const char *argv[])
155{
156 Ecore_Evas *ee;
157 Evas *e;
158 Evas_Object *bg, *em;
159 int i;
160
161 if (argc < 2)
162 {
163 printf("One argument is necessary. Usage:\n");
164 printf("\t%s <filename>\n", argv[0]);
165 }
166
167 eina_init();
168 for (i = 1; i < argc; i++)
169 filenames = eina_list_append(filenames, eina_stringshare_add(argv[i]));
170
171 curfile = filenames;
172
173 if (!ecore_evas_init())
174 return EXIT_FAILURE;
175
176 /* this will give you a window with an Evas canvas under the first
177 * engine available */
178 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
179 if (!ee)
180 goto error;
181
182 ecore_evas_callback_resize_set(ee, _canvas_resize_cb);
183
184 ecore_evas_show(ee);
185
186 /* the canvas pointer, de facto */
187 e = ecore_evas_get(ee);
188
189 /* adding a background to this example */
190 bg = evas_object_rectangle_add(e);
191 evas_object_name_set(bg, "our dear rectangle");
192 evas_object_color_set(bg, 255, 0, 0, 255); /* white bg */
193 evas_object_move(bg, 0, 0); /* at canvas' origin */
194 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
195 evas_object_show(bg);
196
197 ecore_evas_data_set(ee, "bg", bg);
198
199 /* Creating the emotion object */
200 em = _create_emotion_object(e);
201 emotion_object_file_set(em, eina_list_data_get(curfile));
202 evas_object_move(em, 10, 10);
203 evas_object_resize(em, WIDTH, HEIGHT);
204 evas_object_resize(em, WIDTH - 20, HEIGHT - 20);
205 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_BOTH);
206 emotion_object_bg_color_set(em, 0, 128, 0, 255);
207 evas_object_show(em);
208
209 ecore_evas_data_set(ee, "emotion", em);
210
211 evas_object_smart_callback_add(em, "frame_decode", _frame_decode_cb, NULL);
212 evas_object_smart_callback_add(em, "length_change", _length_change_cb, NULL);
213 evas_object_smart_callback_add(em, "position_update", _position_update_cb, NULL);
214 evas_object_smart_callback_add(em, "progress_change", _progress_change_cb, NULL);
215 evas_object_smart_callback_add(em, "frame_resize", _frame_resize_cb, NULL);
216
217 evas_object_event_callback_add(bg, EVAS_CALLBACK_KEY_DOWN, _on_key_down, em);
218 evas_object_focus_set(bg, EINA_TRUE);
219
220 emotion_object_play_set(em, EINA_TRUE);
221
222 ecore_main_loop_begin();
223
224 ecore_evas_free(ee);
225 ecore_evas_shutdown();
226 return 0;
227
228error:
229 fprintf(stderr, "you got to have at least one evas engine built and linked"
230 " up to ecore-evas for this example to run properly.\n");
231
232 EINA_LIST_FREE(filenames, curfile)
233 eina_stringshare_del(eina_list_data_get(curfile));
234
235 ecore_evas_shutdown();
236 eina_shutdown();
237 return -1;
238}
diff --git a/legacy/emotion/src/examples/emotion_generic_example.c b/legacy/emotion/src/examples/emotion_generic_example.c
deleted file mode 100644
index b8382862d5..0000000000
--- a/legacy/emotion/src/examples/emotion_generic_example.c
+++ /dev/null
@@ -1,233 +0,0 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6#include <string.h>
7#include <unistd.h>
8
9#define WIDTH (320)
10#define HEIGHT (240)
11
/* Playlist handed in on the command line (stringshared entries) and the
 * node currently being played; NULL means "no current file yet". */
static Eina_List *filenames = NULL;
static Eina_List *curfile = NULL;
14
/* Smart callback: playback actually started. */
static void
_playback_started_cb(void *data, Evas_Object *o, void *event_info)
{
   printf("Emotion object started playback.\n");
}

/* Smart callback: end of stream — stop and rewind so the same file can
 * be replayed from the beginning. */
static void
_playback_stopped_cb(void *data, Evas_Object *o, void *event_info)
{
   printf("Emotion playback stopped.\n");
   emotion_object_play_set(o, EINA_FALSE);
   emotion_object_position_set(o, 0);
}
28
/* Builds an emotion object bound to the "generic" player backend and
 * hooks the playback start/finish callbacks. */
static Evas_Object *
_create_emotion_object(Evas *e)
{
   Evas_Object *em = emotion_object_add(e);

   emotion_object_init(em, "generic");

   evas_object_smart_callback_add(
       em, "playback_started", _playback_started_cb, NULL);
   evas_object_smart_callback_add(
       em, "playback_finished", _playback_stopped_cb, NULL);

   return em;
}
43
44static void
45_on_key_down(void *data, Evas *e, Evas_Object *o, void *event_info)
46{
47 Evas_Event_Key_Down *ev = event_info;
48 Evas_Object *em = data;
49
50 if (!strcmp(ev->keyname, "Return"))
51 {
52 emotion_object_play_set(em, EINA_TRUE);
53 }
54 else if (!strcmp(ev->keyname, "space"))
55 {
56 emotion_object_play_set(em, EINA_FALSE);
57 }
58 else if (!strcmp(ev->keyname, "Escape"))
59 {
60 ecore_main_loop_quit();
61 }
62 else if (!strcmp(ev->keyname, "t"))
63 {
64 int w, h;
65 emotion_object_size_get(em, &w, &h);
66 fprintf(stderr, "example -> size: %dx%d\n", w, h);
67 }
68 else if (!strcmp(ev->keyname, "s"))
69 {
70 float len, pos;
71 len = emotion_object_play_length_get(em);
72 pos = 0.98 * len;
73 fprintf(stderr, "skipping to position %0.3f\n", pos);
74 emotion_object_position_set(em, pos);
75 }
76 else if (!strcmp(ev->keyname, "1"))
77 {
78 fprintf(stderr, "setting speed to 1.0\n");
79 emotion_object_play_speed_set(em, 1.0);
80 }
81 else if (!strcmp(ev->keyname, "2"))
82 {
83 fprintf(stderr, "setting speed to 2.0\n");
84 emotion_object_play_speed_set(em, 2.0);
85 }
86 else if (!strcmp(ev->keyname, "n"))
87 {
88 const char *file;
89 if (!curfile)
90 curfile = filenames;
91 else
92 curfile = eina_list_next(curfile);
93 file = eina_list_data_get(curfile);
94 fprintf(stderr, "playing next file: %s\n", file);
95 emotion_object_file_set(em, file);
96 }
97 else if (!strcmp(ev->keyname, "p"))
98 {
99 const char *file;
100 if (!curfile)
101 curfile = eina_list_last(filenames);
102 else
103 curfile = eina_list_prev(curfile);
104 file = eina_list_data_get(curfile);
105 fprintf(stderr, "playing next file: %s\n", file);
106 emotion_object_file_set(em, file);
107 }
108 else if (!strcmp(ev->keyname, "d"))
109 {
110 evas_object_del(em);
111 }
112 else if (!strcmp(ev->keyname, "l"))
113 {
114 // force frame dropping
115 sleep(5);
116 }
117 else
118 {
119 fprintf(stderr, "unhandled key: %s\n", ev->keyname);
120 }
121}
122
/* Per-frame smart callback; too noisy to log by default. */
static void
_frame_decode_cb(void *data, Evas_Object *o, void *event_info)
{
   // fprintf(stderr, "smartcb: frame_decode\n");
}

/* Stream length became known or changed. */
static void
_length_change_cb(void *data, Evas_Object *o, void *event_info)
{
   fprintf(stderr, "smartcb: length_change: %0.3f\n", emotion_object_play_length_get(o));
}

/* Playback position advanced. */
static void
_position_update_cb(void *data, Evas_Object *o, void *event_info)
{
   fprintf(stderr, "smartcb: position_update: %0.3f\n", emotion_object_position_get(o));
}

/* Progress status/info reported by the backend changed. */
static void
_progress_change_cb(void *data, Evas_Object *o, void *event_info)
{
   fprintf(stderr, "smartcb: progress_change: %0.3f, %s\n",
           emotion_object_progress_status_get(o),
           emotion_object_progress_info_get(o));
}

/* The decoded video frame size changed; report the new geometry. */
static void
_frame_resize_cb(void *data, Evas_Object *o, void *event_info)
{
   int w, h;
   emotion_object_size_get(o, &w, &h);
   fprintf(stderr, "smartcb: frame_resize: %dx%d\n", w, h);
}
156
157int
158main(int argc, const char *argv[])
159{
160 Ecore_Evas *ee;
161 Evas *e;
162 Evas_Object *bg, *em;
163 int i;
164
165 if (argc < 2)
166 {
167 printf("One argument is necessary. Usage:\n");
168 printf("\t%s <filename>\n", argv[0]);
169 }
170
171 eina_init();
172 for (i = 1; i < argc; i++)
173 filenames = eina_list_append(filenames, eina_stringshare_add(argv[i]));
174
175 curfile = filenames;
176
177 if (!ecore_evas_init())
178 return EXIT_FAILURE;
179
180 /* this will give you a window with an Evas canvas under the first
181 * engine available */
182 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
183 if (!ee)
184 goto error;
185
186 ecore_evas_show(ee);
187
188 /* the canvas pointer, de facto */
189 e = ecore_evas_get(ee);
190
191 /* adding a background to this example */
192 bg = evas_object_rectangle_add(e);
193 evas_object_name_set(bg, "our dear rectangle");
194 evas_object_color_set(bg, 255, 255, 255, 255); /* white bg */
195 evas_object_move(bg, 0, 0); /* at canvas' origin */
196 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
197 evas_object_show(bg);
198
199 /* Creating the emotion object */
200 em = _create_emotion_object(e);
201 emotion_object_file_set(em, eina_list_data_get(curfile));
202 evas_object_move(em, 0, 0);
203 evas_object_resize(em, WIDTH, HEIGHT);
204 evas_object_show(em);
205
206 evas_object_smart_callback_add(em, "frame_decode", _frame_decode_cb, NULL);
207 evas_object_smart_callback_add(em, "length_change", _length_change_cb, NULL);
208 evas_object_smart_callback_add(em, "position_update", _position_update_cb, NULL);
209 evas_object_smart_callback_add(em, "progress_change", _progress_change_cb, NULL);
210 evas_object_smart_callback_add(em, "frame_resize", _frame_resize_cb, NULL);
211
212 evas_object_event_callback_add(bg, EVAS_CALLBACK_KEY_DOWN, _on_key_down, em);
213 evas_object_focus_set(bg, EINA_TRUE);
214
215 emotion_object_play_set(em, EINA_TRUE);
216
217 ecore_main_loop_begin();
218
219 ecore_evas_free(ee);
220 ecore_evas_shutdown();
221 return 0;
222
223error:
224 fprintf(stderr, "you got to have at least one evas engine built and linked"
225 " up to ecore-evas for this example to run properly.\n");
226
227 EINA_LIST_FREE(filenames, curfile)
228 eina_stringshare_del(eina_list_data_get(curfile));
229
230 ecore_evas_shutdown();
231 eina_shutdown();
232 return -1;
233}
diff --git a/legacy/emotion/src/examples/emotion_generic_subtitle_example.c b/legacy/emotion/src/examples/emotion_generic_subtitle_example.c
deleted file mode 100644
index 448b505449..0000000000
--- a/legacy/emotion/src/examples/emotion_generic_subtitle_example.c
+++ /dev/null
@@ -1,97 +0,0 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6
7#define WIDTH (320)
8#define HEIGHT (240)
9
/* Smart callback: emitted once the emotion backend actually starts
 * playing the file. */
static void
_playback_started_cb(void *data, Evas_Object *o, void *event_info)
{
   printf("Emotion object started playback.\n");
}

/* Window close button: quit the main loop so main() can clean up. */
static void
_on_delete(Ecore_Evas *ee)
{
   ecore_main_loop_quit();
}
21
/* Plays a media file with the "generic" engine, optionally loading a
 * separate subtitle file passed as the second command line argument. */
int
main(int argc, const char *argv[])
{
   Ecore_Evas *ee;
   Evas *e;
   Evas_Object *bg, *em;
   const char *filename = NULL;
   const char *subtitle_filename = NULL;

   if (argc < 2)
     {
        printf("At least one argument is necessary. Usage:\n");
        printf("\t%s <filename> <subtitle filename>\n", argv[0]);
        return -1;
     }

   filename = argv[1];

   /* the subtitle file is optional */
   if (argc > 2)
     subtitle_filename = argv[2];

   if (!ecore_evas_init())
     return EXIT_FAILURE;

   /* this will give you a window with an Evas canvas under the first
    * engine available */
   ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
   if (!ee)
     goto error;

   ecore_evas_callback_delete_request_set(ee, _on_delete);

   ecore_evas_show(ee);

   /* the canvas pointer, de facto */
   e = ecore_evas_get(ee);

   /* adding a background to this example */
   bg = evas_object_rectangle_add(e);
   evas_object_name_set(bg, "our dear rectangle");
   evas_object_color_set(bg, 255, 255, 255, 255); /* white bg */
   evas_object_move(bg, 0, 0); /* at canvas' origin */
   evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
   evas_object_show(bg);

   /* Creating the emotion object */
   em = emotion_object_add(e);
   emotion_object_init(em, "generic");

   /* NOTE(review): the subtitle file is configured before the media file
    * is set — presumably required by the backend; verify. */
   if (subtitle_filename)
     emotion_object_video_subtitle_file_set(em, subtitle_filename);

   evas_object_smart_callback_add(
       em, "playback_started", _playback_started_cb, NULL);

   emotion_object_file_set(em, filename);

   evas_object_move(em, 0, 0);
   evas_object_resize(em, WIDTH, HEIGHT);
   evas_object_show(em);

   emotion_object_play_set(em, EINA_TRUE);

   ecore_main_loop_begin();

   ecore_evas_free(ee);
   ecore_evas_shutdown();
   return 0;

error:
   fprintf(stderr, "you got to have at least one evas engine built and linked"
           " up to ecore-evas for this example to run properly.\n");

   ecore_evas_shutdown();
   return -1;
}
diff --git a/legacy/emotion/src/examples/emotion_signals_example.c b/legacy/emotion/src/examples/emotion_signals_example.c
deleted file mode 100644
index 2469c468ba..0000000000
--- a/legacy/emotion/src/examples/emotion_signals_example.c
+++ /dev/null
@@ -1,173 +0,0 @@
#include <Ecore.h>
#include <Ecore_Evas.h>
#include <Evas.h>
#include <Emotion.h>
#include <stdio.h>
#include <stdlib.h> /* EXIT_FAILURE (used in main, was missing) */
6
7#define WIDTH (320)
8#define HEIGHT (240)
9
10static void
11_display_info(Evas_Object *o)
12{
13 int w, h;
14 printf("playing: %d\n", emotion_object_play_get(o));
15 printf("meta title: %s\n",
16 emotion_object_meta_info_get(o, EMOTION_META_INFO_TRACK_TITLE));
17 printf("seek position: %0.3f\n",
18 emotion_object_position_get(o));
19 printf("play length: %0.3f\n",
20 emotion_object_play_length_get(o));
21 printf("is seekable: %d\n",
22 emotion_object_seekable_get(o));
23 emotion_object_size_get(o, &w, &h);
24 printf("video geometry: %dx%d\n", w, h);
25 printf("video width / height ratio: %0.3f\n",
26 emotion_object_ratio_get(o));
27 printf("\n");
28}
29
30static void
31_playback_started_cb(void *data, Evas_Object *o, void *event_info)
32{
33 printf(">>> Emotion object started playback.\n");
34 _display_info(o);
35}
36
37static void
38_playback_finished_cb(void *data, Evas_Object *o, void *event_info)
39{
40 printf(">>> Emotion object finished playback.\n");
41 _display_info(o);
42}
43
44static void
45_open_done_cb(void *data, Evas_Object *o, void *event_info)
46{
47 printf(">>> Emotion object open done.\n");
48 _display_info(o);
49}
50
51static void
52_position_update_cb(void *data, Evas_Object *o, void *event_info)
53{
54 printf(">>> Emotion object first position update.\n");
55 evas_object_smart_callback_del(o, "position_update", _position_update_cb);
56 _display_info(o);
57}
58
59static void
60_frame_decode_cb(void *data, Evas_Object *o, void *event_info)
61{
62 printf(">>> Emotion object first frame decode.\n");
63 evas_object_smart_callback_del(o, "frame_decode", _frame_decode_cb);
64 _display_info(o);
65}
66
67static void
68_decode_stop_cb(void *data, Evas_Object *o, void *event_info)
69{
70 printf(">>> Emotion object decode stop.\n");
71 _display_info(o);
72}
73
74static void
75_frame_resize_cb(void *data, Evas_Object *o, void *event_info)
76{
77 printf(">>> Emotion object frame resize.\n");
78 _display_info(o);
79}
80
81static void
82_setup_emotion_callbacks(Evas_Object *o)
83{
84 evas_object_smart_callback_add(
85 o, "playback_started", _playback_started_cb, NULL);
86 evas_object_smart_callback_add(
87 o, "playback_finished", _playback_finished_cb, NULL);
88 evas_object_smart_callback_add(
89 o, "open_done", _open_done_cb, NULL);
90 evas_object_smart_callback_add(
91 o, "position_update", _position_update_cb, NULL);
92 evas_object_smart_callback_add(
93 o, "frame_decode", _frame_decode_cb, NULL);
94 evas_object_smart_callback_add(
95 o, "decode_stop", _decode_stop_cb, NULL);
96 evas_object_smart_callback_add(
97 o, "frame_resize", _frame_resize_cb, NULL);
98}
99
100int
101main(int argc, const char *argv[])
102{
103 Ecore_Evas *ee;
104 Evas *e;
105 Evas_Object *bg, *em;
106 const char *filename = NULL;
107 const char *module = NULL;
108
109 if (argc < 2)
110 {
111 printf("At least one argument is necessary. Usage:\n");
112 printf("\t%s <filename> [module_name]\n", argv[0]);
113 goto error;
114 }
115
116 filename = argv[1];
117
118 if (argc >= 3)
119 module = argv[2];
120
121 if (!ecore_evas_init())
122 return EXIT_FAILURE;
123
124 /* this will give you a window with an Evas canvas under the first
125 * engine available */
126 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
127 if (!ee)
128 goto error;
129
130 ecore_evas_show(ee);
131
132 /* the canvas pointer, de facto */
133 e = ecore_evas_get(ee);
134
135 /* adding a background to this example */
136 bg = evas_object_rectangle_add(e);
137 evas_object_name_set(bg, "our dear rectangle");
138 evas_object_color_set(bg, 255, 255, 255, 255); /* white bg */
139 evas_object_move(bg, 0, 0); /* at canvas' origin */
140 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
141 evas_object_show(bg);
142
143 /* Creating the emotion object */
144 em = emotion_object_add(e);
145
146 /* Try to load the specified module - NULL for auto-discover */
147 if (!emotion_object_init(em, module))
148 fprintf(stderr, "Emotion: \"%s\" module could not be initialized.\n", module);
149
150 _display_info(em);
151 _setup_emotion_callbacks(em);
152
153 if (!emotion_object_file_set(em, filename))
154 fprintf(stderr, "Emotion: Could not load the file \"%s\"\n", filename);
155
156 evas_object_move(em, 0, 0);
157 evas_object_resize(em, WIDTH, HEIGHT);
158 evas_object_show(em);
159
160 emotion_object_play_set(em, EINA_TRUE);
161
162 ecore_main_loop_begin();
163
164 ecore_evas_free(ee);
165 ecore_evas_shutdown();
166 return 0;
167
168 ecore_evas_free(ee);
169
170error:
171 ecore_evas_shutdown();
172 return -1;
173}
diff --git a/legacy/emotion/src/lib/Emotion.h b/legacy/emotion/src/lib/Emotion.h
deleted file mode 100644
index 4c97f63cc7..0000000000
--- a/legacy/emotion/src/lib/Emotion.h
+++ /dev/null
@@ -1,1314 +0,0 @@
1#ifndef EMOTION_H
2#define EMOTION_H
3
4/**
5 * @file
6 * @brief Emotion Media Library
7 *
8 * These routines are used for Emotion.
9 */
10
11/**
12 *
13 * @mainpage Emotion Library Documentation
14 *
15 * @version 1.7.0
16 * @date 2003-2012
17 *
18 * @section intro What is Emotion?
19 *
20 * A media object library for Evas and Ecore.
21 *
22 * Emotion is a library that allows playing audio and video files, using one of
23 * its backends (gstreamer or xine).
24 *
25 * It is integrated into Ecore through its mainloop, and is transparent to the
26 * user of the library how the decoding of audio and video is being done. Once
27 * the objects are created, the user can set callbacks to the specific events
28 * and set options to this object, all in the main loop (no threads are needed).
29 *
30 * Emotion is also integrated with Evas. The emotion object returned by
31 * emotion_object_add() is an Evas smart object, so it can be manipulated with
32 * default Evas object functions. Callbacks can be added to the signals emitted
33 * by this object with evas_object_smart_callback_add().
34 *
35 * @section work How does Emotion work?
36 *
37 * The Emotion library uses Evas smart objects to allow you to manipulate the
38 * created object as any other Evas object, and to connect to its signals,
39 * handling them when needed. It's also possible to swallow Emotion objects
40 * inside Edje themes, and expect it to behave as a normal image or rectangle
41 * when regarding to its dimensions.
42 *
43 * To instantiate an Emotion object, the simple code below is enough:
44 *
45 * @code
46 * em = emotion_object_add(e);
47 * emotion_object_init(em, NULL);
48 *
49 * emotion_object_file_set(em, file_path);
50 *
51 * evas_object_move(em, 0, 0);
52 * evas_object_resize(em, WIDTH, HEIGHT);
53 * evas_object_show(em);
54 *
55 * emotion_object_play_set(em, EINA_TRUE);
56 * @endcode
57 *
58 * See the @ref Emotion_API for a better reference.
59 *
60 * Please see the @ref authors page for contact details.
61 *
62 */
63
64/**
65 *
66 * @page authors Authors
67 *
68 * @author Carsten Haitzler <raster@rasterman.com>
69 * @author Vincent Torri <torri@maths.univ-evry.fr>
70 * @author Nicolas Aguirre <aguirre.nicolas@gmail.com>
71 * @author Sebastian Dransfeld <sd@tango.flipp.net>
72 * @author Cedric Bail <cedric.bail@free.fr>
73 *
74 * Please contact <enlightenment-devel@lists.sourceforge.net> to get in
75 * contact with the developers and maintainers.
76 *
77 */
78
79#include <Evas.h>
80
81#ifdef EAPI
82# undef EAPI
83#endif
84
85#ifdef _WIN32
86# ifdef EFL_EMOTION_BUILD
87# ifdef DLL_EXPORT
88# define EAPI __declspec(dllexport)
89# else
90# define EAPI
91# endif /* ! DLL_EXPORT */
92# else
93# define EAPI __declspec(dllimport)
94# endif /* ! EFL_EMOTION_BUILD */
95#else
96# ifdef __GNUC__
97# if __GNUC__ >= 4
98# define EAPI __attribute__ ((visibility("default")))
99# else
100# define EAPI
101# endif
102# else
103# define EAPI
104# endif
105#endif /* ! _WIN32 */
106
107/**
108 * @file Emotion.h
109 * @brief The file that provides Emotion the API, with functions available for
110 * play, seek, change volume, etc.
111 */
112
enum _Emotion_Module
{
  EMOTION_MODULE_XINE,      /**< xine decoder backend */
  EMOTION_MODULE_GSTREAMER  /**< gstreamer decoder backend */
};
118
/* DVD-style navigation events (menus, directional keys, digits)
 * that an application can feed to the playback backend —
 * NOTE(review): exact delivery function not visible here, confirm
 * against the emotion event-send API. */
enum _Emotion_Event
{
   EMOTION_EVENT_MENU1, // Escape Menu
   EMOTION_EVENT_MENU2, // Title Menu
   EMOTION_EVENT_MENU3, // Root Menu
   EMOTION_EVENT_MENU4, // Subpicture Menu
   EMOTION_EVENT_MENU5, // Audio Menu
   EMOTION_EVENT_MENU6, // Angle Menu
   EMOTION_EVENT_MENU7, // Part Menu
   EMOTION_EVENT_UP,
   EMOTION_EVENT_DOWN,
   EMOTION_EVENT_LEFT,
   EMOTION_EVENT_RIGHT,
   EMOTION_EVENT_SELECT,
   EMOTION_EVENT_NEXT,
   EMOTION_EVENT_PREV,
   EMOTION_EVENT_ANGLE_NEXT,
   EMOTION_EVENT_ANGLE_PREV,
   EMOTION_EVENT_FORCE,
   EMOTION_EVENT_0, // digit keys 0..10 follow
   EMOTION_EVENT_1,
   EMOTION_EVENT_2,
   EMOTION_EVENT_3,
   EMOTION_EVENT_4,
   EMOTION_EVENT_5,
   EMOTION_EVENT_6,
   EMOTION_EVENT_7,
   EMOTION_EVENT_8,
   EMOTION_EVENT_9,
   EMOTION_EVENT_10
};
150
151/**
152 * @enum _Emotion_Meta_Info
153 *
154 * Used for retrieving information about the media file being played.
155 *
156 * @see emotion_object_meta_info_get()
157 *
158 * @ingroup Emotion_Info
159 */
/* Tag kinds selectable in emotion_object_meta_info_get(); whether a
 * given tag is available depends on the file and the backend. */
enum _Emotion_Meta_Info
{
  EMOTION_META_INFO_TRACK_TITLE, /**< track title */
  EMOTION_META_INFO_TRACK_ARTIST, /**< artist name */
  EMOTION_META_INFO_TRACK_ALBUM, /**< album name */
  EMOTION_META_INFO_TRACK_YEAR, /**< track year */
  EMOTION_META_INFO_TRACK_GENRE, /**< track genre */
  EMOTION_META_INFO_TRACK_COMMENT, /**< track comments */
  EMOTION_META_INFO_TRACK_DISC_ID, /**< track disc ID */
  EMOTION_META_INFO_TRACK_COUNT /**< track count - number of the track in the album */
};
171
172/**
173 * @enum _Emotion_Vis
174 *
175 * Used for displaying a visualization on the emotion object.
176 *
177 * @see emotion_object_vis_set()
178 *
179 * @ingroup Emotion_Visualization
180 */
enum _Emotion_Vis
{
  EMOTION_VIS_NONE, /**< no visualization set */
  EMOTION_VIS_GOOM, /**< goom */
  EMOTION_VIS_LIBVISUAL_BUMPSCOPE, /**< bumpscope */
  EMOTION_VIS_LIBVISUAL_CORONA, /**< corona */
  EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES, /**< dancing particles */
  EMOTION_VIS_LIBVISUAL_GDKPIXBUF, /**< gdkpixbuf */
  EMOTION_VIS_LIBVISUAL_G_FORCE, /**< G force */
  EMOTION_VIS_LIBVISUAL_GOOM, /**< goom */
  EMOTION_VIS_LIBVISUAL_INFINITE, /**< infinite */
  EMOTION_VIS_LIBVISUAL_JAKDAW, /**< jakdaw */
  EMOTION_VIS_LIBVISUAL_JESS, /**< jess */
  EMOTION_VIS_LIBVISUAL_LV_ANALYSER, /**< lv analyser */
  EMOTION_VIS_LIBVISUAL_LV_FLOWER, /**< lv flower */
  EMOTION_VIS_LIBVISUAL_LV_GLTEST, /**< lv gltest */
  EMOTION_VIS_LIBVISUAL_LV_SCOPE, /**< lv scope */
  EMOTION_VIS_LIBVISUAL_MADSPIN, /**< madspin */
  EMOTION_VIS_LIBVISUAL_NEBULUS, /**< nebulus */
  EMOTION_VIS_LIBVISUAL_OINKSIE, /**< oinksie */
  EMOTION_VIS_LIBVISUAL_PLASMA, /**< plasma */
  EMOTION_VIS_LAST /* sentinel - must remain the last entry */
};
204
205/**
206 * @enum Emotion_Suspend
207 *
208 * Used for emotion pipeline resource management.
209 *
210 * @see emotion_object_suspend_set()
211 * @see emotion_object_suspend_get()
212 *
213 * @ingroup Emotion_Ressource
214 */
typedef enum
{
  EMOTION_WAKEUP, /**< pipeline is up and running */
  EMOTION_SLEEP, /**< turn off hardware resource usage like overlay */
  EMOTION_DEEP_SLEEP, /**< destroy the pipeline, but keep full resolution pixels output around */
  EMOTION_HIBERNATE /**< destroy the pipeline, and keep half resolution or object resolution if lower */
} Emotion_Suspend;
222
223/**
224 * @enum _Emotion_Aspect
225 * Defines the aspect ratio option.
226 */
enum _Emotion_Aspect
{
  EMOTION_ASPECT_KEEP_NONE, /**< ignore video aspect ratio */
  EMOTION_ASPECT_KEEP_WIDTH, /**< respect video aspect, fitting its width inside the object width */
  EMOTION_ASPECT_KEEP_HEIGHT, /**< respect video aspect, fitting its height inside the object height */
  EMOTION_ASPECT_KEEP_BOTH, /**< respect video aspect, fitting it inside the object area */
  EMOTION_ASPECT_CROP, /**< respect video aspect, cropping exceeding area */
  EMOTION_ASPECT_CUSTOM, /**< use custom borders/crop for the video */
};
236
237typedef enum _Emotion_Module Emotion_Module;
238typedef enum _Emotion_Event Emotion_Event;
239typedef enum _Emotion_Meta_Info Emotion_Meta_Info; /**< Meta info type to be retrieved. */
240typedef enum _Emotion_Vis Emotion_Vis; /**< Type of visualization. */
241typedef enum _Emotion_Aspect Emotion_Aspect; /**< Aspect ratio option. */
242
243#define EMOTION_CHANNEL_AUTO -1
244#define EMOTION_CHANNEL_DEFAULT 0
245
246#ifdef __cplusplus
247extern "C" {
248#endif
249
250#define EMOTION_VERSION_MAJOR 1
251#define EMOTION_VERSION_MINOR 8
252
 /** Runtime library version; see the emotion_version global below. */
 typedef struct _Emotion_Version
   {
      int major;    /**< major version number */
      int minor;    /**< minor version number */
      int micro;    /**< micro (patch) version number */
      int revision; /**< repository revision - presumably 0 on release tarballs, TODO confirm */
   } Emotion_Version;
260
261 EAPI extern Emotion_Version *emotion_version;
262
263/* api calls available */
264
265/**
266 * @brief How to create, initialize, manipulate and connect to signals of an
267 * Emotion object.
268 * @defgroup Emotion_API API available for manipulating Emotion object.
269 *
270 * @{
271 *
272 * Emotion provides an Evas smart object that allows to play, control and
273 * display a video or audio file. The API is synchronous but not everything
274 * happens immediately. There are also some signals to report changed states.
275 *
276 * Basically, once the object is created and initialized, a file will be set to
277 * it, and then it can be resized, moved, and controlled by other Evas object
278 * functions.
279 *
280 * However, the decoding of the music and video occurs not in the Ecore main
281 * loop, but usually in another thread (this depends on the module being used).
282 * The synchronization between this other thread and the main loop is not visible
283 * to the end user of the library. The user can just register callbacks to the
284 * available signals to receive information about the changed states, and can
285 * call other functions from the API to request more changes on the current
286 * loaded file.
287 *
288 * There will be a delay between an API being called and it being really
289 * executed, since this request will be done in the main thread, and it needs to
290 * be sent to the decoding thread. For this reason, always call functions like
291 * emotion_object_size_get() or emotion_object_length_get() after some signal
292 * being sent, like "playback_started" or "open_done". @ref
293 * emotion_signals_example.c "This example demonstrates this behavior".
294 *
295 * @section signals Available signals
296 * The Evas_Object returned by emotion_object_add() has a number of signals that
297 * can be listened to using evas' smart callbacks mechanism. All signals have
298 * NULL as event info. The following is a list of interesting signals:
299 * @li "playback_started" - Emitted when the playback starts
300 * @li "playback_finished" - Emitted when the playback finishes
301 * @li "frame_decode" - Emitted every time a frame is decoded
302 * @li "open_done" - Emitted when the media file is opened
303 * @li "position_update" - Emitted when emotion_object_position_set is called
304 * @li "decode_stop" - Emitted after the last frame is decoded
305 *
306 * @section Examples
307 *
308 * The following examples exemplify the emotion usage. There's also the
309 * emotion_test binary that is distributed with this library and cover the
310 * entire API, but since it is too long and repetitive to be explained, its code
311 * is just displayed as another example.
312 *
313 * @li @ref emotion_basic_example_c
314 * @li @ref emotion_signals_example.c "Emotion signals"
315 * @li @ref emotion_test_main.c "emotion_test - full API usage"
316 *
317 */
318
319/**
320 * @defgroup Emotion_Init Creation and initialization functions
321 */
322
323/**
324 * @defgroup Emotion_Audio Audio control functions
325 */
326
327/**
328 * @defgroup Emotion_Video Video control functions
329 */
330
331/**
332 * @defgroup Emotion_Visualization Visualization control functions
333 */
334
335/**
336 * @defgroup Emotion_Info Miscellaneous information retrieval functions
337 */
338
339/**
340 * @defgroup Emotion_Ressource Video resource management
341 */
342
343EAPI Eina_Bool emotion_init(void);
344EAPI Eina_Bool emotion_shutdown(void);
345
346/**
347 * @brief Add an emotion object to the canvas.
348 *
349 * @param evas The canvas where the object will be added to.
350 * @return The emotion object just created.
351 *
352 * This function creates an emotion object and adds it to the specified @p evas.
353 * The returned object can be manipulated as any other Evas object, using the
354 * default object manipulation functions - evas_object_*.
355 *
356 * After creating the object with this function, it's still necessary to
357 * initialize it with emotion_object_init(), and if an audio file is going to be
358 * played with this object instead of a video, use
359 * emotion_object_video_mute_set().
360 *
361 * The next step is to open the desired file with emotion_object_file_set(), and
362 * start playing it with emotion_object_play_set().
363 *
364 * @see emotion_object_init()
365 * @see emotion_object_video_mute_set()
366 * @see emotion_object_file_set()
367 * @see emotion_object_play_set()
368 *
369 * @ingroup Emotion_Init
370 */
371EAPI Evas_Object *emotion_object_add (Evas *evas);
372
373/**
374 * @brief Set the specified option for the current module.
375 *
376 * @param obj The emotion object which the option is being set to.
377 * @param opt The option that is being set. Currently supported options: "video"
378 * and "audio".
379 * @param val The value of the option. Currently only supports "off" (?!?!?!)
380 *
381 * This function allows one to mute the video or audio of the emotion object.
382 *
383 * @note Please don't use this function, consider using
384 * emotion_object_audio_mute_set() and emotion_object_video_mute_set() instead.
385 *
386 * @see emotion_object_audio_mute_set()
387 * @see emotion_object_video_mute_set()
388 *
389 * @ingroup Emotion_Init
390 */
391EAPI void emotion_object_module_option_set (Evas_Object *obj, const char *opt, const char *val);
392
393/**
394 * @brief Initializes an emotion object with the specified module.
395 *
396 * @param obj The emotion object to be initialized.
397 * @param module_filename The name of the module to be used (gstreamer or xine).
398 * @return @c EINA_TRUE if the specified module was successfully initialized for
399 * this object, @c EINA_FALSE otherwise.
400 *
401 * This function is required after creating the emotion object, in order to
402 * specify which module will be used with this object. Different objects can
403 * use different modules to play a media file. The current supported modules are
404 * @b gstreamer and @b xine.
405 *
406 * To use any of them, you need to make sure that support for them was compiled
407 * correctly.
408 *
409 * @note It's possible to disable the build of a module with
410 * --disable-module_name.
411 *
412 * @see emotion_object_add()
413 * @see emotion_object_file_set()
414 *
415 * @ingroup Emotion_Init
416 */
417EAPI Eina_Bool emotion_object_init (Evas_Object *obj, const char *module_filename);
418
419/**
420 * @brief Set borders for the emotion object.
421 *
422 * @param obj The emotion object where borders are being set.
423 * @param l The left border.
424 * @param r The right border.
425 * @param t The top border.
426 * @param b The bottom border.
427 *
428 * This function sets borders for the emotion video object (just when a video is
429 * present). When positive values are given to one of the parameters, a border
430 * will be added to the respective position of the object, representing that
431 * size on the original video size. However, if the video is scaled up or down
432 * (i.e. the emotion object size is different from the video size), the borders
433 * will be scaled respectively too.
434 *
435 * If a negative value is given to one of the parameters, instead of a border,
436 * that respective side of the video will be cropped.
437 *
438 * It's possible to set a color for the added borders (default is transparent)
439 * with emotion_object_bg_color_set(). By default, an Emotion object doesn't
440 * have any border.
441 *
442 * @see emotion_object_border_get()
443 * @see emotion_object_bg_color_set()
444 *
445 * @ingroup Emotion_Video
446 */
447EAPI void emotion_object_border_set(Evas_Object *obj, int l, int r, int t, int b);
448
449/**
450 * @brief Get the borders set for the emotion object.
451 *
452 * @param obj The emotion object from which the borders are being retrieved.
453 * @param l The left border.
454 * @param r The right border.
455 * @param t The top border.
456 * @param b The bottom border.
457 *
458 * @see emotion_object_border_set()
459 *
460 * @ingroup Emotion_Video
461 */
462EAPI void emotion_object_border_get(const Evas_Object *obj, int *l, int *r, int *t, int *b);
463
464/**
465 * @brief Set a color for the background rectangle of this emotion object.
466 *
467 * @param obj The emotion object where the background color is being set.
468 * @param r Red component of the color.
469 * @param g Green component of the color.
470 * @param b Blue component of the color.
471 * @param a Alpha channel of the color.
472 *
473 * This is useful when a border is added to any side of the Emotion object. The
474 * area between the edge of the video and the edge of the object will be filled
475 * with the specified color.
476 *
477 * The default color is 0, 0, 0, 0 (transparent).
478 *
479 * @see emotion_object_bg_color_get()
480 *
481 * @ingroup Emotion_Video
482 */
483EAPI void emotion_object_bg_color_set(Evas_Object *obj, int r, int g, int b, int a);
484
485/**
486 * @brief Get the background color set for the emotion object.
487 *
488 * @param obj The emotion object from which the background color is being retrieved.
489 * @param r Red component of the color.
490 * @param g Green component of the color.
491 * @param b Blue component of the color.
492 * @param a Alpha channel of the color.
493 *
494 * @see emotion_object_bg_color_set()
495 *
496 * @ingroup Emotion_Video
497 */
498EAPI void emotion_object_bg_color_get(const Evas_Object *obj, int *r, int *g, int *b, int *a);
499
500/**
501 * @brief Set whether emotion should keep the aspect ratio of the video.
502 *
503 * @param obj The emotion object where to set the aspect.
504 * @param a The aspect ratio policy.
505 *
506 * Instead of manually calculating the required border to set with
507 * emotion_object_border_set(), and using this to fix the aspect ratio of the
508 * video when the emotion object has a different aspect, it's possible to just
509 * set the policy to be used.
510 *
511 * The options are:
512 *
513 * - @b #EMOTION_ASPECT_KEEP_NONE - ignore the video aspect ratio, and reset any
514 * border set to 0, stretching the video inside the emotion object area. This
515 * option is similar to EVAS_ASPECT_CONTROL_NONE size hint.
516 * - @b #EMOTION_ASPECT_KEEP_WIDTH - respect the video aspect ratio, fitting the
517 * video width inside the object width. This option is similar to
518 * EVAS_ASPECT_CONTROL_HORIZONTAL size hint.
519 * - @b #EMOTION_ASPECT_KEEP_HEIGHT - respect the video aspect ratio, fitting
520 * the video height inside the object height. This option is similar to
521 * EVAS_ASPECT_CONTROL_VERTICAL size hint.
522 * - @b #EMOTION_ASPECT_KEEP_BOTH - respect the video aspect ratio, fitting both
523 * its width and height inside the object area. This option is similar to
524 * EVAS_ASPECT_CONTROL_BOTH size hint. It's the effect called letterboxing.
525 * - @b #EMOTION_ASPECT_CROP - respect the video aspect ratio, fitting the width
526 * or height inside the object area, and cropping the exceeding areas of the
527 * video in height or width. It's the effect called pan-and-scan.
528 * - @b #EMOTION_ASPECT_CUSTOM - ignore the video aspect ratio, and use the
529 * current set from emotion_object_border_set().
530 *
531 * @note Calling this function with any value except #EMOTION_ASPECT_CUSTOM will
532 * invalidate borders set with emotion_object_border_set().
533 *
534 * @note Calling emotion_object_border_set() will automatically set the aspect
535 * policy to #EMOTION_ASPECT_CUSTOM.
536 *
537 * @see emotion_object_border_set()
538 * @see emotion_object_keep_aspect_get()
539 *
540 * @ingroup Emotion_Video
541 */
542EAPI void emotion_object_keep_aspect_set(Evas_Object *obj, Emotion_Aspect a);
543
544/**
545 * @brief Get the current emotion aspect ratio policy.
546 *
547 * @param obj The emotion object from which we are fetching the aspect ratio
548 * policy.
549 * @return The current aspect ratio policy.
550 *
551 * @see emotion_object_keep_aspect_set()
552 *
553 * @ingroup Emotion_Video
554 */
555EAPI Emotion_Aspect emotion_object_keep_aspect_get(const Evas_Object *obj);
556
557/**
558 * @brief Set the file to be played in the Emotion object.
559 *
560 * @param obj The emotion object where the file is being loaded.
561 * @param filename Path to the file to be loaded. It can be absolute or relative
562 * path.
563 * @return EINA_TRUE if the new file could be loaded successfully, and
564 * EINA_FALSE if the file could not be loaded. This happens when the filename is
565 * could not be found, when the module couldn't open the file, when no module is
566 * initialized in this object, or when the @p filename is the same as the
567 * one previously set.
568 *
569 * This function sets the file to be used with this emotion object. If the
570 * object already has another file set, this file will be unset and unloaded,
571 * and the new file will be loaded to this emotion object. The seek position
572 * will be set to 0, and the emotion object will be paused, instead of playing.
573 *
574 * If there was already a filename set, and it's the same as the one being set
575 * now, this function does nothing and returns EINA_FALSE.
576 *
577 * Use @c NULL as argument to @p filename if you want to unload the current file
578 * but don't want to load anything else.
579 *
580 * @see emotion_object_init()
581 * @see emotion_object_play_set()
582 * @see emotion_object_file_get()
583 *
584 * @ingroup Emotion_Init
585 */
586EAPI Eina_Bool emotion_object_file_set (Evas_Object *obj, const char *filename);
587
588/**
589 * @brief Get the filename of the file associated with the emotion object.
590 *
591 * @param obj The emotion object from which the filename will be retrieved.
592 * @return The path to the file loaded into this emotion object.
593 *
594 * This function returns the path of the file loaded in this emotion object. If
595 * no object is loaded, it will return @c NULL.
596 *
597 * @note Don't free or change the string returned by this function in any way.
598 * If you want to unset it, use @c emotion_object_file_set(obj, NULL).
599 *
600 * @see emotion_object_file_set()
601 *
602 * @ingroup Emotion_Init
603 */
604EAPI const char *emotion_object_file_get (const Evas_Object *obj);
605/**
606 * @defgroup Emotion_Play Play control functions
607 *
608 * @{
609 */
610/**
611 *
612 * @brief Set play/pause state of the media file.
613 *
614 * @param obj The emotion object whose state will be changed.
615 * @param play EINA_TRUE to play, EINA_FALSE to pause.
616 *
617 * This functions sets the currently playing status of the video. Using this
618 * function to play or pause the video doesn't alter its current position.
619 */
620EAPI void emotion_object_play_set (Evas_Object *obj, Eina_Bool play);
621/**
622 * @brief Get play/pause state of the media file.
623 *
624 * @param obj The emotion object from which the state will be retrieved.
625 * @return EINA_TRUE if playing. EINA_FALSE if not playing.
626 */
627EAPI Eina_Bool emotion_object_play_get (const Evas_Object *obj);
628/**
629 * @brief Set the position in the media file.
630 *
631 * @param obj The emotion object whose position will be changed.
632 * @param sec The position(in seconds) to which the media file will be set.
633 *
634 * This functions sets the current position of the media file to @p sec, this
635 * only works on seekable streams. Setting the position doesn't change the
636 * playing state of the media file.
637 *
638 * @see emotion_object_seekable_get
639 */
640EAPI void emotion_object_position_set (Evas_Object *obj, double sec);
641/**
642 * @brief Get the position in the media file.
643 *
644 * @param obj The emotion object from which the position will be retrieved.
645 * @return The position of the media file.
646 *
647 * The position is returned as the number of seconds since the beginning of the
648 * media file.
649 */
650EAPI double emotion_object_position_get (const Evas_Object *obj);
651
652/**
653 * @brief Get the percentual size of the buffering cache.
654 *
655 * @param obj The emotion object from which the buffer size will be retrieved.
656 * @return The buffer percent size, ranging from 0.0 to 1.0
657 *
658 * The buffer size is returned as a number between 0.0 and 1.0, 0.0 means
659 * the buffer is empty, 1.0 means full.
660 * If no buffering is in progress 1.0 is returned. In all other cases (maybe
661 * the backend don't support buffering) 1.0 is returned, thus you can always
662 * check for buffer_size < 1.0 to know if buffering is in progress.
663 *
664 * @warning Generic backend don't implement this (will return 1.0).
665 */
666EAPI double emotion_object_buffer_size_get (const Evas_Object *obj);
667
668/**
669 * @brief Get whether the media file is seekable.
670 *
671 * @param obj The emotion object from which the seekable status will be
672 * retrieved.
673 * @return EINA_TRUE if the media file is seekable, EINA_FALSE otherwise.
674 */
675EAPI Eina_Bool emotion_object_seekable_get (const Evas_Object *obj);
676/**
677 * @brief Get the length of play for the media file.
678 *
679 * @param obj The emotion object from which the length will be retrieved.
680 * @return The length of the media file in seconds.
681 *
682 * This function returns the length of the media file in seconds.
683 *
684 * @warning This will return 0 if called before the "length_change" signal has,
685 * been emitted.
686 */
687EAPI double emotion_object_play_length_get (const Evas_Object *obj);
688
689/**
690 * @brief Set the play speed of the media file.
691 *
692 * @param obj The emotion object whose speed will be set.
693 * @param speed The speed to be set in the range [0,infinity)
694 *
695 * This function sets the speed with which the media file will be played. 1.0
696 * represents the normal speed, 2 double speed, 0.5 half speed and so on.
697 *
698 * @warning The only backend that implements this is the experimental VLC
699 * backend.
700 */
701EAPI void emotion_object_play_speed_set (Evas_Object *obj, double speed);
702/**
703 * @brief Get the play speed of the media file.
704 *
705 * @param obj The emotion object from which the filename will be retrieved.
706 * @return The current speed of the media file.
707 *
708 * @see emotion_object_play_speed_set
709 */
710EAPI double emotion_object_play_speed_get (const Evas_Object *obj);
711/**
712 * @brief Get how much of the file has been played.
713 *
714 * @param obj The emotion object from which the filename will be retrieved.
715 * @return The progress of the media file.
716 *
717 * @warning Don't change or free the returned string.
718 * @warning gstreamer xine backends don't implement this(will return NULL).
719 */
720EAPI const char *emotion_object_progress_info_get (const Evas_Object *obj);
721/**
722 * @brief Get how much of the file has been played.
723 *
724 * @param obj The emotion object from which the filename will be retrieved
725 * @return The progress of the media file.
726 *
727 * This function gets the progress in playing the file, the return value is in
728 * the [0, 1] range.
729 *
730 * @warning gstreamer xine backends don't implement this(will return 0).
731 */
732EAPI double emotion_object_progress_status_get (const Evas_Object *obj);
733/**
734 * @}
735 */
736EAPI Eina_Bool emotion_object_video_handled_get (const Evas_Object *obj);
737EAPI Eina_Bool emotion_object_audio_handled_get (const Evas_Object *obj);
738
739/**
740 * @brief Retrieve the video aspect ratio of the media file loaded.
741 *
742 * @param obj The emotion object which the video aspect ratio will be retrieved
743 * from.
744 * @return The video aspect ratio of the file loaded.
745 *
746 * This function returns the video aspect ratio (width / height) of the file
747 * loaded. It can be used to adapt the size of the emotion object in the canvas,
748 * so the aspect won't be changed (by wrongly resizing the object). Or to crop
749 * the video correctly, if necessary.
750 *
751 * The described behavior can be applied like following. Consider a given
752 * emotion object that we want to position inside an area, which we will
753 * represent by @c w and @c h. Since we want to position this object either
754 * stretching, or filling the entire area but overflowing the video, or just
755 * adjust the video to fit inside the area without keeping the aspect ratio, we
756 * must compare the video aspect ratio with the area aspect ratio:
757 * @code
758 * int w = 200, h = 300; // an arbitrary value which represents the area where
759 * // the video would be placed
760 * int vw, vh;
761 * double r, vr = emotion_object_ratio_get(obj);
762 * r = (double)w / h;
763 * @endcode
764 *
765 * Now, if we want to make the video fit inside the area, the following code
766 * would do it:
767 * @code
768 * if (vr > r) // the video is wider than the area
769 * {
770 * vw = w;
771 * vh = w / vr;
772 * }
773 * else // the video is taller than the area
774 * {
775 * vh = h;
776 * vw = h * vr;
777 * }
778 * evas_object_resize(obj, vw, vh);
779 * @endcode
780 *
781 * And for keeping the aspect ratio but making the video fill the entire area,
782 * overflowing the content which can't fit inside it, we would do:
783 * @code
784 * if (vr > r) // the video is wider than the area
785 * {
786 * vh = h;
787 * vw = h * vr;
788 * }
789 * else // the video is taller than the area
790 * {
791 * vw = w;
792 * vh = w / vr;
793 * }
794 * evas_object_resize(obj, vw, vh);
795 * @endcode
796 *
797 * Finally, by just resizing the video to the video area, we would have the
798 * video stretched:
799 * @code
800 * vw = w;
801 * vh = h;
802 * evas_object_resize(obj, vw, vh);
803 * @endcode
804 *
805 * The following diagram exemplifies what would happen to the video,
806 * respectively, in each case:
807 *
808 * @image html emotion_ratio.png
809 * @image latex emotion_ratio.eps width=\textwidth
810 *
811 * @note This function returns the aspect ratio that the video @b should be, but
812 * sometimes the reported size from emotion_object_size_get() represents a
813 * different aspect ratio. You can safely resize the video to respect the aspect
814 * ratio returned by @b this function.
815 *
816 * @see emotion_object_size_get()
817 *
818 * @ingroup Emotion_Video
819 */
820EAPI double emotion_object_ratio_get (const Evas_Object *obj);
821
822/**
823 * @brief Retrieve the video size of the loaded file.
824 *
825 * @param obj The object from which we are retrieving the video size.
826 * @param iw A pointer to a variable where the width will be stored.
827 * @param ih A pointer to a variable where the height will be stored.
828 *
829 * This function returns the reported size of the loaded video file. If a file
830 * that doesn't contain a video channel is loaded, then this size can be
831 * ignored.
832 *
833 * The value reported by this function should be consistent with the aspect
834 * ratio returned by emotion_object_ratio_get(), but sometimes the information
835 * stored in the file is wrong. So use the ratio size reported by
836 * emotion_object_ratio_get(), since it is more likely going to be accurate.
837 *
838 * @note Use @c NULL for @p iw or @p ih if you don't need one of these values.
839 *
840 * @see emotion_object_ratio_get()
841 *
842 * @ingroup Emotion_Video
843 */
844EAPI void emotion_object_size_get (const Evas_Object *obj, int *iw, int *ih);
845
846/**
847 * @brief Sets whether to use of high-quality image scaling algorithm
848 * of the given video object.
849 *
850 * When enabled, a higher quality video scaling algorithm is used when
851 * scaling videos to sizes other than the source video. This gives
852 * better results but is more computationally expensive.
853 *
854 * @param obj The given video object.
855 * @param smooth Whether to use smooth scale or not.
856 *
857 * @see emotion_object_smooth_scale_get()
858 *
859 * @ingroup Emotion_Video
860 */
861EAPI void emotion_object_smooth_scale_set (Evas_Object *obj, Eina_Bool smooth);
862
863/**
864 * @brief Gets whether the high-quality image scaling algorithm
865 * of the given video object is used.
866 *
867 * @param obj The given video object.
868 * @return Whether the smooth scale is used or not.
869 *
870 * @see emotion_object_smooth_scale_set()
871 *
872 * @ingroup Emotion_Video
873 */
874EAPI Eina_Bool emotion_object_smooth_scale_get (const Evas_Object *obj);
875EAPI void emotion_object_event_simple_send (Evas_Object *obj, Emotion_Event ev);
876
877/**
878 * @brief Set the audio volume.
879 *
880 * @param obj The object where the volume is being set.
881 * @param vol The new volume parameter. Range is from 0.0 to 1.0.
882 *
883 * Sets the audio volume of the stream being played. This has nothing to do with
884 * the system volume. This volume will be multiplied by the system volume. e.g.:
885 * if the current volume level is 0.5, and the system volume is 50%, it will be
886 * 0.5 * 0.5 = 0.25.
887 *
888 * The default value depends on the module used. This value doesn't get changed
889 * when another file is loaded.
890 *
891 * @see emotion_object_audio_volume_get()
892 *
893 * @ingroup Emotion_Audio
894 */
895EAPI void emotion_object_audio_volume_set (Evas_Object *obj, double vol);
896
897/**
898 * @brief Get the audio volume.
899 *
900 * @param obj The object from which we are retrieving the volume.
901 * @return The current audio volume level for this object.
902 *
903 * Get the current value for the audio volume level. Range is from 0.0 to 1.0.
904 * This volume is set with emotion_object_audio_volume_set().
905 *
906 * @see emotion_object_audio_volume_set()
907 *
908 * @ingroup Emotion_Audio
909 */
910EAPI double emotion_object_audio_volume_get (const Evas_Object *obj);
911
912/**
913 * @brief Set the mute audio option for this object.
914 *
915 * @param obj The object which we are setting the mute audio option.
916 * @param mute Whether the audio should be muted (@c EINA_TRUE) or not (@c
917 * EINA_FALSE).
918 *
919 * This function sets the mute audio option for this emotion object. The current
920 * module used for this object can use this to avoid decoding the audio portion
921 * of the loaded media file.
922 *
923 * @see emotion_object_audio_mute_get()
924 * @see emotion_object_video_mute_set()
925 *
926 * @ingroup Emotion_Audio
927 */
928EAPI void emotion_object_audio_mute_set (Evas_Object *obj, Eina_Bool mute);
929
930/**
931 * @brief Get the mute audio option of this object.
932 *
933 * @param obj The object which we are retrieving the mute audio option from.
934 * @return Whether the audio is muted (@c EINA_TRUE) or not (@c EINA_FALSE).
935 *
936 * This function return the mute audio option from this emotion object. It can
937 * be set with emotion_object_audio_mute_set().
938 *
939 * @see emotion_object_audio_mute_set()
940 *
941 * @ingroup Emotion_Audio
942 */
943EAPI Eina_Bool emotion_object_audio_mute_get (const Evas_Object *obj);
944EAPI int emotion_object_audio_channel_count (const Evas_Object *obj);
945EAPI const char *emotion_object_audio_channel_name_get(const Evas_Object *obj, int channel);
946EAPI void emotion_object_audio_channel_set (Evas_Object *obj, int channel);
947EAPI int emotion_object_audio_channel_get (const Evas_Object *obj);
948
949/**
950 * @brief Set the mute video option for this object.
951 *
952 * @param obj The object which we are setting the mute video option.
953 * @param mute Whether the video should be muted (@c EINA_TRUE) or not (@c
954 * EINA_FALSE).
955 *
956 * This function sets the mute video option for this emotion object. The
957 * current module used for this object can use this information to avoid
958 * decoding the video portion of the loaded media file.
959 *
960 * @see emotion_object_video_mute_get()
961 * @see emotion_object_audio_mute_set()
962 *
963 * @ingroup Emotion_Video
964 */
965EAPI void emotion_object_video_mute_set (Evas_Object *obj, Eina_Bool mute);
966
967/**
968 * @brief Get the mute video option of this object.
969 *
970 * @param obj The object which we are retrieving the mute video option from.
971 * @return Whether the video is muted (@c EINA_TRUE) or not (@c EINA_FALSE).
972 *
973 * This function returns the mute video option from this emotion object. It can
974 * be set with emotion_object_video_mute_set().
975 *
976 * @see emotion_object_video_mute_set()
977 *
978 * @ingroup Emotion_Video
979 */
980EAPI Eina_Bool emotion_object_video_mute_get (const Evas_Object *obj);
981
982/**
983 * @brief Set the video's subtitle file path.
984 *
985 * @param obj The object which we are setting a subtitle file path.
986 * @param filepath The subtitle file path.
987 *
 988 * This function sets a video's subtitle file path (i.e. an .srt file). For
 989 * supported subtitle formats consult the backend's documentation.
990 *
991 * @see emotion_object_video_subtitle_file_get().
992 *
993 * @ingroup Emotion_Video
994 * @since 1.8
995 */
996EAPI void emotion_object_video_subtitle_file_set (Evas_Object *obj, const char *filepath);
997
998/**
999 * @brief Get the video's subtitle file path.
1000 *
1001 * @param obj The object which we are retrieving the subtitle file path from.
1002 * @return The video's subtitle file path previously set, NULL otherwise.
1003 *
1004 * This function returns the video's subtitle file path, if not previously set
1005 * or in error NULL is returned.
1006 *
1007 * @see emotion_object_video_subtitle_file_set().
1008 *
1009 * @ingroup Emotion_Video
1010 * @since 1.8
1011 */
1012EAPI const char *emotion_object_video_subtitle_file_get (const Evas_Object *obj);
1013
1014/**
1015 * @brief Get the number of available video channel
1016 *
1017 * @param obj The object which we are retrieving the channel count from
1018 * @return the number of available channel.
1019 *
1020 * @see emotion_object_video_channel_name_get()
1021 *
1022 * @ingroup Emotion_Video
1023 */
1024EAPI int emotion_object_video_channel_count (const Evas_Object *obj);
1025EAPI const char *emotion_object_video_channel_name_get(const Evas_Object *obj, int channel);
1026EAPI void emotion_object_video_channel_set (Evas_Object *obj, int channel);
1027EAPI int emotion_object_video_channel_get (const Evas_Object *obj);
1028EAPI void emotion_object_spu_mute_set (Evas_Object *obj, Eina_Bool mute);
1029EAPI Eina_Bool emotion_object_spu_mute_get (const Evas_Object *obj);
1030EAPI int emotion_object_spu_channel_count (const Evas_Object *obj);
1031EAPI const char *emotion_object_spu_channel_name_get (const Evas_Object *obj, int channel);
1032EAPI void emotion_object_spu_channel_set (Evas_Object *obj, int channel);
1033EAPI int emotion_object_spu_channel_get (const Evas_Object *obj);
1034EAPI int emotion_object_chapter_count (const Evas_Object *obj);
1035EAPI void emotion_object_chapter_set (Evas_Object *obj, int chapter);
1036EAPI int emotion_object_chapter_get (const Evas_Object *obj);
1037EAPI const char *emotion_object_chapter_name_get (const Evas_Object *obj, int chapter);
1038EAPI void emotion_object_eject (Evas_Object *obj);
1039
1040/**
1041 * @brief Get the dvd title from this emotion object.
1042 *
1043 * @param obj The object which the title will be retrieved from.
1044 * @return A string containing the title.
1045 *
1046 * This function is only useful when playing a DVD.
1047 *
1048 * @note Don't change or free the string returned by this function.
1049 *
1050 * @ingroup Emotion_Info
1051 */
1052EAPI const char *emotion_object_title_get (const Evas_Object *obj);
1053EAPI const char *emotion_object_ref_file_get (const Evas_Object *obj);
1054EAPI int emotion_object_ref_num_get (const Evas_Object *obj);
1055EAPI int emotion_object_spu_button_count_get (const Evas_Object *obj);
1056EAPI int emotion_object_spu_button_get (const Evas_Object *obj);
1057
1058/**
1059 * @brief Retrieve meta information from this file being played.
1060 *
1061 * @param obj The object which the meta info will be extracted from.
1062 * @param meta The type of meta information that will be extracted.
1063 *
1064 * This function retrieves information about the file loaded. It can retrieve
1065 * the track title, artist name, album name, etc. See @ref Emotion_Meta_Info
1066 * for all the possibilities.
1067 *
1068 * The meta info may be not available on all types of files. It will return @c
 1069 * NULL if the file doesn't have meta info, or if this specific field is
1070 * empty.
1071 *
1072 * @note Don't change or free the string returned by this function.
1073 *
1074 * @see Emotion_Meta_Info
1075 *
1076 * @ingroup Emotion_Info
1077 */
1078EAPI const char *emotion_object_meta_info_get (const Evas_Object *obj, Emotion_Meta_Info meta);
1079
1080/**
1081 * @brief Set the visualization to be used with this object.
1082 *
1083 * @param obj The object where the visualization will be set on.
1084 * @param visualization The type of visualization to be used.
1085 *
1086 * The @p visualization specified will be played instead of a video. This is
1087 * commonly used to display a visualization for audio only files (musics).
1088 *
1089 * The available visualizations are @ref Emotion_Vis.
1090 *
1091 * @see Emotion_Vis
1092 * @see emotion_object_vis_get()
1093 * @see emotion_object_vis_supported()
1094 *
1095 * @ingroup Emotion_Visualization
1096 */
1097EAPI void emotion_object_vis_set (Evas_Object *obj, Emotion_Vis visualization);
1098
1099/**
1100 * @brief Get the type of visualization in use by this emotion object.
1101 *
1102 * @param obj The emotion object which the visualization is being retrieved
1103 * from.
1104 * @return The type of visualization in use by this object.
1105 *
1106 * The type of visualization can be set by emotion_object_vis_set().
1107 *
1108 * @see Emotion_Vis
1109 * @see emotion_object_vis_set()
1110 * @see emotion_object_vis_supported()
1111 *
1112 * @ingroup Emotion_Visualization
1113 */
1114EAPI Emotion_Vis emotion_object_vis_get (const Evas_Object *obj);
1115
1116/**
1117 * @brief Query whether a type of visualization is supported by this object.
1118 *
1119 * @param obj The object which the query is being ran on.
1120 * @param visualization The type of visualization that is being queried.
1121 * @return EINA_TRUE if the visualization is supported, EINA_FALSE otherwise.
1122 *
1123 * This can be used to check if a visualization is supported. e.g.: one wants to
1124 * display a list of available visualizations for a specific object.
1125 *
1126 * @see Emotion_Vis
1127 * @see emotion_object_vis_set()
1128 * @see emotion_object_vis_get()
1129 *
1130 * @ingroup Emotion_Visualization
1131 */
1132EAPI Eina_Bool emotion_object_vis_supported (const Evas_Object *obj, Emotion_Vis visualization);
1133
1134/**
 1135 * @brief Raise the priority of an object so it will have privileged access to hardware resources.
1136 *
1137 * @param obj The object which the query is being ran on.
 1138 * @param priority EINA_TRUE means give this object priority access to the hardware resource.
1139 *
 1140 * Hardware has a few dedicated pipelines that can process the video at no cost for the CPU.
 1141 * Especially on SoCs, you mostly have one (on mobile phone SoCs) or two (on Set Top Box SoCs) when
 1142 * Picture in Picture is needed. And most applications just have a few video streams that really
 1143 * deserve high frame rate, high quality output. That's what this call is for.
 1144 *
 1145 * Please note that if Emotion can't acquire a privileged hardware resource, it will fall back
 1146 * to the no-priority path. This works on a first-come, first-served basis.
1147 *
1148 * @see emotion_object_priority_get()
1149 *
1150 * @ingroup Emotion_Ressource
1151 */
1152EAPI void emotion_object_priority_set(Evas_Object *obj, Eina_Bool priority);
1153
1154/**
1155 * @brief Get the actual priority of an object.
1156 *
1157 * @param obj The object which the query is being ran on.
1158 * @return EINA_TRUE if the object has a priority access to the hardware.
1159 *
 1160 * This actually returns the priority status of an object. If it failed to get privileged
 1161 * access to the hardware, it will return EINA_FALSE.
 1162 *
 1163 * @see emotion_object_priority_set()
1164 *
1165 * @ingroup Emotion_Ressource
1166 */
1167EAPI Eina_Bool emotion_object_priority_get(const Evas_Object *obj);
1168
1169/**
1170 * @brief Change the state of an object pipeline.
1171 *
1172 * @param obj The object which the query is being ran on.
1173 * @param state The new state for the object.
1174 *
1175 * Changing the state of a pipeline should help preserve the battery of an embedded device.
1176 * But it will only work sanely if the pipeline is not playing at the time you change its
1177 * state. Depending on the engine all state may be not implemented.
1178 *
1179 * @see Emotion_Suspend
1180 * @see emotion_object_suspend_get()
1181 *
1182 * @ingroup Emotion_Ressource
1183 */
1184EAPI void emotion_object_suspend_set(Evas_Object *obj, Emotion_Suspend state);
1185
1186/**
1187 * @brief Get the current state of the pipeline
1188 *
1189 * @param obj The object which the query is being ran on.
1190 * @return the current state of the pipeline.
1191 *
1192 * @see Emotion_Suspend
1193 * @see emotion_object_suspend_set()
1194 *
1195 * @ingroup Emotion_Ressource
1196 */
1197EAPI Emotion_Suspend emotion_object_suspend_get(Evas_Object *obj);
1198
1199/**
1200 * @brief Load the last known position if available
1201 *
1202 * @param obj The object which the query is being ran on.
1203 *
 1204 * By using Xattr, Emotion is able, if the system permits it, to store and retrieve
 1205 * the latest position. It should trigger some smart callback to let the application
 1206 * know when it succeeds or fails. Every operation is fully asynchronous and not
 1207 * linked to the actual engine used to play the video.
1208 *
1209 * @see emotion_object_last_position_save()
1210 *
1211 * @ingroup Emotion_Info
1212 */
1213EAPI void emotion_object_last_position_load(Evas_Object *obj);
1214
1215/**
 1216 * @brief Save the latest position if possible
1217 *
1218 * @param obj The object which the query is being ran on.
1219 *
 1220 * By using Xattr, Emotion is able, if the system permits it, to store and retrieve
 1221 * the latest position. It should trigger some smart callback to let the application
 1222 * know when it succeeds or fails. Every operation is fully asynchronous and not
 1223 * linked to the actual engine used to play the video.
1224 *
1225 * @see emotion_object_last_position_load()
1226 *
1227 * @ingroup Emotion_Info
1228 */
1229EAPI void emotion_object_last_position_save(Evas_Object *obj);
1230
1231/**
1232 * @brief Do we have a chance to play that file
1233 *
1234 * @param file A stringshared filename that we want to know if Emotion can play.
1235 *
 1236 * This just actually looks at the extension of the file; it doesn't check the mime-type
 1237 * nor if the file is actually sane. So this is just a hint for your application.
1238 *
1239 * @see emotion_object_extension_may_play_get()
1240 */
1241EAPI Eina_Bool emotion_object_extension_may_play_fast_get(const char *file);
1242
1243/**
1244 * @brief Do we have a chance to play that file
1245 *
1246 * @param file A filename that we want to know if Emotion can play.
1247 *
 1248 * This just actually looks at the extension of the file; it doesn't check the mime-type
 1249 * nor if the file is actually sane. So this is just a hint for your application.
1250 *
1251 * @see emotion_object_extension_may_play_fast_get()
1252 */
1253EAPI Eina_Bool emotion_object_extension_may_play_get(const char *file);
1254
1255/**
1256 * @brief Get the actual image object that contains the pixels of the video stream
1257 *
1258 * @param obj The object which the query is being ran on.
1259 *
 1260 * This function is useful when you want to get direct access to the pixels.
1261 *
1262 * @see emotion_object_image_get()
1263 */
1264EAPI Evas_Object *emotion_object_image_get(const Evas_Object *obj);
1265
1266/**
1267 * @defgroup Emotion_Webcam API available for accessing webcam
1268 */
1269
1270typedef struct _Emotion_Webcam Emotion_Webcam; /**< Webcam description */
1271
1272EAPI extern int EMOTION_WEBCAM_UPDATE; /**< Ecore_Event triggered when a new webcam is plugged in */
1273
1274/**
1275 * @brief Get a list of active and available webcam
1276 *
1277 * @return the list of available webcam at the time of the call.
1278 *
1279 * It will return the current live list of webcam. It is updated before
1280 * triggering EMOTION_WEBCAM_UPDATE and should never be modified.
1281 *
1282 * @ingroup Emotion_Webcam
1283 */
1284EAPI const Eina_List *emotion_webcams_get(void);
1285
1286/**
1287 * @brief Get the human understandable name of a Webcam
1288 *
1289 * @param ew The webcam to get the name from.
1290 * @return the actual human readable name.
1291 *
1292 * @ingroup Emotion_Webcam
1293 */
1294EAPI const char *emotion_webcam_name_get(const Emotion_Webcam *ew);
1295
1296/**
1297 * @brief Get the uri of a Webcam that will be understood by emotion
1298 *
1299 * @param ew The webcam to get the uri from.
1300 * @return the actual uri that emotion will later understood.
1301 *
1302 * @ingroup Emotion_Webcam
1303 */
1304EAPI const char *emotion_webcam_device_get(const Emotion_Webcam *ew);
1305
1306/**
1307 * @}
1308 */
1309
1310#ifdef __cplusplus
1311}
1312#endif
1313
1314#endif
diff --git a/legacy/emotion/src/lib/emotion_main.c b/legacy/emotion/src/lib/emotion_main.c
deleted file mode 100644
index d816424bf7..0000000000
--- a/legacy/emotion/src/lib/emotion_main.c
+++ /dev/null
@@ -1,481 +0,0 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#ifdef STDC_HEADERS
6# include <stdlib.h>
7# include <stddef.h>
8#else
9# ifdef HAVE_STDLIB_H
10# include <stdlib.h>
11# endif
12#endif
13#ifdef HAVE_ALLOCA_H
14# include <alloca.h>
15#elif !defined alloca
16# ifdef __GNUC__
17# define alloca __builtin_alloca
18# elif defined _AIX
19# define alloca __alloca
20# elif defined _MSC_VER
21# include <malloc.h>
22# define alloca _alloca
23# elif !defined HAVE_ALLOCA
24# ifdef __cplusplus
25extern "C"
26# endif
27void *alloca (size_t);
28# endif
29#endif
30
31#include <stdio.h>
32
33#ifdef EMOTION_HAVE_EEZE
34# include <sys/types.h>
35# include <sys/stat.h>
36# include <fcntl.h>
37# include <sys/ioctl.h>
38# ifdef HAVE_V4L2
39# include <linux/videodev2.h>
40# endif
41# include <Eeze.h>
42#endif
43
44#include <Ecore.h>
45#include <Eet.h>
46
47#include "Emotion.h"
48#include "emotion_private.h"
49
/* Library version record; VMAJ/VMIN/VMIC/VREV are provided by the build
 * configuration.  Exposed to applications through the emotion_version
 * pointer below. */
 50static Emotion_Version _version = { VMAJ, VMIN, VMIC, VREV };
/* Count of emotion objects with outstanding asynchronous work -- TODO
 * confirm against the readers/writers of this counter, which live outside
 * this chunk. */
 51static int emotion_pending_objects = 0;
 52EAPI Emotion_Version *emotion_version = &_version;
 53
/* Ecore_Event type id emitted when the webcam list changes.  Zero here;
 * presumably a real id is assigned during library initialization --
 * confirm in the init path (not visible in this chunk). */
 54EAPI int EMOTION_WEBCAM_UPDATE = 0;
55
/* Pairs a file extension with its byte length (including the NUL
 * terminator) so filenames can be tested with a single suffix
 * comparison. */
 56struct ext_match_s
 57{
 58 unsigned int length;
 59 const char *extension;
 60};
 61
/* Builds an ext_match_s initializer from a string literal.  sizeof (Ext)
 * counts the trailing NUL, which matches the "+ 1" added to the filename
 * length in _emotion_object_extension_can_play_generic_get(). */
 62#define MATCHING(Ext) \
 63 { sizeof (Ext), Ext }
 64
 65static const struct ext_match_s matchs[] =
 66{ /* map extensions to know if it's a emotion playable content for good first-guess tries */
 67 MATCHING(".264"),
 68 MATCHING(".3g2"),
 69 MATCHING(".3gp"),
 70 MATCHING(".3gp2"),
 71 MATCHING(".3gpp"),
 72 MATCHING(".3gpp2"),
 73 MATCHING(".3p2"),
 74 MATCHING(".asf"),
 75 MATCHING(".avi"),
 76 MATCHING(".bdm"),
 77 MATCHING(".bdmv"),
 78 MATCHING(".clpi"),
 79 MATCHING(".clp"),
 80 MATCHING(".fla"),
 81 MATCHING(".flv"),
 82 MATCHING(".m1v"),
 83 MATCHING(".m2v"),
 84 MATCHING(".m2t"),
 85 MATCHING(".m4v"),
 86 MATCHING(".mkv"),
 87 MATCHING(".mov"),
 88 MATCHING(".mp2"),
 89 MATCHING(".mp2ts"),
 90 MATCHING(".mp4"),
 91 MATCHING(".mpe"),
 92 MATCHING(".mpeg"),
 93 MATCHING(".mpg"),
 94 MATCHING(".mpl"),
 95 MATCHING(".mpls"),
 96 MATCHING(".mts"),
 97 MATCHING(".mxf"),
 98 MATCHING(".nut"),
 99 MATCHING(".nuv"),
 100 MATCHING(".ogg"),
 101 MATCHING(".ogm"),
 102 MATCHING(".ogv"),
 103 MATCHING(".rm"),
 104 MATCHING(".rmj"),
 105 MATCHING(".rmm"),
 106 MATCHING(".rms"),
 107 MATCHING(".rmx"),
 108 MATCHING(".rmvb"),
 109 MATCHING(".swf"),
 110 MATCHING(".ts"),
 111 MATCHING(".weba"),
 112 MATCHING(".webm"),
 113 MATCHING(".wmv")
 114};
115
116Eina_Bool
117_emotion_object_extension_can_play_generic_get(const void *data __UNUSED__, const char *file)
118{
119 unsigned int length;
120 unsigned int i;
121
122 length = eina_stringshare_strlen(file) + 1;
123 if (length < 5) return EINA_FALSE;
124
125 for (i = 0; i < sizeof (matchs) / sizeof (struct ext_match_s); ++i)
126 {
127 if (matchs[i].length > length) continue;
128
129 if (!strcasecmp(matchs[i].extension,
130 file + length - matchs[i].length))
131 return EINA_TRUE;
132 }
133
134 return EINA_FALSE;
135}
136
137EAPI Eina_Bool
138emotion_object_extension_may_play_fast_get(const char *file)
139{
140 if (!file) return EINA_FALSE;
141 return _emotion_object_extension_can_play_generic_get(NULL, file);
142}
143
144EAPI Eina_Bool
145emotion_object_extension_may_play_get(const char *file)
146{
147 const char *tmp;
148 Eina_Bool result;
149
150 if (!file) return EINA_FALSE;
151 tmp = eina_stringshare_add(file);
152 result = emotion_object_extension_may_play_fast_get(tmp);
153 eina_stringshare_del(tmp);
154
155 return result;
156}
157
 158typedef struct _Emotion_Webcams Emotion_Webcams;
 159
/* Container (de)serialized through the Eet descriptors built in
 * _emotion_webcams_data(); holds the list of known Emotion_Webcam
 * records. */
 160struct _Emotion_Webcams
 161{
 162 Eina_List *webcams;
 163};
 164
/* One webcam entry, refcounted via EINA_REFCOUNT. */
 165struct _Emotion_Webcam
 166{
 167 EINA_REFCOUNT;
 168
/* syspath is runtime-only: it is not registered in the Eet descriptor
 * below, unlike device/name/custom/filename. */
 169 const char *syspath;
 170 const char *device;
 171 const char *name;
 172
/* NOTE(review): when custom is set, emotion_webcam_destroy() skips the
 * stringshare deletions -- presumably the strings are then owned
 * elsewhere; confirm with the code that fills these fields in. */
 173 const char *custom;
 174
 175 const char *filename;
 176};
177
/* Module-wide webcam bookkeeping.  The two Eet descriptors are created by
 * _emotion_webcams_data(); the remaining state is presumably managed by
 * init/shutdown code outside this chunk -- confirm there. */
 178static int _emotion_webcams_count = 0;
 179static Eet_Data_Descriptor *_webcam_edd;
 180static Eet_Data_Descriptor *_webcams_edd;
 181
 182static Emotion_Webcams *_emotion_webcams = NULL;
 183static Eet_File *_emotion_webcams_file = NULL;
184
/* Build the Eet data descriptors used to (de)serialize the cached webcam
 * list: one descriptor per Emotion_Webcam record (device, name, custom,
 * filename -- syspath is deliberately left out as runtime-only state) and
 * one for the Emotion_Webcams container holding them as a list.  Returns
 * the container descriptor; the record descriptor is kept in _webcam_edd.
 *
 * NOTE(review): calling this more than once would leak the previously
 * created descriptors -- presumably it runs exactly once during init;
 * confirm at the call site (outside this chunk). */
 185static Eet_Data_Descriptor *
 186_emotion_webcams_data(void)
 187{
 188 Eet_Data_Descriptor_Class eddc;
 189
 190 EET_EINA_FILE_DATA_DESCRIPTOR_CLASS_SET(&eddc, Emotion_Webcam);
 191 _webcam_edd = eet_data_descriptor_file_new(&eddc);
 192 EET_DATA_DESCRIPTOR_ADD_BASIC(_webcam_edd, Emotion_Webcam, "device", device, EET_T_STRING);
 193 EET_DATA_DESCRIPTOR_ADD_BASIC(_webcam_edd, Emotion_Webcam, "name", name, EET_T_STRING);
 194 EET_DATA_DESCRIPTOR_ADD_BASIC(_webcam_edd, Emotion_Webcam, "custom", custom, EET_T_STRING);
 195 EET_DATA_DESCRIPTOR_ADD_BASIC(_webcam_edd, Emotion_Webcam, "filename", filename, EET_T_STRING);
 196
 197 EET_EINA_FILE_DATA_DESCRIPTOR_CLASS_SET(&eddc, Emotion_Webcams);
 198 _webcams_edd = eet_data_descriptor_file_new(&eddc);
 199 EET_DATA_DESCRIPTOR_ADD_LIST(_webcams_edd, Emotion_Webcams, "webcams", webcams, _webcam_edd);
 200
 201 return _webcams_edd;
 202}
203
/* Release an Emotion_Webcam record.  The stringshared fields are only
 * dropped for auto-detected webcams: for "custom" entries the strings are
 * presumably not stringshares owned by this record -- TODO confirm with
 * the code that fills in ->custom.  NOTE(review): ->custom and ->filename
 * are never freed here; verify their ownership to rule out a leak. */
 204static void
 205emotion_webcam_destroy(Emotion_Webcam *ew)
 206{
 207 if (!ew->custom)
 208 {
 209 eina_stringshare_del(ew->syspath);
 210 eina_stringshare_del(ew->device);
 211 eina_stringshare_del(ew->name);
 212 }
 213 free(ew);
 214}
215
216#ifdef EMOTION_HAVE_EEZE
217static Eeze_Udev_Watch *eeze_watcher = NULL;
218
219static void
220_emotion_check_device(Emotion_Webcam *ew)
221{
222#ifdef HAVE_V4L2
223 Emotion_Webcam *check;
224 Eina_List *l;