summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorGustavo Sverzut Barbieri <barbieri@gmail.com>2013-01-10 03:43:32 +0000
committerGustavo Sverzut Barbieri <barbieri@gmail.com>2013-01-10 03:43:32 +0000
commitdfb84c1657bfb14a5236b881193b81f4c0b8a69b (patch)
treeb51b210fc88a21eec8e5907b8bbfe12ebc669f90 /src
parent532284dbbe4259a9f2291f44d3eff376849e8031 (diff)
efl: merge emotion.
this one was quite a huge work, but hopefully it's correct. NOTES: * removed vlc generic module, it should go into a separate package. * gstreamer is enabled by default (see --disable-gstreamer) * xine is disabled by default (see --enable-gstreamer) * generic is always built statically if supported * gstreamer and xine can't be configured as static (just lacks command line options, build system supports it) * v4l2 is enabled by default on linux if eeze is built (see --disable-v4l2) * emotion_test moved to src/tests/emotion and depends on EFL_ENABLE_TESTS (--with-tests), but is still installed if enabled. TODO (need your help!): * fix warnings with gstreamer and xine engine * call engine shutdown functions if building as static * remove direct usage of PACKAGE_*_DIR and use eina_prefix * add eina_prefix checkme file as evas and others * add support for $EFL_RUN_IN_TREE * create separate package for emotion_generic_modules * check docs hierarchy (doxygen is segv'in here) SVN revision: 82501
Diffstat (limited to 'src')
-rw-r--r--src/Makefile.am5
-rw-r--r--src/Makefile_Edje.am10
-rw-r--r--src/Makefile_Emotion.am235
-rw-r--r--src/edje_external/emotion/emotion.c516
-rw-r--r--src/examples/Makefile.am2
-rw-r--r--src/examples/emotion/Makefile.am57
-rw-r--r--src/examples/emotion/emotion_basic_example.c81
-rw-r--r--src/examples/emotion/emotion_border_example.c238
-rw-r--r--src/examples/emotion/emotion_generic_example.c233
-rw-r--r--src/examples/emotion/emotion_generic_subtitle_example.c97
-rw-r--r--src/examples/emotion/emotion_signals_example.c173
-rw-r--r--src/lib/emotion/Emotion.h1332
-rw-r--r--src/lib/emotion/emotion_main.c464
-rw-r--r--src/lib/emotion/emotion_private.h137
-rw-r--r--src/lib/emotion/emotion_smart.c2133
-rw-r--r--src/modules/emotion/generic/Emotion_Generic_Plugin.h145
-rw-r--r--src/modules/emotion/generic/README79
-rw-r--r--src/modules/emotion/generic/emotion_generic.c1820
-rw-r--r--src/modules/emotion/generic/emotion_generic.h113
-rw-r--r--src/modules/emotion/gstreamer/emotion_alloc.c90
-rw-r--r--src/modules/emotion/gstreamer/emotion_convert.c251
-rw-r--r--src/modules/emotion/gstreamer/emotion_fakeeos.c70
-rw-r--r--src/modules/emotion/gstreamer/emotion_gstreamer.c2156
-rw-r--r--src/modules/emotion/gstreamer/emotion_gstreamer.h330
-rw-r--r--src/modules/emotion/gstreamer/emotion_sink.c1391
-rw-r--r--src/modules/emotion/xine/emotion_xine.c1723
-rw-r--r--src/modules/emotion/xine/emotion_xine.h98
-rw-r--r--src/modules/emotion/xine/emotion_xine_vo_out.c767
-rw-r--r--src/tests/emotion/data/bpause.pngbin0 -> 383 bytes
-rw-r--r--src/tests/emotion/data/bplay.pngbin0 -> 425 bytes
-rw-r--r--src/tests/emotion/data/bstop.pngbin0 -> 401 bytes
-rw-r--r--src/tests/emotion/data/e_logo.pngbin0 -> 7833 bytes
-rw-r--r--src/tests/emotion/data/fr1.pngbin0 -> 591 bytes
-rw-r--r--src/tests/emotion/data/fr2.pngbin0 -> 288 bytes
-rw-r--r--src/tests/emotion/data/fr3.pngbin0 -> 657 bytes
-rw-r--r--src/tests/emotion/data/fr4.pngbin0 -> 375 bytes
-rw-r--r--src/tests/emotion/data/fr5.pngbin0 -> 1366 bytes
-rw-r--r--src/tests/emotion/data/fr6.pngbin0 -> 699 bytes
-rw-r--r--src/tests/emotion/data/fr7.pngbin0 -> 1184 bytes
-rw-r--r--src/tests/emotion/data/h_slider.pngbin0 -> 917 bytes
-rw-r--r--src/tests/emotion/data/icon.edc14
-rw-r--r--src/tests/emotion/data/knob.pngbin0 -> 1076 bytes
-rw-r--r--src/tests/emotion/data/orb.pngbin0 -> 203 bytes
-rw-r--r--src/tests/emotion/data/pnl.pngbin0 -> 705 bytes
-rw-r--r--src/tests/emotion/data/sl.pngbin0 -> 225 bytes
-rw-r--r--src/tests/emotion/data/theme.edc1667
-rw-r--r--src/tests/emotion/data/tiles.pngbin0 -> 3026 bytes
-rw-r--r--src/tests/emotion/data/video_frame_bottom.pngbin0 -> 514 bytes
-rw-r--r--src/tests/emotion/data/video_frame_left.pngbin0 -> 2023 bytes
-rw-r--r--src/tests/emotion/data/video_frame_right.pngbin0 -> 2441 bytes
-rw-r--r--src/tests/emotion/data/video_frame_top.pngbin0 -> 471 bytes
-rw-r--r--src/tests/emotion/data/whb.pngbin0 -> 207 bytes
-rw-r--r--src/tests/emotion/data/window_inner_shadow.pngbin0 -> 30426 bytes
-rw-r--r--src/tests/emotion/emotion_test_main.c748
54 files changed, 17174 insertions, 1 deletions
diff --git a/src/Makefile.am b/src/Makefile.am
index 9dec122014..c530955d7b 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -42,6 +42,7 @@ include Makefile_Efreet.am
42include Makefile_Eeze.am 42include Makefile_Eeze.am
43include Makefile_EPhysics.am 43include Makefile_EPhysics.am
44include Makefile_Edje.am 44include Makefile_Edje.am
45include Makefile_Emotion.am
45 46
46.PHONY: benchmark examples 47.PHONY: benchmark examples
47 48
@@ -80,6 +81,7 @@ clean-local:
80 rm -rf lib/eeze/*.gcno 81 rm -rf lib/eeze/*.gcno
81 rm -rf lib/ephysics/*.gcno 82 rm -rf lib/ephysics/*.gcno
82 rm -rf lib/edje/*.gcno 83 rm -rf lib/edje/*.gcno
84 rm -rf lib/emotion/*.gcno
83 rm -rf modules/eina/mp/pass_through/*.gcno 85 rm -rf modules/eina/mp/pass_through/*.gcno
84 rm -rf modules/eina/mp/one_big/*.gcno 86 rm -rf modules/eina/mp/one_big/*.gcno
85 rm -rf modules/eina/mp/chained_pool/*.gcno 87 rm -rf modules/eina/mp/chained_pool/*.gcno
@@ -119,5 +121,8 @@ clean-local:
119 rm -rf modules/edje/alsa_snd_player/*.gcno 121 rm -rf modules/edje/alsa_snd_player/*.gcno
120 rm -rf modules/edje/eet_snd_reader/*.gcno 122 rm -rf modules/edje/eet_snd_reader/*.gcno
121 rm -rf modules/edje/multisense_factory/*.gcno 123 rm -rf modules/edje/multisense_factory/*.gcno
124 rm -rf modules/emotion/xine/*.gcno
125 rm -rf modules/emotion/gstreamer/*.gcno
126 rm -rf modules/emotion/generic/*.gcno
122 rm -rf static_libs/liblinebreak/*.gcno 127 rm -rf static_libs/liblinebreak/*.gcno
123 rm -rf static_libs/lz4/*.gcno 128 rm -rf static_libs/lz4/*.gcno
diff --git a/src/Makefile_Edje.am b/src/Makefile_Edje.am
index 6539c30087..f120f2cd54 100644
--- a/src/Makefile_Edje.am
+++ b/src/Makefile_Edje.am
@@ -257,3 +257,13 @@ $(EDJE_COMMON_USER_LDADD) \
257@CHECK_LIBS@ 257@CHECK_LIBS@
258 258
259endif 259endif
260
261# Useful to other modules that generate EDJ
262EDJE_CC = EFL_RUN_IN_TREE=1 $(top_builddir)/src/bin/edje/edje_cc
263EDJE_CC_FLAGS_VERBOSE_0 =
264EDJE_CC_FLAGS_VERBOSE_1 = -v
265EDJE_CC_FLAGS = $(EDJE_CC_FLAGS_VERBOSE_$(V)) -id $(srcdir) -fd $(srcdir)
266
267AM_V_EDJ = $(am__v_EDJ_$(V))
268am__v_EDJ_ = $(am__v_EDJ_$(AM_DEFAULT_VERBOSITY))
269am__v_EDJ_0 = @echo " EDJ " $@;
diff --git a/src/Makefile_Emotion.am b/src/Makefile_Emotion.am
new file mode 100644
index 0000000000..94dadc22b8
--- /dev/null
+++ b/src/Makefile_Emotion.am
@@ -0,0 +1,235 @@
1### Library
2
3lib_LTLIBRARIES += \
4lib/emotion/libemotion.la
5
6EMOTION_COMMON_CPPFLAGS = \
7-I$(top_srcdir)/src/lib/eina \
8-I$(top_builddir)/src/lib/eina \
9-I$(top_srcdir)/src/lib/eo \
10-I$(top_builddir)/src/lib/eo \
11-I$(top_srcdir)/src/lib/ecore \
12-I$(top_builddir)/src/lib/ecore \
13-I$(top_srcdir)/src/lib/ecore_x \
14-I$(top_builddir)/src/lib/ecore_x \
15-I$(top_srcdir)/src/lib/ecore_input \
16-I$(top_builddir)/src/lib/ecore_input \
17-I$(top_srcdir)/src/lib/ecore_evas \
18-I$(top_builddir)/src/lib/ecore_evas \
19-I$(top_srcdir)/src/lib/eet \
20-I$(top_builddir)/src/lib/eet \
21-I$(top_srcdir)/src/lib/evas \
22-I$(top_builddir)/src/lib/evas \
23-I$(top_srcdir)/src/lib/eio \
24-I$(top_builddir)/src/lib/eio \
25-I$(top_srcdir)/src/lib/eeze \
26-I$(top_builddir)/src/lib/eeze \
27-I$(top_srcdir)/src/lib/emotion \
28-I$(top_builddir)/src/lib/emotion \
29@EFL_COV_CFLAGS@ \
30@EMOTION_CFLAGS@
31
32EMOTION_COMMON_LDADD = \
33lib/eina/libeina.la \
34lib/eo/libeo.la \
35lib/ecore/libecore.la \
36lib/eet/libeet.la \
37lib/evas/libevas.la \
38lib/eio/libeio.la \
39@EFL_COV_LIBS@
40
41if EMOTION_HAVE_V4L2
42EMOTION_COMMON_LDADD += lib/eeze/libeeze.la
43endif
44
45installed_emotionmainheadersdir = $(includedir)/emotion-@VMAJ@
46dist_installed_emotionmainheaders_DATA = lib/emotion/Emotion.h
47
48# libemotion.la
49lib_emotion_libemotion_la_SOURCES = \
50lib/emotion/emotion_private.h \
51lib/emotion/emotion_smart.c \
52lib/emotion/emotion_main.c
53
54EMOTION_COMMON_LIBADD = $(EMOTION_COMMON_LDADD) @EMOTION_LIBS@
55EMOTION_COMMON_LDADD += @EMOTION_LDFLAGS@
56EMOTION_COMMON_USER_LIBADD = $(EMOTION_COMMON_LIBADD) lib/emotion/libemotion.la
57EMOTION_COMMON_USER_LDADD = $(EMOTION_COMMON_LDADD) lib/emotion/libemotion.la
58
59lib_emotion_libemotion_la_CPPFLAGS = \
60$(EMOTION_COMMON_CPPFLAGS) \
61-DPACKAGE_BIN_DIR=\"$(bindir)\" \
62-DPACKAGE_LIB_DIR=\"$(libdir)\" \
63-DPACKAGE_DATA_DIR=\"$(datadir)/emotion\" \
64-DPACKAGE_BUILD_DIR=\"`pwd`/$(top_builddir)\" \
65-DEFL_EMOTION_BUILD
66
67lib_emotion_libemotion_la_LIBADD = $(EMOTION_COMMON_LIBADD)
68lib_emotion_libemotion_la_LDFLAGS = @EFL_LTLIBRARY_FLAGS@
69
70## Modules
71
72# Xine
73EMOTION_XINE_SOURCES = \
74modules/emotion/xine/emotion_xine.h \
75modules/emotion/xine/emotion_xine.c \
76modules/emotion/xine/emotion_xine_vo_out.c
77
78if EMOTION_STATIC_BUILD_XINE
79lib_emotion_libemotion_la_SOURCES += $(EMOTION_XINE_SOURCES)
80else
81if EMOTION_BUILD_XINE
82emotionmodulexinedir = $(libdir)/emotion/modules/xine/$(MODULE_ARCH)
83emotionmodulexine_LTLIBRARIES = modules/emotion/xine/module.la
84modules_emotion_xine_module_la_SOURCES = $(EMOTION_XINE_SOURCES)
85modules_emotion_xine_module_la_CPPFLAGS = \
86$(EMOTION_COMMON_CPPFLAGS) \
87@EMOTION_MODULE_XINE_CFLAGS@
88modules_emotion_xine_module_la_LIBADD = \
89$(EMOTION_COMMON_USER_LIBADD) \
90@EMOTION_MODULE_XINE_LIBS@
91modules_emotion_xine_module_la_LDFLAGS = -module @EFL_LTMODULE_FLAGS@
92modules_emotion_xine_module_la_LIBTOOLFLAGS = --tag=disable-static
93endif
94endif
95
96# Gstreamer
97EMOTION_GSTREAMER_SOURCES = \
98modules/emotion/gstreamer/emotion_gstreamer.h \
99modules/emotion/gstreamer/emotion_gstreamer.c \
100modules/emotion/gstreamer/emotion_alloc.c \
101modules/emotion/gstreamer/emotion_convert.c \
102modules/emotion/gstreamer/emotion_fakeeos.c \
103modules/emotion/gstreamer/emotion_sink.c
104
105if EMOTION_STATIC_BUILD_GSTREAMER
106lib_emotion_libemotion_la_SOURCES += $(EMOTION_GSTREAMER_SOURCES)
107if HAVE_ECORE_X
108EMOTION_COMMON_LDADD += \
109lib/ecore_evas/libecore_evas.la \
110lib/ecore_x/libecore_x.la
111endif
112else
113if EMOTION_BUILD_GSTREAMER
114emotionmodulegstreamerdir = $(libdir)/emotion/modules/gstreamer/$(MODULE_ARCH)
115emotionmodulegstreamer_LTLIBRARIES = modules/emotion/gstreamer/module.la
116modules_emotion_gstreamer_module_la_SOURCES = $(EMOTION_GSTREAMER_SOURCES)
117modules_emotion_gstreamer_module_la_CPPFLAGS = \
118$(EMOTION_COMMON_CPPFLAGS) \
119@EMOTION_MODULE_GSTREAMER_CFLAGS@
120modules_emotion_gstreamer_module_la_LIBADD = \
121$(EMOTION_COMMON_USER_LIBADD) \
122@EMOTION_MODULE_GSTREAMER_LIBS@
123modules_emotion_gstreamer_module_la_LDFLAGS = -module @EFL_LTMODULE_FLAGS@
124modules_emotion_gstreamer_module_la_LIBTOOLFLAGS = --tag=disable-static
125if HAVE_ECORE_X
126modules_emotion_gstreamer_module_la_LIBADD += \
127lib/ecore_evas/libecore_evas.la \
128lib/ecore_x/libecore_x.la
129endif
130endif
131endif
132
133# Generic
134EMOTION_GENERIC_SOURCES = \
135modules/emotion/generic/emotion_generic.h \
136modules/emotion/generic/emotion_generic.c
137
138if EMOTION_STATIC_BUILD_GENERIC
139lib_emotion_libemotion_la_SOURCES += $(EMOTION_GENERIC_SOURCES)
140else
141if EMOTION_BUILD_GENERIC
142emotionmodulegenericdir = $(libdir)/emotion/modules/generic/$(MODULE_ARCH)
143emotionmodulegeneric_LTLIBRARIES = modules/emotion/generic/module.la
144modules_emotion_generic_module_la_SOURCES = $(EMOTION_GENERIC_SOURCES)
145modules_emotion_generic_module_la_CPPFLAGS = \
146$(EMOTION_COMMON_CPPFLAGS)
147modules_emotion_generic_module_la_LIBADD = \
148$(EMOTION_COMMON_USER_LIBADD)
149modules_emotion_generic_module_la_LDFLAGS = -module @EFL_LTMODULE_FLAGS@
150modules_emotion_generic_module_la_LIBTOOLFLAGS = --tag=disable-static
151endif
152endif
153
154if EMOTION_BUILD_GENERIC
155dist_installed_emotionmainheaders_DATA += \
156modules/emotion/generic/Emotion_Generic_Plugin.h
157endif
158
159
160# Edje_External
161emotionedjeexternaldir = $(libdir)/edje/modules/emotion/$(MODULE_ARCH)
162emotionedjeexternal_LTLIBRARIES = edje_external/emotion/module.la
163
164edje_external_emotion_module_la_SOURCES = \
165edje_external/emotion/emotion.c
166edje_external_emotion_module_la_CPPFLAGS = \
167$(EMOTION_COMMON_CPPFLAGS) \
168$(EDJE_COMMON_CPPFLAGS)
169edje_external_emotion_module_la_LIBADD = \
170$(EMOTION_COMMON_USER_LIBADD) \
171$(EDJE_COMMON_USER_LIBADD)
172edje_external_emotion_module_la_LDFLAGS = -module @EFL_LTMODULE_FLAGS@
173edje_external_emotion_module_la_LIBTOOLFLAGS = --tag=disable-static
174
175### Binary
176
177### Unit tests
178
179if EFL_ENABLE_TESTS
180
181bin_PROGRAMS += tests/emotion/emotion_test
182
183tests_emotion_emotion_test_SOURCES = \
184tests/emotion/emotion_test_main.c
185
186tests_emotion_emotion_test_CPPFLAGS = \
187$(EMOTION_COMMON_CPPFLAGS) \
188-I$(top_srcdir)/src/lib/edje \
189-I$(top_builddir)/src/lib/edje \
190-DPACKAGE_BIN_DIR=\"$(bindir)\" \
191-DPACKAGE_LIB_DIR=\"$(libdir)\" \
192-DPACKAGE_DATA_DIR=\"$(datadir)/emotion\" \
193-DPACKAGE_BUILD_DIR=\"`pwd`/$(top_builddir)\"
194
195tests_emotion_emotion_test_LDADD = \
196$(EMOTION_COMMON_USER_LDADD) \
197lib/edje/libedje.la
198
199tests/emotion/data/theme.edj: tests/emotion/data/theme.edc
200 $(AM_V_EDJ)$(EDJE_CC) $(EDJE_CC_FLAGS) -id $(srcdir)/tests/emotion/data $< $@
201
202EMOTION_DATA_FILES = \
203tests/emotion/data/bpause.png \
204tests/emotion/data/bplay.png \
205tests/emotion/data/bstop.png \
206tests/emotion/data/e_logo.png \
207tests/emotion/data/fr1.png \
208tests/emotion/data/fr2.png \
209tests/emotion/data/fr3.png \
210tests/emotion/data/fr4.png \
211tests/emotion/data/fr5.png \
212tests/emotion/data/fr6.png \
213tests/emotion/data/fr7.png \
214tests/emotion/data/h_slider.png \
215tests/emotion/data/icon.edc \
216tests/emotion/data/knob.png \
217tests/emotion/data/orb.png \
218tests/emotion/data/pnl.png \
219tests/emotion/data/sl.png \
220tests/emotion/data/theme.edc \
221tests/emotion/data/tiles.png \
222tests/emotion/data/video_frame_bottom.png \
223tests/emotion/data/video_frame_left.png \
224tests/emotion/data/video_frame_right.png \
225tests/emotion/data/video_frame_top.png \
226tests/emotion/data/whb.png \
227tests/emotion/data/window_inner_shadow.png
228
229emotiondatafilesdir = $(datadir)/emotion/data
230emotiondatafiles_DATA = tests/emotion/data/theme.edj
231endif
232
233EXTRA_DIST += \
234$(EMOTION_DATA_FILES) \
235modules/emotion/generic/README
diff --git a/src/edje_external/emotion/emotion.c b/src/edje_external/emotion/emotion.c
new file mode 100644
index 0000000000..7ae0a0e5ca
--- /dev/null
+++ b/src/edje_external/emotion/emotion.c
@@ -0,0 +1,516 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <Edje.h>
6
7#include "Emotion.h"
8
9typedef struct _External_Emotion_Params External_Emotion_Params;
10typedef struct _External_Emotion_Signals_Proxy_Context External_Emotion_Signals_Proxy_Context;
11
12struct _External_Emotion_Params
13{
14#define _STR(M) const char *M
15#define _BOOL(M) Eina_Bool M:1; Eina_Bool M##_exists:1
16#define _INT(M) int M; Eina_Bool M##_exists:1
17#define _DOUBLE(M) double M; Eina_Bool M##_exists:1
18 _STR(file);
19 _BOOL(play);
20 _DOUBLE(position);
21 _BOOL(smooth_scale);
22 _DOUBLE(audio_volume);
23 _BOOL(audio_mute);
24 _INT(audio_channel);
25 _BOOL(video_mute);
26 _INT(video_channel);
27 _BOOL(spu_mute);
28 _INT(spu_channel);
29 _INT(chapter);
30 _DOUBLE(play_speed);
31 _DOUBLE(play_length);
32 //_INT(vis);
33#undef _STR
34#undef _BOOL
35#undef _INT
36#undef _DOUBLE
37};
38
39struct _External_Emotion_Signals_Proxy_Context
40{
41 const char *emission;
42 const char *source;
43 Evas_Object *edje;
44};
45
46static int _log_dom = -1;
47#define CRITICAL(...) EINA_LOG_DOM_CRIT(_log_dom, __VA_ARGS__)
48#define ERR(...) EINA_LOG_DOM_ERR(_log_dom, __VA_ARGS__)
49#define WRN(...) EINA_LOG_DOM_WARN(_log_dom, __VA_ARGS__)
50#define INF(...) EINA_LOG_DOM_INFO(_log_dom, __VA_ARGS__)
51#define DBG(...) EINA_LOG_DOM_DBG(_log_dom, __VA_ARGS__)
52
53static const char *_external_emotion_engines[] = {
54#ifdef EMOTION_BUILD_XINE
55 "xine",
56#endif
57#ifdef EMOTION_BUILD_GSTREAMER
58 "gstreamer",
59#endif
60#ifdef EMOTION_BUILD_GENERIC
61 "generic",
62#endif
63 NULL,
64};
65
66static const char _external_emotion_engine_def[] =
67#if defined(EMOTION_BUILD_XINE)
68 "xine";
69#elif defined(EMOTION_BUILD_GSTREAMER)
70 "gstreamer";
71#elif defined(EMOTION_BUILD_GENERIC)
72 "generic";
73#else
74 "impossible";
75#endif
76
77static void
78_external_emotion_signal_proxy_free_cb(void *data, Evas *e EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event_info EINA_UNUSED)
79{
80 External_Emotion_Signals_Proxy_Context *ctxt = data;
81 free(ctxt);
82}
83
84static void
85_external_emotion_signal_proxy_cb(void *data, Evas_Object *obj EINA_UNUSED, void *event_info EINA_UNUSED)
86{
87 External_Emotion_Signals_Proxy_Context *ctxt = data;
88 // TODO: Is it worth to check Evas_Smart_Cb_Description and do something
89 // TODO: with event_info given its description?
90 edje_object_signal_emit(ctxt->edje, ctxt->emission, ctxt->source);
91}
92
93static Evas_Object *
94_external_emotion_add(void *data EINA_UNUSED, Evas *evas, Evas_Object *edje EINA_UNUSED, const Eina_List *params, const char *part_name)
95{
96 const Evas_Smart_Cb_Description **cls_descs, **inst_descs;
97 unsigned int cls_count, inst_count, total;
98 External_Emotion_Signals_Proxy_Context *ctxt;
99 Evas_Object *obj;
100 const char *engine;
101
102 if (!edje_external_param_choice_get(params, "engine", &engine))
103 engine = NULL;
104 if (!engine) engine = _external_emotion_engine_def;
105
106 obj = emotion_object_add(evas);
107 if (!emotion_object_init(obj, engine))
108 {
109 ERR("failed to initialize emotion with engine '%s'.", engine);
110 return NULL;
111 }
112
113 evas_object_smart_callbacks_descriptions_get
114 (obj, &cls_descs, &cls_count, &inst_descs, &inst_count);
115
116 total = cls_count + inst_count;
117 if (!total) goto end;
118 ctxt = malloc(sizeof(External_Emotion_Signals_Proxy_Context) * total);
119 if (!ctxt) goto end;
120 evas_object_event_callback_add
121 (obj, EVAS_CALLBACK_DEL, _external_emotion_signal_proxy_free_cb, ctxt);
122
123 for (; cls_count > 0; cls_count--, cls_descs++, ctxt++)
124 {
125 const Evas_Smart_Cb_Description *d = *cls_descs;
126 ctxt->emission = d->name;
127 ctxt->source = part_name;
128 ctxt->edje = edje;
129 evas_object_smart_callback_add
130 (obj, d->name, _external_emotion_signal_proxy_cb, ctxt);
131 }
132
133 for (; inst_count > 0; inst_count--, inst_descs++, ctxt++)
134 {
135 const Evas_Smart_Cb_Description *d = *inst_descs;
136 ctxt->emission = d->name;
137 ctxt->source = part_name;
138 ctxt->edje = edje;
139 evas_object_smart_callback_add
140 (obj, d->name, _external_emotion_signal_proxy_cb, ctxt);
141 }
142
143 end:
144 return obj;
145}
146
147static void
148_external_emotion_signal(void *data EINA_UNUSED, Evas_Object *obj EINA_UNUSED, const char *signal, const char *source)
149{
150 DBG("External Signal received: '%s' '%s'", signal, source);
151}
152
153static void
154_external_emotion_state_set(void *data EINA_UNUSED, Evas_Object *obj, const void *from_params, const void *to_params, float pos EINA_UNUSED)
155{
156 const External_Emotion_Params *p;
157
158 if (to_params) p = to_params;
159 else if (from_params) p = from_params;
160 else return;
161
162#define _STR(M) if (p->M) emotion_object_##M##_set(obj, p->M)
163#define _BOOL(M) if (p->M##_exists) emotion_object_##M##_set(obj, p->M)
164#define _INT(M) if (p->M##_exists) emotion_object_##M##_set(obj, p->M)
165#define _DOUBLE(M) if (p->M##_exists) emotion_object_##M##_set(obj, p->M)
166 _STR(file);
167 _BOOL(play);
168 //_DOUBLE(position);
169 if (p->position_exists)
170 WRN("position should not be set from state description! Ignored.");
171 _BOOL(smooth_scale);
172 _DOUBLE(audio_volume);
173 _BOOL(audio_mute);
174 _INT(audio_channel);
175 _BOOL(video_mute);
176 _INT(video_channel);
177 _BOOL(spu_mute);
178 _INT(spu_channel);
179 _INT(chapter);
180 _DOUBLE(play_speed);
181 if (p->play_length_exists) ERR("play_length is read-only");
182 //_INT(vis);
183#undef _STR
184#undef _BOOL
185#undef _INT
186#undef _DOUBLE
187}
188
189static Eina_Bool
190_external_emotion_param_set(void *data EINA_UNUSED, Evas_Object *obj, const Edje_External_Param *param)
191{
192 if (!strcmp(param->name, "engine"))
193 {
194 // TODO
195 WRN("engine is a property that can be set only at object creation!");
196 return EINA_FALSE;
197 }
198
199#define _STR(M) \
200 else if (!strcmp(param->name, #M)) \
201 { \
202 if (param->type == EDJE_EXTERNAL_PARAM_TYPE_STRING) \
203 { \
204 emotion_object_##M##_set(obj, param->s); \
205 return EINA_TRUE; \
206 } \
207 }
208#define _BOOL(M) \
209 else if (!strcmp(param->name, #M)) \
210 { \
211 if (param->type == EDJE_EXTERNAL_PARAM_TYPE_BOOL) \
212 { \
213 emotion_object_##M##_set(obj, param->i); \
214 return EINA_TRUE; \
215 } \
216 }
217#define _INT(M) \
218 else if (!strcmp(param->name, #M)) \
219 { \
220 if (param->type == EDJE_EXTERNAL_PARAM_TYPE_INT) \
221 { \
222 emotion_object_##M##_set(obj, param->i); \
223 return EINA_TRUE; \
224 } \
225 }
226#define _DOUBLE(M) \
227 else if (!strcmp(param->name, #M)) \
228 { \
229 if (param->type == EDJE_EXTERNAL_PARAM_TYPE_DOUBLE) \
230 { \
231 emotion_object_##M##_set(obj, param->d); \
232 return EINA_TRUE; \
233 } \
234 }
235
236 if (0) {} // so else if works...
237 _STR(file)
238 _BOOL(play)
239 _DOUBLE(position)
240 _BOOL(smooth_scale)
241 _DOUBLE(audio_volume)
242 _BOOL(audio_mute)
243 _INT(audio_channel)
244 _BOOL(video_mute)
245 _INT(video_channel)
246 _BOOL(spu_mute)
247 _INT(spu_channel)
248 _INT(chapter)
249 _DOUBLE(play_speed)
250 else if (!strcmp(param->name, "play_length"))
251 {
252 ERR("play_length is read-only");
253 return EINA_FALSE;
254 }
255 //_INT(vis);
256#undef _STR
257#undef _BOOL
258#undef _INT
259#undef _DOUBLE
260
261 ERR("unknown parameter '%s' of type '%s'",
262 param->name, edje_external_param_type_str(param->type));
263
264 return EINA_FALSE;
265}
266
267static Eina_Bool
268_external_emotion_param_get(void *data EINA_UNUSED, const Evas_Object *obj, Edje_External_Param *param)
269{
270#define _STR(M) \
271 else if (!strcmp(param->name, #M)) \
272 { \
273 if (param->type == EDJE_EXTERNAL_PARAM_TYPE_STRING) \
274 { \
275 param->s = emotion_object_##M##_get(obj); \
276 return EINA_TRUE; \
277 } \
278 }
279#define _BOOL(M) \
280 else if (!strcmp(param->name, #M)) \
281 { \
282 if (param->type == EDJE_EXTERNAL_PARAM_TYPE_BOOL) \
283 { \
284 param->i = emotion_object_##M##_get(obj); \
285 return EINA_TRUE; \
286 } \
287 }
288#define _INT(M) \
289 else if (!strcmp(param->name, #M)) \
290 { \
291 if (param->type == EDJE_EXTERNAL_PARAM_TYPE_INT) \
292 { \
293 param->i = emotion_object_##M##_get(obj); \
294 return EINA_TRUE; \
295 } \
296 }
297#define _DOUBLE(M) \
298 else if (!strcmp(param->name, #M)) \
299 { \
300 if (param->type == EDJE_EXTERNAL_PARAM_TYPE_DOUBLE) \
301 { \
302 param->d = emotion_object_##M##_get(obj); \
303 return EINA_TRUE; \
304 } \
305 }
306
307 if (0) {} // so else if works...
308 _STR(file)
309 _BOOL(play)
310 _DOUBLE(position)
311 _BOOL(smooth_scale)
312 _DOUBLE(audio_volume)
313 _BOOL(audio_mute)
314 _INT(audio_channel)
315 _BOOL(video_mute)
316 _INT(video_channel)
317 _BOOL(spu_mute)
318 _INT(spu_channel)
319 _INT(chapter)
320 _DOUBLE(play_speed)
321 _DOUBLE(play_length)
322 //_INT(vis)
323#undef _STR
324#undef _BOOL
325#undef _INT
326#undef _DOUBLE
327
328 ERR("unknown parameter '%s' of type '%s'",
329 param->name, edje_external_param_type_str(param->type));
330
331 return EINA_FALSE;
332}
333
334static void *
335_external_emotion_params_parse(void *data EINA_UNUSED, Evas_Object *obj EINA_UNUSED, const Eina_List *params)
336{
337 const Edje_External_Param *param;
338 const Eina_List *l;
339 External_Emotion_Params *p = calloc(1, sizeof(External_Emotion_Params));
340 if (!p) return NULL;
341
342 EINA_LIST_FOREACH(params, l, param)
343 {
344#define _STR(M) \
345 if (!strcmp(param->name, #M)) p->M = eina_stringshare_add(param->s)
346#define _BOOL(M) \
347 if (!strcmp(param->name, #M)) \
348 { \
349 p->M = param->i; \
350 p->M##_exists = EINA_TRUE; \
351 }
352#define _INT(M) \
353 if (!strcmp(param->name, #M)) \
354 { \
355 p->M = param->i; \
356 p->M##_exists = EINA_TRUE; \
357 }
358#define _DOUBLE(M) \
359 if (!strcmp(param->name, #M)) \
360 { \
361 p->M = param->d; \
362 p->M##_exists = EINA_TRUE; \
363 }
364
365 _STR(file);
366 _BOOL(play);
367 _DOUBLE(position);
368 _BOOL(smooth_scale);
369 _DOUBLE(audio_volume);
370 _BOOL(audio_mute);
371 _INT(audio_channel);
372 _BOOL(video_mute);
373 _INT(video_channel);
374 _BOOL(spu_mute);
375 _INT(spu_channel);
376 _INT(chapter);
377 _DOUBLE(play_speed);
378 _DOUBLE(play_length);
379 //_INT(vis);
380#undef _STR
381#undef _BOOL
382#undef _INT
383#undef _DOUBLE
384 }
385
386 return p;
387}
388
389static void
390_external_emotion_params_free(void *params)
391{
392 External_Emotion_Params *p = params;
393
394#define _STR(M) eina_stringshare_del(p->M)
395#define _BOOL(M) do {} while (0)
396#define _INT(M) do {} while (0)
397#define _DOUBLE(M) do {} while (0)
398 _STR(file);
399 _BOOL(play);
400 _DOUBLE(position);
401 _BOOL(smooth_scale);
402 _DOUBLE(audio_volume);
403 _BOOL(audio_mute);
404 _INT(audio_channel);
405 _BOOL(video_mute);
406 _INT(video_channel);
407 _BOOL(spu_mute);
408 _INT(spu_channel);
409 _INT(chapter);
410 _DOUBLE(play_speed);
411 _DOUBLE(play_length);
412 //_INT(vis);
413#undef _STR
414#undef _BOOL
415#undef _INT
416#undef _DOUBLE
417 free(p);
418}
419
420static const char *
421_external_emotion_label_get(void *data EINA_UNUSED)
422{
423 return "Emotion";
424}
425
426static Evas_Object *
427_external_emotion_icon_add(void *data EINA_UNUSED, Evas *e)
428{
429 Evas_Object *ic;
430 int w = 0, h = 0;
431
432 ic = edje_object_add(e);
433 edje_object_file_set(ic, PACKAGE_DATA_DIR"/data/icon.edj", "icon");
434 edje_object_size_min_get(ic, &w, &h);
435 if (w < 1) w = 20;
436 if (h < 1) h = 10;
437 evas_object_size_hint_min_set(ic, w, h);
438 evas_object_size_hint_max_set(ic, w, h);
439
440 return ic;
441}
442
443static const char *
444_external_emotion_translate(void *data EINA_UNUSED, const char *orig)
445{
446 // in future, mark all params as translatable and use dgettext()
447 // with "emotion" text domain here.
448 return orig;
449}
450
451static Edje_External_Param_Info _external_emotion_params[] = {
452 EDJE_EXTERNAL_PARAM_INFO_CHOICE_FULL
453 ("engine", _external_emotion_engine_def, _external_emotion_engines),
454 EDJE_EXTERNAL_PARAM_INFO_STRING("file"),
455 EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("play", EINA_FALSE),
456 EDJE_EXTERNAL_PARAM_INFO_DOUBLE("position"),
457 EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("smooth_scale", EINA_FALSE),
458 EDJE_EXTERNAL_PARAM_INFO_DOUBLE_DEFAULT("audio_volume", 0.9),
459 EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("audio_mute", EINA_FALSE),
460 EDJE_EXTERNAL_PARAM_INFO_INT_DEFAULT("audio_channel", 0),
461 EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("video_mute", EINA_FALSE),
462 EDJE_EXTERNAL_PARAM_INFO_INT_DEFAULT("video_channel", 0),
463 EDJE_EXTERNAL_PARAM_INFO_BOOL_DEFAULT("spu_mute", EINA_FALSE),
464 EDJE_EXTERNAL_PARAM_INFO_INT_DEFAULT("spu_channel", 0),
465 EDJE_EXTERNAL_PARAM_INFO_INT("chapter"),
466 EDJE_EXTERNAL_PARAM_INFO_DOUBLE_DEFAULT("play_speed", 1.0),
467 EDJE_EXTERNAL_PARAM_INFO_DOUBLE("play_length"),
468 //EDJE_EXTERNAL_PARAM_INFO_CHOICE_FULL("vis", ...),
469 EDJE_EXTERNAL_PARAM_INFO_SENTINEL
470};
471
472static const Edje_External_Type _external_emotion_type = {
473 .abi_version = EDJE_EXTERNAL_TYPE_ABI_VERSION,
474 .module = "emotion",
475 .module_name = "Emotion",
476 .add = _external_emotion_add,
477 .state_set = _external_emotion_state_set,
478 .signal_emit = _external_emotion_signal,
479 .param_set = _external_emotion_param_set,
480 .param_get = _external_emotion_param_get,
481 .params_parse = _external_emotion_params_parse,
482 .params_free = _external_emotion_params_free,
483 .label_get = _external_emotion_label_get,
484 .description_get = NULL,
485 .icon_add = _external_emotion_icon_add,
486 .preview_add = NULL,
487 .translate = _external_emotion_translate,
488 .parameters_info = _external_emotion_params,
489 .data = NULL
490};
491
492static Edje_External_Type_Info _external_emotion_types[] =
493{
494 {"emotion", &_external_emotion_type},
495 {NULL, NULL}
496};
497
498static Eina_Bool
499external_emotion_mod_init(void)
500{
501 _log_dom = eina_log_domain_register
502 ("emotion-externals", EINA_COLOR_LIGHTBLUE);
503 edje_external_type_array_register(_external_emotion_types);
504 return EINA_TRUE;
505}
506
507static void
508external_emotion_mod_shutdown(void)
509{
510 edje_external_type_array_unregister(_external_emotion_types);
511 eina_log_domain_unregister(_log_dom);
512 _log_dom = -1;
513}
514
515EINA_MODULE_INIT(external_emotion_mod_init);
516EINA_MODULE_SHUTDOWN(external_emotion_mod_shutdown);
diff --git a/src/examples/Makefile.am b/src/examples/Makefile.am
index d46cfedf64..610627764d 100644
--- a/src/examples/Makefile.am
+++ b/src/examples/Makefile.am
@@ -1,6 +1,6 @@
1MAINTAINERCLEANFILES = Makefile.in 1MAINTAINERCLEANFILES = Makefile.in
2 2
3SUBDIRS = eina eo eet evas ecore eio edbus ephysics edje 3SUBDIRS = eina eo eet evas ecore eio edbus ephysics edje emotion
4 4
5.PHONY: examples install-examples 5.PHONY: examples install-examples
6 6
diff --git a/src/examples/emotion/Makefile.am b/src/examples/emotion/Makefile.am
new file mode 100644
index 0000000000..b1bf86e654
--- /dev/null
+++ b/src/examples/emotion/Makefile.am
@@ -0,0 +1,57 @@
1MAINTAINERCLEANFILES = Makefile.in
2
3AM_CPPFLAGS = \
4-I$(top_srcdir)/src/lib/eina \
5-I$(top_srcdir)/src/lib/eo \
6-I$(top_srcdir)/src/lib/evas \
7-I$(top_srcdir)/src/lib/ecore \
8-I$(top_srcdir)/src/lib/ecore_evas \
9-I$(top_srcdir)/src/lib/emotion \
10-I$(top_builddir)/src/lib/eina \
11-I$(top_builddir)/src/lib/eo \
12-I$(top_builddir)/src/lib/evas \
13-I$(top_builddir)/src/lib/ecore \
14-I$(top_builddir)/src/lib/ecore_evas \
15-I$(top_builddir)/src/lib/emotion \
16@EMOTION_CFLAGS@
17
18LDADD = \
19$(top_builddir)/src/lib/eina/libeina.la \
20$(top_builddir)/src/lib/eo/libeo.la \
21$(top_builddir)/src/lib/evas/libevas.la \
22$(top_builddir)/src/lib/ecore/libecore.la \
23$(top_builddir)/src/lib/ecore_evas/libecore_evas.la \
24$(top_builddir)/src/lib/emotion/libemotion.la \
25@EMOTION_LDFLAGS@
26
27EXTRA_PROGRAMS = \
28emotion_basic_example \
29emotion_generic_example \
30emotion_generic_subtitle_example \
31emotion_border_example \
32emotion_signals_example
33
34SRCS = \
35emotion_basic_example.c \
36emotion_generic_example.c \
37emotion_generic_subtitle_example.c \
38emotion_border_example.c \
39emotion_signals_example.c
40
41examples: $(EXTRA_PROGRAMS)
42
43clean-local:
44 rm -f $(EXTRA_PROGRAMS)
45
46install-examples:
47 mkdir -p $(datadir)/emotion/examples
48 $(install_sh_DATA) -c $(SRCS) $(datadir)/emotion/examples
49
50uninstall-local:
51 for f in $(SRCS); do \
52 rm -f $(datadir)/emotion/examples/$$f ; \
53 done
54
55if ALWAYS_BUILD_EXAMPLES
56noinst_PROGRAMS = $(EXTRA_PROGRAMS)
57endif
diff --git a/src/examples/emotion/emotion_basic_example.c b/src/examples/emotion/emotion_basic_example.c
new file mode 100644
index 0000000000..7e3e4c2e4b
--- /dev/null
+++ b/src/examples/emotion/emotion_basic_example.c
@@ -0,0 +1,81 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6
7#define WIDTH (320)
8#define HEIGHT (240)
9
10static void
11_playback_started_cb(void *data, Evas_Object *o, void *event_info)
12{
13 printf("Emotion object started playback.\n");
14}
15
16int
17main(int argc, const char *argv[])
18{
19 Ecore_Evas *ee;
20 Evas *e;
21 Evas_Object *bg, *em;
22 const char *filename = NULL;
23
24 if (argc < 2)
25 {
26 printf("One argument is necessary. Usage:\n");
27 printf("\t%s <filename>\n", argv[0]);
28 }
29
30 filename = argv[1];
31
32 if (!ecore_evas_init())
33 return EXIT_FAILURE;
34
35 /* this will give you a window with an Evas canvas under the first
36 * engine available */
37 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
38 if (!ee)
39 goto error;
40
41 ecore_evas_show(ee);
42
43 /* the canvas pointer, de facto */
44 e = ecore_evas_get(ee);
45
46 /* adding a background to this example */
47 bg = evas_object_rectangle_add(e);
48 evas_object_name_set(bg, "our dear rectangle");
49 evas_object_color_set(bg, 255, 255, 255, 255); /* white bg */
50 evas_object_move(bg, 0, 0); /* at canvas' origin */
51 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
52 evas_object_show(bg);
53
54 /* Creating the emotion object */
55 em = emotion_object_add(e);
56 emotion_object_init(em, NULL);
57
58 evas_object_smart_callback_add(
59 em, "playback_started", _playback_started_cb, NULL);
60
61 emotion_object_file_set(em, filename);
62
63 evas_object_move(em, 0, 0);
64 evas_object_resize(em, WIDTH, HEIGHT);
65 evas_object_show(em);
66
67 emotion_object_play_set(em, EINA_TRUE);
68
69 ecore_main_loop_begin();
70
71 ecore_evas_free(ee);
72 ecore_evas_shutdown();
73 return 0;
74
75error:
76 fprintf(stderr, "you got to have at least one evas engine built and linked"
77 " up to ecore-evas for this example to run properly.\n");
78
79 ecore_evas_shutdown();
80 return -1;
81}
diff --git a/src/examples/emotion/emotion_border_example.c b/src/examples/emotion/emotion_border_example.c
new file mode 100644
index 0000000000..9df53f4333
--- /dev/null
+++ b/src/examples/emotion/emotion_border_example.c
@@ -0,0 +1,238 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6#include <string.h>
7
8#define WIDTH (320)
9#define HEIGHT (240)
10
11static Eina_List *filenames = NULL;
12static Eina_List *curfile = NULL;
13
14static void
15_playback_started_cb(void *data, Evas_Object *o, void *event_info)
16{
17 printf("Emotion object started playback.\n");
18}
19
20static Evas_Object *
21_create_emotion_object(Evas *e)
22{
23 Evas_Object *em = emotion_object_add(e);
24
25 emotion_object_init(em, "gstreamer");
26
27 evas_object_smart_callback_add(
28 em, "playback_started", _playback_started_cb, NULL);
29
30 return em;
31}
32
33static void
34_on_key_down(void *data, Evas *e, Evas_Object *o, void *event_info)
35{
36 Evas_Event_Key_Down *ev = event_info;
37 Evas_Object *em = data;
38
39 if (!strcmp(ev->keyname, "Return"))
40 {
41 emotion_object_play_set(em, EINA_TRUE);
42 }
43 else if (!strcmp(ev->keyname, "space"))
44 {
45 emotion_object_play_set(em, EINA_FALSE);
46 }
47 else if (!strcmp(ev->keyname, "Escape"))
48 {
49 ecore_main_loop_quit();
50 }
51 else if (!strcmp(ev->keyname, "n"))
52 {
53 const char *file;
54 if (!curfile)
55 curfile = filenames;
56 else
57 curfile = eina_list_next(curfile);
58 file = eina_list_data_get(curfile);
59 fprintf(stderr, "playing next file: %s\n", file);
60 emotion_object_file_set(em, file);
61 }
62 else if (!strcmp(ev->keyname, "p"))
63 {
64 const char *file;
65 if (!curfile)
66 curfile = eina_list_last(filenames);
67 else
68 curfile = eina_list_prev(curfile);
69 file = eina_list_data_get(curfile);
70 fprintf(stderr, "playing next file: %s\n", file);
71 emotion_object_file_set(em, file);
72 }
73 else if (!strcmp(ev->keyname, "b"))
74 {
75 emotion_object_border_set(em, 0, 0, 50, 50);
76 }
77 else if (!strcmp(ev->keyname, "0"))
78 {
79 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_NONE);
80 }
81 else if (!strcmp(ev->keyname, "w"))
82 {
83 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_WIDTH);
84 }
85 else if (!strcmp(ev->keyname, "h"))
86 {
87 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_HEIGHT);
88 }
89 else if (!strcmp(ev->keyname, "2"))
90 {
91 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_BOTH);
92 }
93 else if (!strcmp(ev->keyname, "c"))
94 {
95 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_CROP);
96 }
97 else
98 {
99 fprintf(stderr, "unhandled key: %s\n", ev->keyname);
100 }
101}
102
103static void
104_frame_decode_cb(void *data, Evas_Object *o, void *event_info)
105{
106 // fprintf(stderr, "smartcb: frame_decode\n");
107}
108
109static void
110_length_change_cb(void *data, Evas_Object *o, void *event_info)
111{
112 fprintf(stderr, "smartcb: length_change: %0.3f\n", emotion_object_play_length_get(o));
113}
114
115static void
116_position_update_cb(void *data, Evas_Object *o, void *event_info)
117{
118 fprintf(stderr, "smartcb: position_update: %0.3f\n", emotion_object_position_get(o));
119}
120
121static void
122_progress_change_cb(void *data, Evas_Object *o, void *event_info)
123{
124 fprintf(stderr, "smartcb: progress_change: %0.3f, %s\n",
125 emotion_object_progress_status_get(o),
126 emotion_object_progress_info_get(o));
127}
128
129static void
130_frame_resize_cb(void *data, Evas_Object *o, void *event_info)
131{
132 int w, h;
133 emotion_object_size_get(o, &w, &h);
134 fprintf(stderr, "smartcb: frame_resize: %dx%d\n", w, h);
135}
136
137static void /* adjust canvas' contents on resizes */
138_canvas_resize_cb(Ecore_Evas *ee)
139{
140 int w, h;
141 Evas_Object *bg, *em;
142
143 ecore_evas_geometry_get(ee, NULL, NULL, &w, &h);
144
145 bg = ecore_evas_data_get(ee, "bg");
146 em = ecore_evas_data_get(ee, "emotion");
147
148 evas_object_resize(bg, w, h);
149 evas_object_move(em, 10, 10);
150 evas_object_resize(em, w - 20, h - 20);
151}
152
153int
154main(int argc, const char *argv[])
155{
156 Ecore_Evas *ee;
157 Evas *e;
158 Evas_Object *bg, *em;
159 int i;
160
161 if (argc < 2)
162 {
163 printf("One argument is necessary. Usage:\n");
164 printf("\t%s <filename>\n", argv[0]);
165 }
166
167 eina_init();
168 for (i = 1; i < argc; i++)
169 filenames = eina_list_append(filenames, eina_stringshare_add(argv[i]));
170
171 curfile = filenames;
172
173 if (!ecore_evas_init())
174 return EXIT_FAILURE;
175
176 /* this will give you a window with an Evas canvas under the first
177 * engine available */
178 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
179 if (!ee)
180 goto error;
181
182 ecore_evas_callback_resize_set(ee, _canvas_resize_cb);
183
184 ecore_evas_show(ee);
185
186 /* the canvas pointer, de facto */
187 e = ecore_evas_get(ee);
188
189 /* adding a background to this example */
190 bg = evas_object_rectangle_add(e);
191 evas_object_name_set(bg, "our dear rectangle");
192 evas_object_color_set(bg, 255, 0, 0, 255); /* red bg */
193 evas_object_move(bg, 0, 0); /* at canvas' origin */
194 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
195 evas_object_show(bg);
196
197 ecore_evas_data_set(ee, "bg", bg);
198
199 /* Creating the emotion object */
200 em = _create_emotion_object(e);
201 emotion_object_file_set(em, eina_list_data_get(curfile));
202 evas_object_move(em, 10, 10);
203 evas_object_resize(em, WIDTH, HEIGHT);
204 evas_object_resize(em, WIDTH - 20, HEIGHT - 20);
205 emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_BOTH);
206 emotion_object_bg_color_set(em, 0, 128, 0, 255);
207 evas_object_show(em);
208
209 ecore_evas_data_set(ee, "emotion", em);
210
211 evas_object_smart_callback_add(em, "frame_decode", _frame_decode_cb, NULL);
212 evas_object_smart_callback_add(em, "length_change", _length_change_cb, NULL);
213 evas_object_smart_callback_add(em, "position_update", _position_update_cb, NULL);
214 evas_object_smart_callback_add(em, "progress_change", _progress_change_cb, NULL);
215 evas_object_smart_callback_add(em, "frame_resize", _frame_resize_cb, NULL);
216
217 evas_object_event_callback_add(bg, EVAS_CALLBACK_KEY_DOWN, _on_key_down, em);
218 evas_object_focus_set(bg, EINA_TRUE);
219
220 emotion_object_play_set(em, EINA_TRUE);
221
222 ecore_main_loop_begin();
223
224 ecore_evas_free(ee);
225 ecore_evas_shutdown();
226 return 0;
227
228error:
229 fprintf(stderr, "you got to have at least one evas engine built and linked"
230 " up to ecore-evas for this example to run properly.\n");
231
232 EINA_LIST_FREE(filenames, curfile)
233 eina_stringshare_del(eina_list_data_get(curfile));
234
235 ecore_evas_shutdown();
236 eina_shutdown();
237 return -1;
238}
diff --git a/src/examples/emotion/emotion_generic_example.c b/src/examples/emotion/emotion_generic_example.c
new file mode 100644
index 0000000000..b8382862d5
--- /dev/null
+++ b/src/examples/emotion/emotion_generic_example.c
@@ -0,0 +1,233 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6#include <string.h>
7#include <unistd.h>
8
9#define WIDTH (320)
10#define HEIGHT (240)
11
12static Eina_List *filenames = NULL;
13static Eina_List *curfile = NULL;
14
15static void
16_playback_started_cb(void *data, Evas_Object *o, void *event_info)
17{
18 printf("Emotion object started playback.\n");
19}
20
21static void
22_playback_stopped_cb(void *data, Evas_Object *o, void *event_info)
23{
24 printf("Emotion playback stopped.\n");
25 emotion_object_play_set(o, EINA_FALSE);
26 emotion_object_position_set(o, 0);
27}
28
29static Evas_Object *
30_create_emotion_object(Evas *e)
31{
32 Evas_Object *em = emotion_object_add(e);
33
34 emotion_object_init(em, "generic");
35
36 evas_object_smart_callback_add(
37 em, "playback_started", _playback_started_cb, NULL);
38 evas_object_smart_callback_add(
39 em, "playback_finished", _playback_stopped_cb, NULL);
40
41 return em;
42}
43
44static void
45_on_key_down(void *data, Evas *e, Evas_Object *o, void *event_info)
46{
47 Evas_Event_Key_Down *ev = event_info;
48 Evas_Object *em = data;
49
50 if (!strcmp(ev->keyname, "Return"))
51 {
52 emotion_object_play_set(em, EINA_TRUE);
53 }
54 else if (!strcmp(ev->keyname, "space"))
55 {
56 emotion_object_play_set(em, EINA_FALSE);
57 }
58 else if (!strcmp(ev->keyname, "Escape"))
59 {
60 ecore_main_loop_quit();
61 }
62 else if (!strcmp(ev->keyname, "t"))
63 {
64 int w, h;
65 emotion_object_size_get(em, &w, &h);
66 fprintf(stderr, "example -> size: %dx%d\n", w, h);
67 }
68 else if (!strcmp(ev->keyname, "s"))
69 {
70 float len, pos;
71 len = emotion_object_play_length_get(em);
72 pos = 0.98 * len;
73 fprintf(stderr, "skipping to position %0.3f\n", pos);
74 emotion_object_position_set(em, pos);
75 }
76 else if (!strcmp(ev->keyname, "1"))
77 {
78 fprintf(stderr, "setting speed to 1.0\n");
79 emotion_object_play_speed_set(em, 1.0);
80 }
81 else if (!strcmp(ev->keyname, "2"))
82 {
83 fprintf(stderr, "setting speed to 2.0\n");
84 emotion_object_play_speed_set(em, 2.0);
85 }
86 else if (!strcmp(ev->keyname, "n"))
87 {
88 const char *file;
89 if (!curfile)
90 curfile = filenames;
91 else
92 curfile = eina_list_next(curfile);
93 file = eina_list_data_get(curfile);
94 fprintf(stderr, "playing next file: %s\n", file);
95 emotion_object_file_set(em, file);
96 }
97 else if (!strcmp(ev->keyname, "p"))
98 {
99 const char *file;
100 if (!curfile)
101 curfile = eina_list_last(filenames);
102 else
103 curfile = eina_list_prev(curfile);
104 file = eina_list_data_get(curfile);
105 fprintf(stderr, "playing next file: %s\n", file);
106 emotion_object_file_set(em, file);
107 }
108 else if (!strcmp(ev->keyname, "d"))
109 {
110 evas_object_del(em);
111 }
112 else if (!strcmp(ev->keyname, "l"))
113 {
114 // force frame dropping
115 sleep(5);
116 }
117 else
118 {
119 fprintf(stderr, "unhandled key: %s\n", ev->keyname);
120 }
121}
122
123static void
124_frame_decode_cb(void *data, Evas_Object *o, void *event_info)
125{
126 // fprintf(stderr, "smartcb: frame_decode\n");
127}
128
129static void
130_length_change_cb(void *data, Evas_Object *o, void *event_info)
131{
132 fprintf(stderr, "smartcb: length_change: %0.3f\n", emotion_object_play_length_get(o));
133}
134
135static void
136_position_update_cb(void *data, Evas_Object *o, void *event_info)
137{
138 fprintf(stderr, "smartcb: position_update: %0.3f\n", emotion_object_position_get(o));
139}
140
141static void
142_progress_change_cb(void *data, Evas_Object *o, void *event_info)
143{
144 fprintf(stderr, "smartcb: progress_change: %0.3f, %s\n",
145 emotion_object_progress_status_get(o),
146 emotion_object_progress_info_get(o));
147}
148
149static void
150_frame_resize_cb(void *data, Evas_Object *o, void *event_info)
151{
152 int w, h;
153 emotion_object_size_get(o, &w, &h);
154 fprintf(stderr, "smartcb: frame_resize: %dx%d\n", w, h);
155}
156
157int
158main(int argc, const char *argv[])
159{
160 Ecore_Evas *ee;
161 Evas *e;
162 Evas_Object *bg, *em;
163 int i;
164
165 if (argc < 2)
166 {
167 printf("One argument is necessary. Usage:\n");
168 printf("\t%s <filename>\n", argv[0]);
169 }
170
171 eina_init();
172 for (i = 1; i < argc; i++)
173 filenames = eina_list_append(filenames, eina_stringshare_add(argv[i]));
174
175 curfile = filenames;
176
177 if (!ecore_evas_init())
178 return EXIT_FAILURE;
179
180 /* this will give you a window with an Evas canvas under the first
181 * engine available */
182 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
183 if (!ee)
184 goto error;
185
186 ecore_evas_show(ee);
187
188 /* the canvas pointer, de facto */
189 e = ecore_evas_get(ee);
190
191 /* adding a background to this example */
192 bg = evas_object_rectangle_add(e);
193 evas_object_name_set(bg, "our dear rectangle");
194 evas_object_color_set(bg, 255, 255, 255, 255); /* white bg */
195 evas_object_move(bg, 0, 0); /* at canvas' origin */
196 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
197 evas_object_show(bg);
198
199 /* Creating the emotion object */
200 em = _create_emotion_object(e);
201 emotion_object_file_set(em, eina_list_data_get(curfile));
202 evas_object_move(em, 0, 0);
203 evas_object_resize(em, WIDTH, HEIGHT);
204 evas_object_show(em);
205
206 evas_object_smart_callback_add(em, "frame_decode", _frame_decode_cb, NULL);
207 evas_object_smart_callback_add(em, "length_change", _length_change_cb, NULL);
208 evas_object_smart_callback_add(em, "position_update", _position_update_cb, NULL);
209 evas_object_smart_callback_add(em, "progress_change", _progress_change_cb, NULL);
210 evas_object_smart_callback_add(em, "frame_resize", _frame_resize_cb, NULL);
211
212 evas_object_event_callback_add(bg, EVAS_CALLBACK_KEY_DOWN, _on_key_down, em);
213 evas_object_focus_set(bg, EINA_TRUE);
214
215 emotion_object_play_set(em, EINA_TRUE);
216
217 ecore_main_loop_begin();
218
219 ecore_evas_free(ee);
220 ecore_evas_shutdown();
221 return 0;
222
223error:
224 fprintf(stderr, "you got to have at least one evas engine built and linked"
225 " up to ecore-evas for this example to run properly.\n");
226
227 EINA_LIST_FREE(filenames, curfile)
228 eina_stringshare_del(eina_list_data_get(curfile));
229
230 ecore_evas_shutdown();
231 eina_shutdown();
232 return -1;
233}
diff --git a/src/examples/emotion/emotion_generic_subtitle_example.c b/src/examples/emotion/emotion_generic_subtitle_example.c
new file mode 100644
index 0000000000..448b505449
--- /dev/null
+++ b/src/examples/emotion/emotion_generic_subtitle_example.c
@@ -0,0 +1,97 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6
7#define WIDTH (320)
8#define HEIGHT (240)
9
10static void
11_playback_started_cb(void *data, Evas_Object *o, void *event_info)
12{
13 printf("Emotion object started playback.\n");
14}
15
16static void
17_on_delete(Ecore_Evas *ee)
18{
19 ecore_main_loop_quit();
20}
21
22int
23main(int argc, const char *argv[])
24{
25 Ecore_Evas *ee;
26 Evas *e;
27 Evas_Object *bg, *em;
28 const char *filename = NULL;
29 const char *subtitle_filename = NULL;
30
31 if (argc < 2)
32 {
33 printf("At least one argument is necessary. Usage:\n");
34 printf("\t%s <filename> <subtitle filename>\n", argv[0]);
35 return -1;
36 }
37
38 filename = argv[1];
39
40 if (argc > 2)
41 subtitle_filename = argv[2];
42
43 if (!ecore_evas_init())
44 return EXIT_FAILURE;
45
46 /* this will give you a window with an Evas canvas under the first
47 * engine available */
48 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
49 if (!ee)
50 goto error;
51
52 ecore_evas_callback_delete_request_set(ee, _on_delete);
53
54 ecore_evas_show(ee);
55
56 /* the canvas pointer, de facto */
57 e = ecore_evas_get(ee);
58
59 /* adding a background to this example */
60 bg = evas_object_rectangle_add(e);
61 evas_object_name_set(bg, "our dear rectangle");
62 evas_object_color_set(bg, 255, 255, 255, 255); /* white bg */
63 evas_object_move(bg, 0, 0); /* at canvas' origin */
64 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
65 evas_object_show(bg);
66
67 /* Creating the emotion object */
68 em = emotion_object_add(e);
69 emotion_object_init(em, "generic");
70
71 if (subtitle_filename)
72 emotion_object_video_subtitle_file_set(em, subtitle_filename);
73
74 evas_object_smart_callback_add(
75 em, "playback_started", _playback_started_cb, NULL);
76
77 emotion_object_file_set(em, filename);
78
79 evas_object_move(em, 0, 0);
80 evas_object_resize(em, WIDTH, HEIGHT);
81 evas_object_show(em);
82
83 emotion_object_play_set(em, EINA_TRUE);
84
85 ecore_main_loop_begin();
86
87 ecore_evas_free(ee);
88 ecore_evas_shutdown();
89 return 0;
90
91error:
92 fprintf(stderr, "you got to have at least one evas engine built and linked"
93 " up to ecore-evas for this example to run properly.\n");
94
95 ecore_evas_shutdown();
96 return -1;
97}
diff --git a/src/examples/emotion/emotion_signals_example.c b/src/examples/emotion/emotion_signals_example.c
new file mode 100644
index 0000000000..2469c468ba
--- /dev/null
+++ b/src/examples/emotion/emotion_signals_example.c
@@ -0,0 +1,173 @@
1#include <Ecore.h>
2#include <Ecore_Evas.h>
3#include <Evas.h>
4#include <Emotion.h>
5#include <stdio.h>
6
7#define WIDTH (320)
8#define HEIGHT (240)
9
10static void
11_display_info(Evas_Object *o)
12{
13 int w, h;
14 printf("playing: %d\n", emotion_object_play_get(o));
15 printf("meta title: %s\n",
16 emotion_object_meta_info_get(o, EMOTION_META_INFO_TRACK_TITLE));
17 printf("seek position: %0.3f\n",
18 emotion_object_position_get(o));
19 printf("play length: %0.3f\n",
20 emotion_object_play_length_get(o));
21 printf("is seekable: %d\n",
22 emotion_object_seekable_get(o));
23 emotion_object_size_get(o, &w, &h);
24 printf("video geometry: %dx%d\n", w, h);
25 printf("video width / height ratio: %0.3f\n",
26 emotion_object_ratio_get(o));
27 printf("\n");
28}
29
30static void
31_playback_started_cb(void *data, Evas_Object *o, void *event_info)
32{
33 printf(">>> Emotion object started playback.\n");
34 _display_info(o);
35}
36
37static void
38_playback_finished_cb(void *data, Evas_Object *o, void *event_info)
39{
40 printf(">>> Emotion object finished playback.\n");
41 _display_info(o);
42}
43
44static void
45_open_done_cb(void *data, Evas_Object *o, void *event_info)
46{
47 printf(">>> Emotion object open done.\n");
48 _display_info(o);
49}
50
51static void
52_position_update_cb(void *data, Evas_Object *o, void *event_info)
53{
54 printf(">>> Emotion object first position update.\n");
55 evas_object_smart_callback_del(o, "position_update", _position_update_cb);
56 _display_info(o);
57}
58
59static void
60_frame_decode_cb(void *data, Evas_Object *o, void *event_info)
61{
62 printf(">>> Emotion object first frame decode.\n");
63 evas_object_smart_callback_del(o, "frame_decode", _frame_decode_cb);
64 _display_info(o);
65}
66
67static void
68_decode_stop_cb(void *data, Evas_Object *o, void *event_info)
69{
70 printf(">>> Emotion object decode stop.\n");
71 _display_info(o);
72}
73
74static void
75_frame_resize_cb(void *data, Evas_Object *o, void *event_info)
76{
77 printf(">>> Emotion object frame resize.\n");
78 _display_info(o);
79}
80
81static void
82_setup_emotion_callbacks(Evas_Object *o)
83{
84 evas_object_smart_callback_add(
85 o, "playback_started", _playback_started_cb, NULL);
86 evas_object_smart_callback_add(
87 o, "playback_finished", _playback_finished_cb, NULL);
88 evas_object_smart_callback_add(
89 o, "open_done", _open_done_cb, NULL);
90 evas_object_smart_callback_add(
91 o, "position_update", _position_update_cb, NULL);
92 evas_object_smart_callback_add(
93 o, "frame_decode", _frame_decode_cb, NULL);
94 evas_object_smart_callback_add(
95 o, "decode_stop", _decode_stop_cb, NULL);
96 evas_object_smart_callback_add(
97 o, "frame_resize", _frame_resize_cb, NULL);
98}
99
100int
101main(int argc, const char *argv[])
102{
103 Ecore_Evas *ee;
104 Evas *e;
105 Evas_Object *bg, *em;
106 const char *filename = NULL;
107 const char *module = NULL;
108
109 if (argc < 2)
110 {
111 printf("At least one argument is necessary. Usage:\n");
112 printf("\t%s <filename> [module_name]\n", argv[0]);
113 goto error;
114 }
115
116 filename = argv[1];
117
118 if (argc >= 3)
119 module = argv[2];
120
121 if (!ecore_evas_init())
122 return EXIT_FAILURE;
123
124 /* this will give you a window with an Evas canvas under the first
125 * engine available */
126 ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL);
127 if (!ee)
128 goto error;
129
130 ecore_evas_show(ee);
131
132 /* the canvas pointer, de facto */
133 e = ecore_evas_get(ee);
134
135 /* adding a background to this example */
136 bg = evas_object_rectangle_add(e);
137 evas_object_name_set(bg, "our dear rectangle");
138 evas_object_color_set(bg, 255, 255, 255, 255); /* white bg */
139 evas_object_move(bg, 0, 0); /* at canvas' origin */
140 evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */
141 evas_object_show(bg);
142
143 /* Creating the emotion object */
144 em = emotion_object_add(e);
145
146 /* Try to load the specified module - NULL for auto-discover */
147 if (!emotion_object_init(em, module))
148 fprintf(stderr, "Emotion: \"%s\" module could not be initialized.\n", module);
149
150 _display_info(em);
151 _setup_emotion_callbacks(em);
152
153 if (!emotion_object_file_set(em, filename))
154 fprintf(stderr, "Emotion: Could not load the file \"%s\"\n", filename);
155
156 evas_object_move(em, 0, 0);
157 evas_object_resize(em, WIDTH, HEIGHT);
158 evas_object_show(em);
159
160 emotion_object_play_set(em, EINA_TRUE);
161
162 ecore_main_loop_begin();
163
164 ecore_evas_free(ee);
165 ecore_evas_shutdown();
166 return 0;
167
168 ecore_evas_free(ee);
169
170error:
171 ecore_evas_shutdown();
172 return -1;
173}
diff --git a/src/lib/emotion/Emotion.h b/src/lib/emotion/Emotion.h
new file mode 100644
index 0000000000..454ee0f42e
--- /dev/null
+++ b/src/lib/emotion/Emotion.h
@@ -0,0 +1,1332 @@
1#ifndef EMOTION_H
2#define EMOTION_H
3
4/**
5 * @file
6 * @brief Emotion Media Library
7 *
8 * These routines are used for Emotion.
9 */
10
11/**
12 *
13 * @page emotion_main Emotion
14 *
15 * @date 2003 (created)
16 *
17 * @section toc Table of Contents
18 *
19 * @li @ref emotion_main_intro
20 * @li @ref emotion_main_work
21 * @li @ref emotion_main_compiling
22 * @li @ref emotion_main_next_steps
23 * @li @ref emotion_main_intro_example
24 *
25 * @section emotion_main_intro Introduction
26 *
27 * A media object library for Evas and Ecore.
28 *
29 * Emotion is a library that allows playing audio and video files, using one of
30 * its backends (gstreamer, xine or generic shm player).
31 *
32 * It is integrated into Ecore through its mainloop, and is transparent to the
33 * user of the library how the decoding of audio and video is being done. Once
34 * the objects are created, the user can set callbacks to the specific events
35 * and set options to this object, all in the main loop (no threads are needed).
36 *
37 * Emotion is also integrated with Evas. The emotion object returned by
38 * emotion_object_add() is an Evas smart object, so it can be manipulated with
39 * default Evas object functions. Callbacks can be added to the signals emitted
40 * by this object with evas_object_smart_callback_add().
41 *
42 * @section emotion_main_work How does Emotion work?
43 *
44 * The Emotion library uses Evas smart objects to allow you to manipulate the
45 * created object as any other Evas object, and to connect to its signals,
46 * handling them when needed. It's also possible to swallow Emotion objects
47 * inside Edje themes, and expect it to behave as a normal image or rectangle
48 * when regarding to its dimensions.
49 *
50 * @section emotion_main_compiling How to compile
51 *
52 * Emotion is a library your application links to. The procedure for this is
53 * very simple. You simply have to compile your application with the
54 * appropriate compiler flags that the @c pkg-config script outputs. For
55 * example:
56 *
57 * Compiling C or C++ files into object files:
58 *
59 * @verbatim
60 gcc -c -o main.o main.c `pkg-config --cflags emotion`
61 @endverbatim
62 *
63 * Linking object files into a binary executable:
64 *
65 * @verbatim
66 gcc -o my_application main.o `pkg-config --libs emotion`
67 @endverbatim
68 *
69 * See @ref pkgconfig
70 *
71 * @section emotion_main_next_steps Next Steps
72 *
73 * After you understood what Emotion is and installed it in your
74 * system you should proceed understanding the programming
75 * interface. We'd recommend you to take a while to learn @ref Ecore and
76 * @ref Evas to get started.
77 *
78 * Recommended reading:
79 *
80 * @li @ref Emotion_Init to initialize the library.
81 * @li @ref Emotion_Video to control video parameters.
82 * @li @ref Emotion_Audio to control audio parameters.
83 * @li @ref Emotion_Play to control playback.
84 * @li @ref Emotion_Webcam to show cameras.
85 * @li @ref Emotion_API for general programming interface.
86 *
87 * @section emotion_main_intro_example Introductory Example
88 *
89 * @include emotion_basic_example.c
90 *
91 * More examples can be found at @ref emotion_examples.
92 */
93
94#include <Evas.h>
95
96#ifdef EAPI
97# undef EAPI
98#endif
99
100#ifdef _WIN32
101# ifdef EFL_EMOTION_BUILD
102# ifdef DLL_EXPORT
103# define EAPI __declspec(dllexport)
104# else
105# define EAPI
106# endif /* ! DLL_EXPORT */
107# else
108# define EAPI __declspec(dllimport)
109# endif /* ! EFL_EMOTION_BUILD */
110#else
111# ifdef __GNUC__
112# if __GNUC__ >= 4
113# define EAPI __attribute__ ((visibility("default")))
114# else
115# define EAPI
116# endif
117# else
118# define EAPI
119# endif
120#endif /* ! _WIN32 */
121
122/**
123 * @file Emotion.h
124 * @brief The file that provides Emotion the API, with functions available for
125 * play, seek, change volume, etc.
126 */
127
128enum _Emotion_Module
129{
130 EMOTION_MODULE_XINE,
131 EMOTION_MODULE_GSTREAMER
132};
133
134enum _Emotion_Event
135{
136 EMOTION_EVENT_MENU1, // Escape Menu
137 EMOTION_EVENT_MENU2, // Title Menu
138 EMOTION_EVENT_MENU3, // Root Menu
139 EMOTION_EVENT_MENU4, // Subpicture Menu
140 EMOTION_EVENT_MENU5, // Audio Menu
141 EMOTION_EVENT_MENU6, // Angle Menu
142 EMOTION_EVENT_MENU7, // Part Menu
143 EMOTION_EVENT_UP,
144 EMOTION_EVENT_DOWN,
145 EMOTION_EVENT_LEFT,
146 EMOTION_EVENT_RIGHT,
147 EMOTION_EVENT_SELECT,
148 EMOTION_EVENT_NEXT,
149 EMOTION_EVENT_PREV,
150 EMOTION_EVENT_ANGLE_NEXT,
151 EMOTION_EVENT_ANGLE_PREV,
152 EMOTION_EVENT_FORCE,
153 EMOTION_EVENT_0,
154 EMOTION_EVENT_1,
155 EMOTION_EVENT_2,
156 EMOTION_EVENT_3,
157 EMOTION_EVENT_4,
158 EMOTION_EVENT_5,
159 EMOTION_EVENT_6,
160 EMOTION_EVENT_7,
161 EMOTION_EVENT_8,
162 EMOTION_EVENT_9,
163 EMOTION_EVENT_10
164};
165
166/**
167 * @enum _Emotion_Meta_Info
168 *
169 * Used for retrieving information about the media file being played.
170 *
171 * @see emotion_object_meta_info_get()
172 *
173 * @ingroup Emotion_Info
174 */
175enum _Emotion_Meta_Info
176{
177 EMOTION_META_INFO_TRACK_TITLE, /**< track title */
178 EMOTION_META_INFO_TRACK_ARTIST, /**< artist name */
179 EMOTION_META_INFO_TRACK_ALBUM, /**< album name */
180 EMOTION_META_INFO_TRACK_YEAR, /**< track year */
181 EMOTION_META_INFO_TRACK_GENRE, /**< track genre */
182 EMOTION_META_INFO_TRACK_COMMENT, /**< track comments */
183 EMOTION_META_INFO_TRACK_DISC_ID, /**< track disc ID */
184 EMOTION_META_INFO_TRACK_COUNT /**< track count - number of the track in the album */
185};
186
187/**
188 * @enum _Emotion_Vis
189 *
190 * Used for displaying a visualization on the emotion object.
191 *
192 * @see emotion_object_vis_set()
193 *
194 * @ingroup Emotion_Visualization
195 */
196enum _Emotion_Vis
197{
198 EMOTION_VIS_NONE, /**< no visualization set */
199 EMOTION_VIS_GOOM, /**< goom */
200 EMOTION_VIS_LIBVISUAL_BUMPSCOPE, /**< bumpscope */
201 EMOTION_VIS_LIBVISUAL_CORONA, /**< corona */
202 EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES, /**< dancing particles */
203 EMOTION_VIS_LIBVISUAL_GDKPIXBUF, /**< gdkpixbuf */
204 EMOTION_VIS_LIBVISUAL_G_FORCE, /**< G force */
205 EMOTION_VIS_LIBVISUAL_GOOM, /**< goom */
206 EMOTION_VIS_LIBVISUAL_INFINITE, /**< infinite */
207 EMOTION_VIS_LIBVISUAL_JAKDAW, /**< jakdaw */
208 EMOTION_VIS_LIBVISUAL_JESS, /**< jess */
209 EMOTION_VIS_LIBVISUAL_LV_ANALYSER, /**< lv analyser */
210 EMOTION_VIS_LIBVISUAL_LV_FLOWER, /**< lv flower */
211 EMOTION_VIS_LIBVISUAL_LV_GLTEST, /**< lv gltest */
212 EMOTION_VIS_LIBVISUAL_LV_SCOPE, /**< lv scope */
213 EMOTION_VIS_LIBVISUAL_MADSPIN, /**< madspin */
214 EMOTION_VIS_LIBVISUAL_NEBULUS, /**< nebulus */
215 EMOTION_VIS_LIBVISUAL_OINKSIE, /**< oinksie */
216 EMOTION_VIS_LIBVISUAL_PLASMA, /**< plasma */
217 EMOTION_VIS_LAST /* sentinel */
218};
219
220/**
221 * @enum Emotion_Suspend
222 *
223 * Used for emotion pipeline resource management.
224 *
225 * @see emotion_object_suspend_set()
226 * @see emotion_object_suspend_get()
227 *
228 * @ingroup Emotion_Ressource
229 */
230typedef enum
231{
232 EMOTION_WAKEUP, /**< pipeline is up and running */
233 EMOTION_SLEEP, /**< turn off hardware resource usage like overlay */
234 EMOTION_DEEP_SLEEP, /**< destroy the pipeline, but keep full resolution pixels output around */
235 EMOTION_HIBERNATE /**< destroy the pipeline, and keep half resolution or object resolution if lower */
236} Emotion_Suspend;
237
238/**
239 * @enum _Emotion_Aspect
240 * Defines the aspect ratio option.
241 */
242enum _Emotion_Aspect
243{
244 EMOTION_ASPECT_KEEP_NONE, /**< ignore video aspect ratio */
245 EMOTION_ASPECT_KEEP_WIDTH, /**< respect video aspect, fitting its width inside the object width */
246 EMOTION_ASPECT_KEEP_HEIGHT, /**< respect video aspect, fitting its height inside the object height */
247 EMOTION_ASPECT_KEEP_BOTH, /**< respect video aspect, fitting it inside the object area */
248 EMOTION_ASPECT_CROP, /**< respect video aspect, cropping exceeding area */
249 EMOTION_ASPECT_CUSTOM, /**< use custom borders/crop for the video */
250};
251
252typedef enum _Emotion_Module Emotion_Module;
253typedef enum _Emotion_Event Emotion_Event;
254typedef enum _Emotion_Meta_Info Emotion_Meta_Info; /**< Meta info type to be retrieved. */
255typedef enum _Emotion_Vis Emotion_Vis; /**< Type of visualization. */
256typedef enum _Emotion_Aspect Emotion_Aspect; /**< Aspect ratio option. */
257
258#define EMOTION_CHANNEL_AUTO -1
259#define EMOTION_CHANNEL_DEFAULT 0
260
261#ifdef __cplusplus
262extern "C" {
263#endif
264
265#define EMOTION_VERSION_MAJOR 1
266#define EMOTION_VERSION_MINOR 8
267
268 typedef struct _Emotion_Version
269 {
270 int major;
271 int minor;
272 int micro;
273 int revision;
274 } Emotion_Version;
275
276 EAPI extern Emotion_Version *emotion_version;
277
278/* api calls available */
279
280/**
281 * @brief How to create, initialize, manipulate and connect to signals of an
282 * Emotion object.
283 * @defgroup Emotion_API API available for manipulating Emotion object.
284 * @ingroup Emotion
285 *
286 * @{
287 *
288 * Emotion provides an Evas smart object that allows to play, control and
289 * display a video or audio file. The API is synchronous but not everything
290 * happens immediately. There are also some signals to report changed states.
291 *
292 * Basically, once the object is created and initialized, a file will be set to
293 * it, and then it can be resized, moved, and controlled by other Evas object
294 * functions.
295 *
296 * However, the decoding of the music and video occurs not in the Ecore main
297 * loop, but usually in another thread (this depends on the module being used).
 * The synchronization between this other thread and the main loop is not visible
299 * to the end user of the library. The user can just register callbacks to the
300 * available signals to receive information about the changed states, and can
301 * call other functions from the API to request more changes on the current
302 * loaded file.
303 *
304 * There will be a delay between an API being called and it being really
305 * executed, since this request will be done in the main thread, and it needs to
306 * be sent to the decoding thread. For this reason, always call functions like
307 * emotion_object_size_get() or emotion_object_length_get() after some signal
308 * being sent, like "playback_started" or "open_done". @ref
309 * emotion_signals_example.c "This example demonstrates this behavior".
310 *
311 * @section signals Available signals
312 * The Evas_Object returned by emotion_object_add() has a number of signals that
313 * can be listened to using evas' smart callbacks mechanism. All signals have
314 * NULL as event info. The following is a list of interesting signals:
315 * @li "playback_started" - Emitted when the playback starts
316 * @li "playback_finished" - Emitted when the playback finishes
317 * @li "frame_decode" - Emitted every time a frame is decoded
318 * @li "open_done" - Emitted when the media file is opened
319 * @li "position_update" - Emitted when emotion_object_position_set is called
320 * @li "decode_stop" - Emitted after the last frame is decoded
321 *
322 * @section Examples
323 *
324 * The following examples exemplify the emotion usage. There's also the
325 * emotion_test binary that is distributed with this library and cover the
326 * entire API, but since it is too long and repetitive to be explained, its code
327 * is just displayed as another example.
328 *
329 * @li @ref emotion_basic_example_c
330 * @li @ref emotion_signals_example.c "Emotion signals"
331 * @li @ref emotion_test_main.c "emotion_test - full API usage"
332 *
333 */
334
335/**
336 * @defgroup Emotion_Init Creation and initialization functions
337 */
338
339/**
340 * @defgroup Emotion_Audio Audio control functions
341 */
342
343/**
344 * @defgroup Emotion_Video Video control functions
345 */
346
347/**
348 * @defgroup Emotion_Visualization Visualization control functions
349 */
350
351/**
352 * @defgroup Emotion_Info Miscellaneous information retrieval functions
353 */
354
355/**
356 * @defgroup Emotion_Ressource Video ressource management
357 */
358
359EAPI Eina_Bool emotion_init(void);
360EAPI Eina_Bool emotion_shutdown(void);
361
362/**
363 * @brief Add an emotion object to the canvas.
364 *
365 * @param evas The canvas where the object will be added to.
366 * @return The emotion object just created.
367 *
368 * This function creates an emotion object and adds it to the specified @p evas.
369 * The returned object can be manipulated as any other Evas object, using the
370 * default object manipulation functions - evas_object_*.
371 *
372 * After creating the object with this function, it's still necessary to
373 * initialize it with emotion_object_init(), and if an audio file is going to be
374 * played with this object instead of a video, use
375 * emotion_object_video_mute_set().
376 *
377 * The next step is to open the desired file with emotion_object_file_set(), and
378 * start playing it with emotion_object_play_set().
379 *
380 * @see emotion_object_init()
381 * @see emotion_object_video_mute_set()
382 * @see emotion_object_file_set()
383 * @see emotion_object_play_set()
384 *
385 * @ingroup Emotion_Init
386 */
387EAPI Evas_Object *emotion_object_add (Evas *evas);
388
389/**
390 * @brief Set the specified option for the current module.
391 *
392 * @param obj The emotion object which the option is being set to.
 * @param opt The option that is being set. Currently supported options: "video"
 * and "audio".
 * @param val The value of the option. Currently only the value "off" is supported.
396 *
397 * This function allows one to mute the video or audio of the emotion object.
398 *
399 * @note Please don't use this function, consider using
400 * emotion_object_audio_mute_set() and emotion_object_video_mute_set() instead.
401 *
402 * @see emotion_object_audio_mute_set()
403 * @see emotion_object_video_mute_set()
404 *
405 * @ingroup Emotion_Init
406 */
407EAPI void emotion_object_module_option_set (Evas_Object *obj, const char *opt, const char *val);
408
409/**
410 * @brief Initializes an emotion object with the specified module.
411 *
412 * @param obj The emotion object to be initialized.
413 * @param module_filename The name of the module to be used (gstreamer or xine).
414 * @return @c EINA_TRUE if the specified module was successfully initialized for
415 * this object, @c EINA_FALSE otherwise.
416 *
417 * This function is required after creating the emotion object, in order to
418 * specify which module will be used with this object. Different objects can
419 * use different modules to play a media file. The current supported modules are
420 * @b gstreamer and @b xine.
421 *
422 * To use any of them, you need to make sure that support for them was compiled
423 * correctly.
424 *
425 * @note It's possible to disable the build of a module with
426 * --disable-module_name.
427 *
428 * @see emotion_object_add()
429 * @see emotion_object_file_set()
430 *
431 * @ingroup Emotion_Init
432 */
433EAPI Eina_Bool emotion_object_init (Evas_Object *obj, const char *module_filename);
434
435/**
436 * @brief Set borders for the emotion object.
437 *
438 * @param obj The emotion object where borders are being set.
439 * @param l The left border.
440 * @param r The right border.
441 * @param t The top border.
442 * @param b The bottom border.
443 *
444 * This function sets borders for the emotion video object (just when a video is
445 * present). When positive values are given to one of the parameters, a border
446 * will be added to the respective position of the object, representing that
447 * size on the original video size. However, if the video is scaled up or down
448 * (i.e. the emotion object size is different from the video size), the borders
449 * will be scaled respectively too.
450 *
451 * If a negative value is given to one of the parameters, instead of a border,
452 * that respective side of the video will be cropped.
453 *
454 * It's possible to set a color for the added borders (default is transparent)
455 * with emotion_object_bg_color_set(). By default, an Emotion object doesn't
456 * have any border.
457 *
458 * @see emotion_object_border_get()
459 * @see emotion_object_bg_color_set()
460 *
461 * @ingroup Emotion_Video
462 */
463EAPI void emotion_object_border_set(Evas_Object *obj, int l, int r, int t, int b);
464
465/**
466 * @brief Get the borders set for the emotion object.
467 *
468 * @param obj The emotion object from which the borders are being retrieved.
469 * @param l The left border.
470 * @param r The right border.
471 * @param t The top border.
472 * @param b The bottom border.
473 *
474 * @see emotion_object_border_set()
475 *
476 * @ingroup Emotion_Video
477 */
478EAPI void emotion_object_border_get(const Evas_Object *obj, int *l, int *r, int *t, int *b);
479
480/**
481 * @brief Set a color for the background rectangle of this emotion object.
482 *
483 * @param obj The emotion object where the background color is being set.
484 * @param r Red component of the color.
485 * @param g Green component of the color.
486 * @param b Blue component of the color.
487 * @param a Alpha channel of the color.
488 *
489 * This is useful when a border is added to any side of the Emotion object. The
490 * area between the edge of the video and the edge of the object will be filled
491 * with the specified color.
492 *
493 * The default color is 0, 0, 0, 0 (transparent).
494 *
495 * @see emotion_object_bg_color_get()
496 *
497 * @ingroup Emotion_Video
498 */
499EAPI void emotion_object_bg_color_set(Evas_Object *obj, int r, int g, int b, int a);
500
501/**
502 * @brief Get the background color set for the emotion object.
503 *
504 * @param obj The emotion object from which the background color is being retrieved.
505 * @param r Red component of the color.
506 * @param g Green component of the color.
507 * @param b Blue component of the color.
 * @param a Alpha channel of the color.
509 *
510 * @see emotion_object_bg_color_set()
511 *
512 * @ingroup Emotion_Video
513 */
514EAPI void emotion_object_bg_color_get(const Evas_Object *obj, int *r, int *g, int *b, int *a);
515
516/**
517 * @brief Set whether emotion should keep the aspect ratio of the video.
518 *
519 * @param obj The emotion object where to set the aspect.
520 * @param a The aspect ratio policy.
521 *
522 * Instead of manually calculating the required border to set with
523 * emotion_object_border_set(), and using this to fix the aspect ratio of the
524 * video when the emotion object has a different aspect, it's possible to just
525 * set the policy to be used.
526 *
527 * The options are:
528 *
529 * - @b #EMOTION_ASPECT_KEEP_NONE - ignore the video aspect ratio, and reset any
530 * border set to 0, stretching the video inside the emotion object area. This
531 * option is similar to EVAS_ASPECT_CONTROL_NONE size hint.
532 * - @b #EMOTION_ASPECT_KEEP_WIDTH - respect the video aspect ratio, fitting the
533 * video width inside the object width. This option is similar to
534 * EVAS_ASPECT_CONTROL_HORIZONTAL size hint.
535 * - @b #EMOTION_ASPECT_KEEP_HEIGHT - respect the video aspect ratio, fitting
536 * the video height inside the object height. This option is similar to
 * EVAS_ASPECT_CONTROL_VERTICAL size hint.
538 * - @b #EMOTION_ASPECT_KEEP_BOTH - respect the video aspect ratio, fitting both
539 * its width and height inside the object area. This option is similar to
540 * EVAS_ASPECT_CONTROL_BOTH size hint. It's the effect called letterboxing.
541 * - @b #EMOTION_ASPECT_CROP - respect the video aspect ratio, fitting the width
542 * or height inside the object area, and cropping the exceding areas of the
543 * video in height or width. It's the effect called pan-and-scan.
544 * - @b #EMOTION_ASPECT_CUSTOM - ignore the video aspect ratio, and use the
545 * current set from emotion_object_border_set().
546 *
547 * @note Calling this function with any value except #EMOTION_ASPECT_CUSTOM will
548 * invalidate borders set with emotion_object_border_set().
549 *
550 * @note Calling emotion_object_border_set() will automatically set the aspect
551 * policy to #EMOTION_ASPECT_CUSTOM.
552 *
553 * @see emotion_object_border_set()
554 * @see emotion_object_keep_aspect_get()
555 *
556 * @ingroup Emotion_Video
557 */
558EAPI void emotion_object_keep_aspect_set(Evas_Object *obj, Emotion_Aspect a);
559
560/**
561 * @brief Get the current emotion aspect ratio policy.
562 *
563 * @param obj The emotion object from which we are fetching the aspect ratio
564 * policy.
565 * @return The current aspect ratio policy.
566 *
567 * @see emotion_object_keep_aspect_set()
568 *
569 * @ingroup Emotion_Video
570 */
571EAPI Emotion_Aspect emotion_object_keep_aspect_get(const Evas_Object *obj);
572
573/**
574 * @brief Set the file to be played in the Emotion object.
575 *
576 * @param obj The emotion object where the file is being loaded.
577 * @param filename Path to the file to be loaded. It can be absolute or relative
578 * path.
579 * @return EINA_TRUE if the new file could be loaded successfully, and
 * EINA_FALSE if the file could not be loaded. This happens when the file
 * could not be found, when the module couldn't open the file, when no module is
582 * initialized in this object, or when the @p filename is the same as the
583 * one previously set.
584 *
585 * This function sets the file to be used with this emotion object. If the
586 * object already has another file set, this file will be unset and unloaded,
587 * and the new file will be loaded to this emotion object. The seek position
588 * will be set to 0, and the emotion object will be paused, instead of playing.
589 *
590 * If there was already a filename set, and it's the same as the one being set
591 * now, this function does nothing and returns EINA_FALSE.
592 *
593 * Use @c NULL as argument to @p filename if you want to unload the current file
594 * but don't want to load anything else.
595 *
596 * @see emotion_object_init()
597 * @see emotion_object_play_set()
598 * @see emotion_object_file_get()
599 *
600 * @ingroup Emotion_Init
601 */
602EAPI Eina_Bool emotion_object_file_set (Evas_Object *obj, const char *filename);
603
604/**
605 * @brief Get the filename of the file associated with the emotion object.
606 *
607 * @param obj The emotion object from which the filename will be retrieved.
608 * @return The path to the file loaded into this emotion object.
609 *
610 * This function returns the path of the file loaded in this emotion object. If
611 * no object is loaded, it will return @c NULL.
612 *
613 * @note Don't free or change the string returned by this function in any way.
614 * If you want to unset it, use @c emotion_object_file_set(obj, NULL).
615 *
616 * @see emotion_object_file_set()
617 *
618 * @ingroup Emotion_Init
619 */
620EAPI const char *emotion_object_file_get (const Evas_Object *obj);
621/**
622 * @defgroup Emotion_Play Play control functions
623 * @ingroup Emotion
624 *
625 * @{
626 */
627/**
628 *
629 * @brief Set play/pause state of the media file.
630 *
631 * @param obj The emotion object whose state will be changed.
632 * @param play EINA_TRUE to play, EINA_FALSE to pause.
633 *
 * This function sets the currently playing status of the video. Using this
 * function to play or pause the video doesn't alter its current position.
636 */
637EAPI void emotion_object_play_set (Evas_Object *obj, Eina_Bool play);
638/**
639 * @brief Get play/pause state of the media file.
640 *
641 * @param obj The emotion object from which the state will be retrieved.
642 * @return EINA_TRUE if playing. EINA_FALSE if not playing.
643 */
644EAPI Eina_Bool emotion_object_play_get (const Evas_Object *obj);
645/**
646 * @brief Set the position in the media file.
647 *
648 * @param obj The emotion object whose position will be changed.
649 * @param sec The position(in seconds) to which the media file will be set.
650 *
 * This function sets the current position of the media file to @p sec. This
 * only works on seekable streams. Setting the position doesn't change the
653 * playing state of the media file.
654 *
655 * @see emotion_object_seekable_get
656 */
657EAPI void emotion_object_position_set (Evas_Object *obj, double sec);
658/**
659 * @brief Get the position in the media file.
660 *
661 * @param obj The emotion object from which the position will be retrieved.
662 * @return The position of the media file.
663 *
664 * The position is returned as the number of seconds since the beginning of the
665 * media file.
666 */
667EAPI double emotion_object_position_get (const Evas_Object *obj);
668
669/**
670 * @brief Get the percentual size of the buffering cache.
671 *
672 * @param obj The emotion object from which the buffer size will be retrieved.
673 * @return The buffer percent size, ranging from 0.0 to 1.0
674 *
675 * The buffer size is returned as a number between 0.0 and 1.0, 0.0 means
 * the buffer is empty, 1.0 means full.
677 * If no buffering is in progress 1.0 is returned. In all other cases (maybe
678 * the backend don't support buffering) 1.0 is returned, thus you can always
679 * check for buffer_size < 1.0 to know if buffering is in progress.
680 *
681 * @warning Generic backend don't implement this (will return 1.0).
682 */
683EAPI double emotion_object_buffer_size_get (const Evas_Object *obj);
684
685/**
686 * @brief Get whether the media file is seekable.
687 *
688 * @param obj The emotion object from which the seekable status will be
689 * retrieved.
690 * @return EINA_TRUE if the media file is seekable, EINA_FALSE otherwise.
691 */
692EAPI Eina_Bool emotion_object_seekable_get (const Evas_Object *obj);
693/**
694 * @brief Get the length of play for the media file.
695 *
696 * @param obj The emotion object from which the length will be retrieved.
697 * @return The length of the media file in seconds.
698 *
699 * This function returns the length of the media file in seconds.
700 *
 * @warning This will return 0 if called before the "length_change" signal has
 * been emitted.
703 */
704EAPI double emotion_object_play_length_get (const Evas_Object *obj);
705
706/**
707 * @brief Set the play speed of the media file.
708 *
709 * @param obj The emotion object whose speed will be set.
710 * @param speed The speed to be set in the range [0,infinity)
711 *
712 * This function sets the speed with which the media file will be played. 1.0
713 * represents the normal speed, 2 double speed, 0.5 half speed and so on.
714 *
715 * @warning The only backend that implements this is the experimental VLC
716 * backend.
717 */
718EAPI void emotion_object_play_speed_set (Evas_Object *obj, double speed);
719/**
720 * @brief Get the play speed of the media file.
721 *
722 * @param obj The emotion object from which the filename will be retrieved.
723 * @return The current speed of the media file.
724 *
725 * @see emotion_object_play_speed_set
726 */
727EAPI double emotion_object_play_speed_get (const Evas_Object *obj);
728/**
729 * @brief Get how much of the file has been played.
730 *
731 * @param obj The emotion object from which the filename will be retrieved.
732 * @return The progress of the media file.
733 *
 * @warning Don't change or free the returned string.
 * @warning The gstreamer and xine backends don't implement this (will return NULL).
736 */
737EAPI const char *emotion_object_progress_info_get (const Evas_Object *obj);
738/**
739 * @brief Get how much of the file has been played.
740 *
741 * @param obj The emotion object from which the filename will be retrieved
742 * @return The progress of the media file.
743 *
744 * This function gets the progress in playing the file, the return value is in
745 * the [0, 1] range.
746 *
 * @warning The gstreamer and xine backends don't implement this (will return 0).
748 */
749EAPI double emotion_object_progress_status_get (const Evas_Object *obj);
750/**
751 * @}
752 */
753EAPI Eina_Bool emotion_object_video_handled_get (const Evas_Object *obj);
754EAPI Eina_Bool emotion_object_audio_handled_get (const Evas_Object *obj);
755
756/**
757 * @brief Retrieve the video aspect ratio of the media file loaded.
758 *
759 * @param obj The emotion object which the video aspect ratio will be retrieved
760 * from.
761 * @return The video aspect ratio of the file loaded.
762 *
763 * This function returns the video aspect ratio (width / height) of the file
764 * loaded. It can be used to adapt the size of the emotion object in the canvas,
765 * so the aspect won't be changed (by wrongly resizing the object). Or to crop
766 * the video correctly, if necessary.
767 *
768 * The described behavior can be applied like following. Consider a given
769 * emotion object that we want to position inside an area, which we will
770 * represent by @c w and @c h. Since we want to position this object either
771 * stretching, or filling the entire area but overflowing the video, or just
772 * adjust the video to fit inside the area without keeping the aspect ratio, we
773 * must compare the video aspect ratio with the area aspect ratio:
774 * @code
775 * int w = 200, h = 300; // an arbitrary value which represents the area where
776 * // the video would be placed
777 * int vw, vh;
778 * double r, vr = emotion_object_ratio_get(obj);
779 * r = (double)w / h;
780 * @endcode
781 *
782 * Now, if we want to make the video fit inside the area, the following code
783 * would do it:
784 * @code
785 * if (vr > r) // the video is wider than the area
786 * {
787 * vw = w;
788 * vh = w / vr;
789 * }
790 * else // the video is taller than the area
791 * {
792 * vh = h;
793 * vw = h * vr;
794 * }
795 * evas_object_resize(obj, vw, vh);
796 * @endcode
797 *
798 * And for keeping the aspect ratio but making the video fill the entire area,
799 * overflowing the content which can't fit inside it, we would do:
800 * @code
801 * if (vr > r) // the video is wider than the area
802 * {
803 * vh = h;
804 * vw = h * vr;
805 * }
806 * else // the video is taller than the area
807 * {
808 * vw = w;
809 * vh = w / vr;
810 * }
811 * evas_object_resize(obj, vw, vh);
812 * @endcode
813 *
814 * Finally, by just resizing the video to the video area, we would have the
815 * video stretched:
816 * @code
817 * vw = w;
818 * vh = h;
819 * evas_object_resize(obj, vw, vh);
820 * @endcode
821 *
822 * The following diagram exemplifies what would happen to the video,
823 * respectively, in each case:
824 *
825 * @image html emotion_ratio.png
826 * @image latex emotion_ratio.eps width=\textwidth
827 *
828 * @note This function returns the aspect ratio that the video @b should be, but
829 * sometimes the reported size from emotion_object_size_get() represents a
830 * different aspect ratio. You can safely resize the video to respect the aspect
831 * ratio returned by @b this function.
832 *
833 * @see emotion_object_size_get()
834 *
835 * @ingroup Emotion_Video
836 */
837EAPI double emotion_object_ratio_get (const Evas_Object *obj);
838
839/**
840 * @brief Retrieve the video size of the loaded file.
841 *
842 * @param obj The object from which we are retrieving the video size.
843 * @param iw A pointer to a variable where the width will be stored.
844 * @param ih A pointer to a variable where the height will be stored.
845 *
846 * This function returns the reported size of the loaded video file. If a file
847 * that doesn't contain a video channel is loaded, then this size can be
848 * ignored.
849 *
850 * The value reported by this function should be consistent with the aspect
851 * ratio returned by emotion_object_ratio_get(), but sometimes the information
852 * stored in the file is wrong. So use the ratio size reported by
853 * emotion_object_ratio_get(), since it is more likely going to be accurate.
854 *
855 * @note Use @c NULL for @p iw or @p ih if you don't need one of these values.
856 *
857 * @see emotion_object_ratio_get()
858 *
859 * @ingroup Emotion_Video
860 */
861EAPI void emotion_object_size_get (const Evas_Object *obj, int *iw, int *ih);
862
863/**
864 * @brief Sets whether to use of high-quality image scaling algorithm
865 * of the given video object.
866 *
867 * When enabled, a higher quality video scaling algorithm is used when
868 * scaling videos to sizes other than the source video. This gives
869 * better results but is more computationally expensive.
870 *
871 * @param obj The given video object.
872 * @param smooth Whether to use smooth scale or not.
873 *
874 * @see emotion_object_smooth_scale_get()
875 *
876 * @ingroup Emotion_Video
877 */
878EAPI void emotion_object_smooth_scale_set (Evas_Object *obj, Eina_Bool smooth);
879
880/**
881 * @brief Gets whether the high-quality image scaling algorithm
882 * of the given video object is used.
883 *
884 * @param obj The given video object.
885 * @return Whether the smooth scale is used or not.
886 *
887 * @see emotion_object_smooth_scale_set()
888 *
889 * @ingroup Emotion_Video
890 */
891EAPI Eina_Bool emotion_object_smooth_scale_get (const Evas_Object *obj);
892EAPI void emotion_object_event_simple_send (Evas_Object *obj, Emotion_Event ev);
893
894/**
895 * @brief Set the audio volume.
896 *
897 * @param obj The object where the volume is being set.
898 * @param vol The new volume parameter. Range is from 0.0 to 1.0.
899 *
900 * Sets the audio volume of the stream being played. This has nothing to do with
901 * the system volume. This volume will be multiplied by the system volume. e.g.:
902 * if the current volume level is 0.5, and the system volume is 50%, it will be
903 * 0.5 * 0.5 = 0.25.
904 *
905 * The default value depends on the module used. This value doesn't get changed
906 * when another file is loaded.
907 *
908 * @see emotion_object_audio_volume_get()
909 *
910 * @ingroup Emotion_Audio
911 */
912EAPI void emotion_object_audio_volume_set (Evas_Object *obj, double vol);
913
914/**
915 * @brief Get the audio volume.
916 *
917 * @param obj The object from which we are retrieving the volume.
918 * @return The current audio volume level for this object.
919 *
920 * Get the current value for the audio volume level. Range is from 0.0 to 1.0.
921 * This volume is set with emotion_object_audio_volume_set().
922 *
923 * @see emotion_object_audio_volume_set()
924 *
925 * @ingroup Emotion_Audio
926 */
927EAPI double emotion_object_audio_volume_get (const Evas_Object *obj);
928
929/**
930 * @brief Set the mute audio option for this object.
931 *
932 * @param obj The object which we are setting the mute audio option.
933 * @param mute Whether the audio should be muted (@c EINA_TRUE) or not (@c
934 * EINA_FALSE).
935 *
936 * This function sets the mute audio option for this emotion object. The current
937 * module used for this object can use this to avoid decoding the audio portion
938 * of the loaded media file.
939 *
940 * @see emotion_object_audio_mute_get()
941 * @see emotion_object_video_mute_set()
942 *
943 * @ingroup Emotion_Audio
944 */
945EAPI void emotion_object_audio_mute_set (Evas_Object *obj, Eina_Bool mute);
946
947/**
948 * @brief Get the mute audio option of this object.
949 *
950 * @param obj The object which we are retrieving the mute audio option from.
951 * @return Whether the audio is muted (@c EINA_TRUE) or not (@c EINA_FALSE).
952 *
953 * This function return the mute audio option from this emotion object. It can
954 * be set with emotion_object_audio_mute_set().
955 *
956 * @see emotion_object_audio_mute_set()
957 *
958 * @ingroup Emotion_Audio
959 */
960EAPI Eina_Bool emotion_object_audio_mute_get (const Evas_Object *obj);
961EAPI int emotion_object_audio_channel_count (const Evas_Object *obj);
962EAPI const char *emotion_object_audio_channel_name_get(const Evas_Object *obj, int channel);
963EAPI void emotion_object_audio_channel_set (Evas_Object *obj, int channel);
964EAPI int emotion_object_audio_channel_get (const Evas_Object *obj);
965
966/**
967 * @brief Set the mute video option for this object.
968 *
969 * @param obj The object which we are setting the mute video option.
970 * @param mute Whether the video should be muted (@c EINA_TRUE) or not (@c
971 * EINA_FALSE).
972 *
973 * This function sets the mute video option for this emotion object. The
974 * current module used for this object can use this information to avoid
975 * decoding the video portion of the loaded media file.
976 *
977 * @see emotion_object_video_mute_get()
978 * @see emotion_object_audio_mute_set()
979 *
980 * @ingroup Emotion_Video
981 */
982EAPI void emotion_object_video_mute_set (Evas_Object *obj, Eina_Bool mute);
983
984/**
985 * @brief Get the mute video option of this object.
986 *
987 * @param obj The object which we are retrieving the mute video option from.
988 * @return Whether the video is muted (@c EINA_TRUE) or not (@c EINA_FALSE).
989 *
990 * This function returns the mute video option from this emotion object. It can
991 * be set with emotion_object_video_mute_set().
992 *
993 * @see emotion_object_video_mute_set()
994 *
995 * @ingroup Emotion_Video
996 */
997EAPI Eina_Bool emotion_object_video_mute_get (const Evas_Object *obj);
998
999/**
1000 * @brief Set the video's subtitle file path.
1001 *
1002 * @param obj The object which we are setting a subtitle file path.
1003 * @param filepath The subtitle file path.
1004 *
 * This function sets a video's subtitle file path (i.e. an .srt file). For
 * supported subtitle formats, consult the backend's documentation.
1007 *
1008 * @see emotion_object_video_subtitle_file_get().
1009 *
1010 * @ingroup Emotion_Video
1011 * @since 1.8
1012 */
1013EAPI void emotion_object_video_subtitle_file_set (Evas_Object *obj, const char *filepath);
1014
1015/**
1016 * @brief Get the video's subtitle file path.
1017 *
1018 * @param obj The object which we are retrieving the subtitle file path from.
1019 * @return The video's subtitle file path previously set, NULL otherwise.
1020 *
1021 * This function returns the video's subtitle file path, if not previously set
1022 * or in error NULL is returned.
1023 *
1024 * @see emotion_object_video_subtitle_file_set().
1025 *
1026 * @ingroup Emotion_Video
1027 * @since 1.8
1028 */
1029EAPI const char *emotion_object_video_subtitle_file_get (const Evas_Object *obj);
1030
1031/**
1032 * @brief Get the number of available video channel
1033 *
1034 * @param obj The object which we are retrieving the channel count from
1035 * @return the number of available channel.
1036 *
1037 * @see emotion_object_video_channel_name_get()
1038 *
1039 * @ingroup Emotion_Video
1040 */
1041EAPI int emotion_object_video_channel_count (const Evas_Object *obj);
1042EAPI const char *emotion_object_video_channel_name_get(const Evas_Object *obj, int channel);
1043EAPI void emotion_object_video_channel_set (Evas_Object *obj, int channel);
1044EAPI int emotion_object_video_channel_get (const Evas_Object *obj);
1045EAPI void emotion_object_spu_mute_set (Evas_Object *obj, Eina_Bool mute);
1046EAPI Eina_Bool emotion_object_spu_mute_get (const Evas_Object *obj);
1047EAPI int emotion_object_spu_channel_count (const Evas_Object *obj);
1048EAPI const char *emotion_object_spu_channel_name_get (const Evas_Object *obj, int channel);
1049EAPI void emotion_object_spu_channel_set (Evas_Object *obj, int channel);
1050EAPI int emotion_object_spu_channel_get (const Evas_Object *obj);
1051EAPI int emotion_object_chapter_count (const Evas_Object *obj);
1052EAPI void emotion_object_chapter_set (Evas_Object *obj, int chapter);
1053EAPI int emotion_object_chapter_get (const Evas_Object *obj);
1054EAPI const char *emotion_object_chapter_name_get (const Evas_Object *obj, int chapter);
1055EAPI void emotion_object_eject (Evas_Object *obj);
1056
1057/**
1058 * @brief Get the dvd title from this emotion object.
1059 *
1060 * @param obj The object which the title will be retrieved from.
1061 * @return A string containing the title.
1062 *
1063 * This function is only useful when playing a DVD.
1064 *
1065 * @note Don't change or free the string returned by this function.
1066 *
1067 * @ingroup Emotion_Info
1068 */
1069EAPI const char *emotion_object_title_get (const Evas_Object *obj);
1070EAPI const char *emotion_object_ref_file_get (const Evas_Object *obj);
1071EAPI int emotion_object_ref_num_get (const Evas_Object *obj);
1072EAPI int emotion_object_spu_button_count_get (const Evas_Object *obj);
1073EAPI int emotion_object_spu_button_get (const Evas_Object *obj);
1074
1075/**
1076 * @brief Retrieve meta information from this file being played.
1077 *
1078 * @param obj The object which the meta info will be extracted from.
1079 * @param meta The type of meta information that will be extracted.
1080 *
1081 * This function retrieves information about the file loaded. It can retrieve
1082 * the track title, artist name, album name, etc. See @ref Emotion_Meta_Info
1083 * for all the possibilities.
1084 *
1085 * The meta info may be not available on all types of files. It will return @c
 * NULL if the file doesn't have meta info, or if this specific field is
1087 * empty.
1088 *
1089 * @note Don't change or free the string returned by this function.
1090 *
1091 * @see Emotion_Meta_Info
1092 *
1093 * @ingroup Emotion_Info
1094 */
1095EAPI const char *emotion_object_meta_info_get (const Evas_Object *obj, Emotion_Meta_Info meta);
1096
1097/**
1098 * @brief Set the visualization to be used with this object.
1099 *
1100 * @param obj The object where the visualization will be set on.
1101 * @param visualization The type of visualization to be used.
1102 *
1103 * The @p visualization specified will be played instead of a video. This is
1104 * commonly used to display a visualization for audio only files (musics).
1105 *
1106 * The available visualizations are @ref Emotion_Vis.
1107 *
1108 * @see Emotion_Vis
1109 * @see emotion_object_vis_get()
1110 * @see emotion_object_vis_supported()
1111 *
1112 * @ingroup Emotion_Visualization
1113 */
1114EAPI void emotion_object_vis_set (Evas_Object *obj, Emotion_Vis visualization);
1115
1116/**
1117 * @brief Get the type of visualization in use by this emotion object.
1118 *
1119 * @param obj The emotion object which the visualization is being retrieved
1120 * from.
1121 * @return The type of visualization in use by this object.
1122 *
1123 * The type of visualization can be set by emotion_object_vis_set().
1124 *
1125 * @see Emotion_Vis
1126 * @see emotion_object_vis_set()
1127 * @see emotion_object_vis_supported()
1128 *
1129 * @ingroup Emotion_Visualization
1130 */
1131EAPI Emotion_Vis emotion_object_vis_get (const Evas_Object *obj);
1132
1133/**
1134 * @brief Query whether a type of visualization is supported by this object.
1135 *
1136 * @param obj The object which the query is being ran on.
1137 * @param visualization The type of visualization that is being queried.
1138 * @return EINA_TRUE if the visualization is supported, EINA_FALSE otherwise.
1139 *
1140 * This can be used to check if a visualization is supported. e.g.: one wants to
1141 * display a list of available visualizations for a specific object.
1142 *
1143 * @see Emotion_Vis
1144 * @see emotion_object_vis_set()
1145 * @see emotion_object_vis_get()
1146 *
1147 * @ingroup Emotion_Visualization
1148 */
1149EAPI Eina_Bool emotion_object_vis_supported (const Evas_Object *obj, Emotion_Vis visualization);
1150
/**
 * @brief Raise the priority of an object so it gets privileged access to hardware resources.
 *
 * @param obj The object whose priority is being raised.
 * @param priority EINA_TRUE requests privileged access to the hardware resource.
 *
 * Hardware usually has a few dedicated pipelines that can process video at no
 * cost for the CPU. This is especially true on SoCs, where you mostly have one
 * (mobile phone SoC) or two (Set Top Box SoC, where Picture in Picture is
 * needed). Most applications only have a few video streams that really deserve
 * a high frame rate and high quality output, and that is what this call is for.
 *
 * Please note that if Emotion cannot acquire a privileged hardware resource,
 * it falls back to the non-priority path. This works on a first-come,
 * first-served basis.
 *
 * @see emotion_object_priority_get()
 *
 * @ingroup Emotion_Ressource
 */
1169EAPI void emotion_object_priority_set(Evas_Object *obj, Eina_Bool priority);
1170
/**
 * @brief Get the actual priority of an object.
 *
 * @param obj The object being queried.
 * @return EINA_TRUE if the object has privileged access to the hardware.
 *
 * This returns the priority status of an object. If Emotion failed to obtain
 * privileged access to the hardware, it returns EINA_FALSE.
 *
 * @see emotion_object_priority_set()
 *
 * @ingroup Emotion_Ressource
 */
1184EAPI Eina_Bool emotion_object_priority_get(const Evas_Object *obj);
1185
1186/**
1187 * @brief Change the state of an object pipeline.
1188 *
1189 * @param obj The object which the query is being ran on.
1190 * @param state The new state for the object.
1191 *
1192 * Changing the state of a pipeline should help preserve the battery of an embedded device.
1193 * But it will only work sanely if the pipeline is not playing at the time you change its
1194 * state. Depending on the engine all state may be not implemented.
1195 *
1196 * @see Emotion_Suspend
1197 * @see emotion_object_suspend_get()
1198 *
1199 * @ingroup Emotion_Ressource
1200 */
1201EAPI void emotion_object_suspend_set(Evas_Object *obj, Emotion_Suspend state);
1202
1203/**
1204 * @brief Get the current state of the pipeline
1205 *
1206 * @param obj The object which the query is being ran on.
1207 * @return the current state of the pipeline.
1208 *
1209 * @see Emotion_Suspend
1210 * @see emotion_object_suspend_set()
1211 *
1212 * @ingroup Emotion_Ressource
1213 */
1214EAPI Emotion_Suspend emotion_object_suspend_get(Evas_Object *obj);
1215
/**
 * @brief Load the last known playback position, if available.
 *
 * @param obj The object being queried.
 *
 * Using xattr, Emotion is able, when the system permits it, to store and
 * retrieve the latest playback position. It triggers a smart callback to let
 * the application know whether it succeeded or failed. Every operation is
 * fully asynchronous and not tied to the actual engine used to play the
 * video.
 *
 * @see emotion_object_last_position_save()
 *
 * @ingroup Emotion_Info
 */
1230EAPI void emotion_object_last_position_load(Evas_Object *obj);
1231
/**
 * @brief Save the latest playback position, if possible.
 *
 * @param obj The object being queried.
 *
 * Using xattr, Emotion is able, when the system permits it, to store and
 * retrieve the latest playback position. It triggers a smart callback to let
 * the application know whether it succeeded or failed. Every operation is
 * fully asynchronous and not tied to the actual engine used to play the
 * video.
 *
 * @see emotion_object_last_position_load()
 *
 * @ingroup Emotion_Info
 */
1246EAPI void emotion_object_last_position_save(Evas_Object *obj);
1247
/**
 * @brief Check whether Emotion stands a chance of playing this file.
 *
 * @param file A stringshared filename that we want to know if Emotion can play.
 *
 * This only looks at the extension of the file; it checks neither the
 * MIME type nor whether the file is actually sane. Treat it as a hint for
 * your application.
 *
 * @see emotion_object_extension_may_play_get()
 */
1258EAPI Eina_Bool emotion_object_extension_may_play_fast_get(const char *file);
1259
/**
 * @brief Check whether Emotion stands a chance of playing this file.
 *
 * @param file A filename that we want to know if Emotion can play.
 *
 * This only looks at the extension of the file; it checks neither the
 * MIME type nor whether the file is actually sane. Treat it as a hint for
 * your application.
 *
 * @see emotion_object_extension_may_play_fast_get()
 */
1270EAPI Eina_Bool emotion_object_extension_may_play_get(const char *file);
1271
/**
 * @brief Get the actual image object that contains the pixels of the video stream.
 *
 * @param obj The object being queried.
 *
 * This function is useful when you want direct access to the pixels.
 */
1281EAPI Evas_Object *emotion_object_image_get(const Evas_Object *obj);
1282
1283/**
1284 * @defgroup Emotion_Webcam API available for accessing webcam
1285 * @ingroup Emotion
1286 */
1287
1288typedef struct _Emotion_Webcam Emotion_Webcam; /**< Webcam description */
1289
1290EAPI extern int EMOTION_WEBCAM_UPDATE; /**< Ecore_Event triggered when a new webcam is plugged in */
1291
1292/**
1293 * @brief Get a list of active and available webcam
1294 *
1295 * @return the list of available webcam at the time of the call.
1296 *
1297 * It will return the current live list of webcam. It is updated before
1298 * triggering EMOTION_WEBCAM_UPDATE and should never be modified.
1299 *
1300 * @ingroup Emotion_Webcam
1301 */
1302EAPI const Eina_List *emotion_webcams_get(void);
1303
1304/**
1305 * @brief Get the human understandable name of a Webcam
1306 *
1307 * @param ew The webcam to get the name from.
1308 * @return the actual human readable name.
1309 *
1310 * @ingroup Emotion_Webcam
1311 */
1312EAPI const char *emotion_webcam_name_get(const Emotion_Webcam *ew);
1313
/**
 * @brief Get the URI of a webcam in a form that Emotion understands.
 *
 * @param ew The webcam to get the URI from.
 * @return The URI that Emotion can later consume for playback.
 *
 * @ingroup Emotion_Webcam
 */
1322EAPI const char *emotion_webcam_device_get(const Emotion_Webcam *ew);
1323
1324/**
1325 * @}
1326 */
1327
1328#ifdef __cplusplus
1329}
1330#endif
1331
1332#endif
diff --git a/src/lib/emotion/emotion_main.c b/src/lib/emotion/emotion_main.c
new file mode 100644
index 0000000000..8416f50b1c
--- /dev/null
+++ b/src/lib/emotion/emotion_main.c
@@ -0,0 +1,464 @@
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif

#ifdef STDC_HEADERS
# include <stdlib.h>
# include <stddef.h>
#else
# ifdef HAVE_STDLIB_H
#  include <stdlib.h>
# endif
#endif

#include <errno.h>
#include <stdio.h>
#include <string.h>

#ifdef HAVE_EEZE
# include <sys/types.h>
# include <sys/stat.h>
# include <fcntl.h>
# include <unistd.h>
# ifdef HAVE_V4L2
#  include <sys/ioctl.h>
#  include <linux/videodev2.h>
# endif
# include <Eeze.h>
#endif

#include <Ecore.h>
#include <Eet.h>

#include "Emotion.h"
#include "emotion_private.h"
32
33static Emotion_Version _version = { VMAJ, VMIN, VMIC, VREV };
34static int emotion_pending_objects = 0;
35EAPI Emotion_Version *emotion_version = &_version;
36
37EAPI int EMOTION_WEBCAM_UPDATE = 0;
38
/* One entry of the playable-extension table. 'length' includes the
 * terminating NUL so it can be produced directly by sizeof() on a string
 * literal (see MATCHING below) and compared against strlen()+1. */
struct ext_match_s
{
   unsigned int length;
   const char *extension;
};

/* sizeof (Ext) counts the trailing NUL, matching ext_match_s.length. */
#define MATCHING(Ext) \
  { sizeof (Ext), Ext }

static const struct ext_match_s matchs[] =
{ /* map extensions to know if it's a emotion playable content for good first-guess tries */
   MATCHING(".264"),
   MATCHING(".3g2"),
   MATCHING(".3gp"),
   MATCHING(".3gp2"),
   MATCHING(".3gpp"),
   MATCHING(".3gpp2"),
   MATCHING(".3p2"),
   MATCHING(".asf"),
   MATCHING(".avi"),
   MATCHING(".bdm"),
   MATCHING(".bdmv"),
   MATCHING(".clpi"),
   MATCHING(".clp"),
   MATCHING(".fla"),
   MATCHING(".flv"),
   MATCHING(".m1v"),
   MATCHING(".m2v"),
   MATCHING(".m2t"),
   MATCHING(".m4v"),
   MATCHING(".mkv"),
   MATCHING(".mov"),
   MATCHING(".mp2"),
   MATCHING(".mp2ts"),
   MATCHING(".mp4"),
   MATCHING(".mpe"),
   MATCHING(".mpeg"),
   MATCHING(".mpg"),
   MATCHING(".mpl"),
   MATCHING(".mpls"),
   MATCHING(".mts"),
   MATCHING(".mxf"),
   MATCHING(".nut"),
   MATCHING(".nuv"),
   MATCHING(".ogg"),
   MATCHING(".ogm"),
   MATCHING(".ogv"),
   MATCHING(".rm"),
   MATCHING(".rmj"),
   MATCHING(".rmm"),
   MATCHING(".rms"),
   MATCHING(".rmx"),
   MATCHING(".rmvb"),
   MATCHING(".swf"),
   MATCHING(".ts"),
   MATCHING(".weba"),
   MATCHING(".webm"),
   MATCHING(".wmv")
};
98
99Eina_Bool
100_emotion_object_extension_can_play_generic_get(const void *data EINA_UNUSED, const char *file)
101{
102 unsigned int length;
103 unsigned int i;
104
105 length = eina_stringshare_strlen(file) + 1;
106 if (length < 5) return EINA_FALSE;
107
108 for (i = 0; i < sizeof (matchs) / sizeof (struct ext_match_s); ++i)
109 {
110 if (matchs[i].length > length) continue;
111
112 if (!strcasecmp(matchs[i].extension,
113 file + length - matchs[i].length))
114 return EINA_TRUE;
115 }
116
117 return EINA_FALSE;
118}
119
120EAPI Eina_Bool
121emotion_object_extension_may_play_fast_get(const char *file)
122{
123 if (!file) return EINA_FALSE;
124 return _emotion_object_extension_can_play_generic_get(NULL, file);
125}
126
127EAPI Eina_Bool
128emotion_object_extension_may_play_get(const char *file)
129{
130 const char *tmp;
131 Eina_Bool result;
132
133 if (!file) return EINA_FALSE;
134 tmp = eina_stringshare_add(file);
135 result = emotion_object_extension_may_play_fast_get(tmp);
136 eina_stringshare_del(tmp);
137
138 return result;
139}
140
typedef struct _Emotion_Webcams Emotion_Webcams;

/* Container for the global list of known webcams (also the shape of the
 * "config" entry read from emotion.cfg via Eet). */
struct _Emotion_Webcams
{
   Eina_List *webcams;
};

/* One webcam entry. Entries discovered through eeze own their stringshares;
 * entries loaded from emotion.cfg have 'custom' set (see
 * emotion_webcam_destroy() for the ownership difference). */
struct _Emotion_Webcam
{
   EINA_REFCOUNT;

   const char *syspath;  /* udev syspath (stringshare) */
   const char *device;   /* "v4l2://<devnode>" URI (stringshare) */
   const char *name;     /* human-readable device name */

   const char *custom;   /* custom pipeline description from emotion.cfg, if any */

   const char *filename; /* points into 'device' past "v4l2://" -- not owned */
};
160
static int _emotion_webcams_count = 0;             /* emotion_init()/shutdown() refcount */
static Eet_Data_Descriptor *_webcam_edd;           /* descriptor for one Emotion_Webcam */
static Eet_Data_Descriptor *_webcams_edd;          /* descriptor for the webcam list */

static Emotion_Webcams *_emotion_webcams = NULL;      /* global webcam registry */
static Eet_File *_emotion_webcams_file = NULL;        /* open emotion.cfg, if any */
167
/* Build the Eet data descriptors used to deserialize the "config" entry of
 * emotion.cfg. Fills the file-scope _webcam_edd/_webcams_edd (freed by the
 * caller after eet_data_read()) and returns the list descriptor. */
static Eet_Data_Descriptor *
_emotion_webcams_data(void)
{
   Eet_Data_Descriptor_Class eddc;

   EET_EINA_FILE_DATA_DESCRIPTOR_CLASS_SET(&eddc, Emotion_Webcam);
   _webcam_edd = eet_data_descriptor_file_new(&eddc);
   EET_DATA_DESCRIPTOR_ADD_BASIC(_webcam_edd, Emotion_Webcam, "device", device, EET_T_STRING);
   EET_DATA_DESCRIPTOR_ADD_BASIC(_webcam_edd, Emotion_Webcam, "name", name, EET_T_STRING);
   EET_DATA_DESCRIPTOR_ADD_BASIC(_webcam_edd, Emotion_Webcam, "custom", custom, EET_T_STRING);
   EET_DATA_DESCRIPTOR_ADD_BASIC(_webcam_edd, Emotion_Webcam, "filename", filename, EET_T_STRING);

   EET_EINA_FILE_DATA_DESCRIPTOR_CLASS_SET(&eddc, Emotion_Webcams);
   _webcams_edd = eet_data_descriptor_file_new(&eddc);
   EET_DATA_DESCRIPTOR_ADD_LIST(_webcams_edd, Emotion_Webcams, "webcams", webcams, _webcam_edd);

   return _webcams_edd;
}
186
/* Free a webcam entry. Eeze-discovered entries (custom == NULL) own their
 * stringshares; entries loaded from emotion.cfg do not.
 * NOTE(review): for the custom path the strings presumably belong to the Eet
 * dictionary of the still-open emotion.cfg -- verify the file outlives every
 * such entry (see emotion_shutdown() ordering). */
static void
emotion_webcam_destroy(Emotion_Webcam *ew)
{
   if (!ew->custom)
     {
        eina_stringshare_del(ew->syspath);
        eina_stringshare_del(ew->device);
        eina_stringshare_del(ew->name);
     }
   free(ew);
}
198
199#ifdef HAVE_EEZE
200static Eeze_Udev_Watch *eeze_watcher = NULL;
201
202static void
203_emotion_check_device(Emotion_Webcam *ew)
204{
205#ifdef HAVE_V4L2
206 Emotion_Webcam *check;
207 Eina_List *l;
208 struct v4l2_capability caps;
209 int fd;
210#endif
211
212 if (!ew) return ;
213#ifdef HAVE_V4L2
214 if (!ew->device) goto on_error;
215
216 fd = open(ew->filename, O_RDONLY);
217 if (fd < 0) goto on_error;
218
219 if (ioctl(fd, VIDIOC_QUERYCAP, &caps) == -1) goto on_error;
220
221 /* Likely not a webcam */
222 if (!caps.capabilities & V4L2_CAP_VIDEO_CAPTURE) goto on_error;
223 if (caps.capabilities & V4L2_CAP_TUNER
224 || caps.capabilities & V4L2_CAP_RADIO
225 || caps.capabilities & V4L2_CAP_MODULATOR)
226 goto on_error;
227
228 EINA_LIST_FOREACH(_emotion_webcams->webcams, l, check)
229 if (check->device == ew->device)
230 goto on_error;
231
232 _emotion_webcams->webcams = eina_list_append(_emotion_webcams->webcams, ew);
233
234 EINA_REFCOUNT_INIT(ew);
235
236 return ;
237
238 on_error:
239#endif
240 EINA_LOG_ERR("'%s' is not a webcam ['%s']", ew->name, strerror(errno));
241 eina_stringshare_del(ew->syspath);
242 eina_stringshare_del(ew->device);
243 eina_stringshare_del(ew->name);
244 free(ew);
245}
246
247static Emotion_Webcam *
248_emotion_webcam_new(const char *syspath)
249{
250 Emotion_Webcam *test;
251 const char *device;
252 char *local;
253
254 test = malloc(sizeof (Emotion_Webcam));
255 if (!test) return NULL;
256
257 test->custom = NULL;
258 test->syspath = eina_stringshare_ref(syspath);
259 test->name = eeze_udev_syspath_get_sysattr(syspath, "name");
260
261 device = eeze_udev_syspath_get_property(syspath, "DEVNAME");
262 local = alloca(eina_stringshare_strlen(device) + 8);
263 snprintf(local, eina_stringshare_strlen(device) + 8, "v4l2://%s", device);
264 test->device = eina_stringshare_add(local);
265 eina_stringshare_del(device);
266 test->filename = test->device + 7;
267
268 return test;
269}
270
271static void
272_emotion_enumerate_all_webcams(void)
273{
274 Eina_List *devices;
275 const char *syspath;
276
277 devices = eeze_udev_find_by_type(EEZE_UDEV_TYPE_V4L, NULL);
278
279 EINA_LIST_FREE(devices, syspath)
280 {
281 Emotion_Webcam *test;
282
283 test = _emotion_webcam_new(syspath);
284 if (test) _emotion_check_device(test);
285
286 eina_stringshare_del(syspath);
287 }
288}
289
/* eeze udev watch callback: keeps the global webcam list in sync with
 * hotplug add/remove events, then broadcasts EMOTION_WEBCAM_UPDATE so
 * applications can re-query emotion_webcams_get(). */
static void
_emotion_eeze_events(const char *syspath,
                     Eeze_Udev_Event ev,
                     void *data EINA_UNUSED,
                     Eeze_Udev_Watch *watcher EINA_UNUSED)
{
   if (ev == EEZE_UDEV_EVENT_REMOVE)
     {
        Emotion_Webcam *check;
        Eina_List *l;

        /* NOTE(review): stringshare pointer comparison -- assumes eeze hands
         * back a syspath sharing the stored stringshare; confirm, otherwise
         * removed devices would never match. */
        EINA_LIST_FOREACH(_emotion_webcams->webcams, l, check)
          if (check->syspath == syspath)
            {
               _emotion_webcams->webcams = eina_list_remove_list(_emotion_webcams->webcams, l);
               EINA_REFCOUNT_UNREF(check)
                 emotion_webcam_destroy(check);
               break ;
            }
     }
   else if (ev == EEZE_UDEV_EVENT_ADD)
     {
        Emotion_Webcam *test;

        test = _emotion_webcam_new(syspath);
        if (test) _emotion_check_device(test);
     }
   /* Notify listeners even if nothing matched, mirroring the original flow. */
   ecore_event_add(EMOTION_WEBCAM_UPDATE, NULL, NULL, NULL);
}
319
320#endif
321
322EAPI Eina_Bool
323emotion_init(void)
324{
325 char buffer[4096];
326
327 if (_emotion_webcams_count++) return EINA_TRUE;
328
329 ecore_init();
330
331 snprintf(buffer, 4096, "%s/emotion.cfg", PACKAGE_DATA_DIR);
332 _emotion_webcams_file = eet_open(buffer, EET_FILE_MODE_READ);
333 if (_emotion_webcams_file)
334 {
335 Eet_Data_Descriptor *edd;
336
337 edd = _emotion_webcams_data();
338
339 _emotion_webcams = eet_data_read(_emotion_webcams_file, edd, "config");
340
341 eet_data_descriptor_free(_webcams_edd); _webcams_edd = NULL;
342 eet_data_descriptor_free(_webcam_edd); _webcam_edd = NULL;
343 }
344
345 if (!_emotion_webcams)
346 {
347 _emotion_webcams = calloc(1, sizeof (Emotion_Webcams));
348 if (!_emotion_webcams) return EINA_FALSE;
349 }
350
351#ifdef HAVE_EEZE
352 EMOTION_WEBCAM_UPDATE = ecore_event_type_new();
353
354 eeze_init();
355
356 _emotion_enumerate_all_webcams();
357
358 eeze_watcher = eeze_udev_watch_add(EEZE_UDEV_TYPE_V4L,
359 (EEZE_UDEV_EVENT_ADD | EEZE_UDEV_EVENT_REMOVE),
360 _emotion_eeze_events, NULL);
361#endif
362
363 return EINA_TRUE;
364}
365
/* Shut Emotion down: refcounted counterpart of emotion_init(); only the last
 * call tears anything down. Frees the webcam registry, closes emotion.cfg,
 * removes the eeze watch, then gives pending engine pipelines up to ~0.5s of
 * main-loop iterations to wind down before stopping ecore.
 * Returns EINA_FALSE on unbalanced shutdown, EINA_TRUE otherwise. */
EAPI Eina_Bool
emotion_shutdown(void)
{
   Emotion_Webcam *ew;
   double start;

   if (_emotion_webcams_count <= 0)
     {
        EINA_LOG_ERR("Init count not greater than 0 in shutdown.");
        return EINA_FALSE;
     }
   if (--_emotion_webcams_count) return EINA_TRUE;

   EINA_LIST_FREE(_emotion_webcams->webcams, ew)
     {
        /* There is currently no way to refcount from the outside, this help, but could lead to some issue */
        EINA_REFCOUNT_UNREF(ew)
          emotion_webcam_destroy(ew);
     }
   free(_emotion_webcams);
   _emotion_webcams = NULL;

   if (_emotion_webcams_file)
     {
        /* As long as there is no one reference any pointer, you are safe */
        eet_close(_emotion_webcams_file);
        _emotion_webcams_file = NULL;
     }

#ifdef HAVE_EEZE
   eeze_udev_watch_del(eeze_watcher);
   eeze_watcher = NULL;

   eeze_shutdown();
#endif

   /* Best-effort drain: iterate the main loop while engines finish, but never
    * block for more than roughly half a second. */
   start = ecore_time_get();
   while (emotion_pending_objects && ecore_time_get() - start < 0.5)
     ecore_main_loop_iterate();

   if (emotion_pending_objects)
     {
        EINA_LOG_ERR("There is still %i Emotion pipeline running", emotion_pending_objects);
     }

   ecore_shutdown();

   return EINA_TRUE;
}
415
416EAPI const Eina_List *
417emotion_webcams_get(void)
418{
419 return _emotion_webcams->webcams;
420}
421
422EAPI const char *
423emotion_webcam_name_get(const Emotion_Webcam *ew)
424{
425 if (!ew) return NULL;
426
427 return ew->name;
428}
429
430EAPI const char *
431emotion_webcam_device_get(const Emotion_Webcam *ew)
432{
433 if (!ew) return NULL;
434
435 return ew->device;
436}
437
438EAPI const char *
439emotion_webcam_custom_get(const char *device)
440{
441 const Emotion_Webcam *ew;
442 const Eina_List *l;
443
444 if (_emotion_webcams)
445 {
446 EINA_LIST_FOREACH(_emotion_webcams->webcams, l, ew)
447 if (ew->device && strcmp(device, ew->device) == 0)
448 return ew->custom;
449 }
450
451 return NULL;
452}
453
454EAPI void
455_emotion_pending_object_ref(void)
456{
457 emotion_pending_objects++;
458}
459
460EAPI void
461_emotion_pending_object_unref(void)
462{
463 emotion_pending_objects--;
464}
diff --git a/src/lib/emotion/emotion_private.h b/src/lib/emotion/emotion_private.h
new file mode 100644
index 0000000000..73a1b7ddf0
--- /dev/null
+++ b/src/lib/emotion/emotion_private.h
@@ -0,0 +1,137 @@
1#ifndef EMOTION_PRIVATE_H
2#define EMOTION_PRIVATE_H
3
/* Meta-data identifiers passed as 'meta' to Emotion_Video_Module.meta_get(). */
#define META_TRACK_TITLE 1
#define META_TRACK_ARTIST 2
#define META_TRACK_GENRE 3
#define META_TRACK_COMMENT 4
#define META_TRACK_ALBUM 5
#define META_TRACK_YEAR 6
#define META_TRACK_DISCID 7
#define META_TRACK_COUNT 8

typedef enum _Emotion_Format Emotion_Format;
typedef struct _Emotion_Video_Module Emotion_Video_Module;
typedef struct _Emotion_Module_Options Emotion_Module_Options;
typedef struct _Eina_Emotion_Plugins Eina_Emotion_Plugins;

/* Engine module entry points registered via _emotion_module_register(). */
typedef Eina_Bool (*Emotion_Module_Open)(Evas_Object *, const Emotion_Video_Module **, void **, Emotion_Module_Options *);
typedef void (*Emotion_Module_Close)(Emotion_Video_Module *module, void *);

/* Pixel formats an engine can emit for a decoded frame. */
enum _Emotion_Format
{
   EMOTION_FORMAT_NONE,
   EMOTION_FORMAT_I420,
   EMOTION_FORMAT_YV12,
   EMOTION_FORMAT_YUY2, /* unused for now since evas does not support yuy2 format */
   EMOTION_FORMAT_BGRA
};

/* Options handed to a module when it is opened. */
struct _Emotion_Module_Options
{
   const char *player;       /* path/name of the external player (generic module) */
   Eina_Bool no_video : 1;   /* request audio-only pipelines */
   Eina_Bool no_audio : 1;   /* request video-only pipelines */
};

/* Open/close pair describing one registered engine plugin. */
struct _Eina_Emotion_Plugins
{
   Emotion_Module_Open open;
   Emotion_Module_Close close;
};
42
/* Backend vtable: every media engine (gstreamer, xine, generic, ...) fills
 * one of these with its entry points. The 'ef' argument of each callback is
 * the engine-private handle produced through init()'s 'video' out-parameter.
 * NOTE(review): semantics of each slot are defined by the engines, which are
 * outside this file -- the comments below only group the slots by topic. */
struct _Emotion_Video_Module
{
   /* lifecycle and file handling */
   unsigned char (*init) (Evas_Object *obj, void **video, Emotion_Module_Options *opt);
   int (*shutdown) (void *video);
   unsigned char (*file_open) (const char *file, Evas_Object *obj, void *video);
   void (*file_close) (void *ef);
   /* transport: play/stop/seek/timing */
   void (*play) (void *ef, double pos);
   void (*stop) (void *ef);
   void (*size_get) (void *ef, int *w, int *h);
   void (*pos_set) (void *ef, double pos);
   double (*len_get) (void *ef);
   double (*buffer_size_get) (void *ef);
   int (*fps_num_get) (void *ef);
   int (*fps_den_get) (void *ef);
   double (*fps_get) (void *ef);
   double (*pos_get) (void *ef);
   /* audio visualization */
   void (*vis_set) (void *ef, Emotion_Vis vis);
   Emotion_Vis (*vis_get) (void *ef);
   Eina_Bool (*vis_supported) (void *ef, Emotion_Vis vis);
   double (*ratio_get) (void *ef);
   int (*video_handled) (void *ef);
   int (*audio_handled) (void *ef);
   int (*seekable) (void *ef);
   /* frame data access */
   void (*frame_done) (void *ef);
   Emotion_Format (*format_get) (void *ef);
   void (*video_data_size_get) (void *ef, int *w, int *h);
   int (*yuv_rows_get) (void *ef, int w, int h, unsigned char **yrows, unsigned char **urows, unsigned char **vrows);
   int (*bgra_data_get) (void *ef, unsigned char **bgra_data);
   /* input events forwarded to the engine (DVD menus and the like) */
   void (*event_feed) (void *ef, int event);
   void (*event_mouse_button_feed) (void *ef, int button, int x, int y);
   void (*event_mouse_move_feed) (void *ef, int x, int y);
   /* video channels and subtitles */
   int (*video_channel_count) (void *ef);
   void (*video_channel_set) (void *ef, int channel);
   int (*video_channel_get) (void *ef);
   void (*video_subtitle_file_set) (void *ef, const char *filepath);
   const char * (*video_subtitle_file_get) (void *ef);
   const char * (*video_channel_name_get) (void *ef, int channel);
   void (*video_channel_mute_set) (void *ef, int mute);
   int (*video_channel_mute_get) (void *ef);
   /* audio channels */
   int (*audio_channel_count) (void *ef);
   void (*audio_channel_set) (void *ef, int channel);
   int (*audio_channel_get) (void *ef);
   const char * (*audio_channel_name_get) (void *ef, int channel);
   void (*audio_channel_mute_set) (void *ef, int mute);
   int (*audio_channel_mute_get) (void *ef);
   void (*audio_channel_volume_set) (void *ef, double vol);
   double (*audio_channel_volume_get) (void *ef);
   /* subpicture (SPU) channels */
   int (*spu_channel_count) (void *ef);
   void (*spu_channel_set) (void *ef, int channel);
   int (*spu_channel_get) (void *ef);
   const char * (*spu_channel_name_get) (void *ef, int channel);
   void (*spu_channel_mute_set) (void *ef, int mute);
   int (*spu_channel_mute_get) (void *ef);
   /* chapters, speed, media control, metadata */
   int (*chapter_count) (void *ef);
   void (*chapter_set) (void *ef, int chapter);
   int (*chapter_get) (void *ef);
   const char * (*chapter_name_get) (void *ef, int chapter);
   void (*speed_set) (void *ef, double speed);
   double (*speed_get) (void *ef);
   int (*eject) (void *ef);
   const char * (*meta_get) (void *ef, int meta);
   void (*priority_set) (void *ef, Eina_Bool priority);
   Eina_Bool (*priority_get) (void *ef);

   Eina_Emotion_Plugins *plugin; /* back-pointer to the owning plugin entry */
};
109
/* Callbacks the engines invoke on the smart object to report state changes
 * (implemented in emotion_smart.c). */
EAPI void *_emotion_video_get(const Evas_Object *obj);
EAPI void _emotion_frame_new(Evas_Object *obj);
EAPI void _emotion_video_pos_update(Evas_Object *obj, double pos, double len);
EAPI void _emotion_frame_resize(Evas_Object *obj, int w, int h, double ratio);
EAPI void _emotion_frame_refill(Evas_Object *obj, double w, double h);
EAPI void _emotion_decode_stop(Evas_Object *obj);
EAPI void _emotion_open_done(Evas_Object *obj);
EAPI void _emotion_playback_started(Evas_Object *obj);
EAPI void _emotion_playback_finished(Evas_Object *obj);
EAPI void _emotion_audio_level_change(Evas_Object *obj);
EAPI void _emotion_channels_change(Evas_Object *obj);
EAPI void _emotion_title_set(Evas_Object *obj, char *title);
EAPI void _emotion_progress_set(Evas_Object *obj, char *info, double stat);
EAPI void _emotion_file_ref_set(Evas_Object *obj, const char *file, int num);
EAPI void _emotion_spu_button_num_set(Evas_Object *obj, int num);
EAPI void _emotion_spu_button_set(Evas_Object *obj, int button);
EAPI void _emotion_seek_done(Evas_Object *obj);
EAPI void _emotion_image_reset(Evas_Object *obj);

/* Engine module registration (called from each backend's init code). */
EAPI Eina_Bool _emotion_module_register(const char *name, Emotion_Module_Open open, Emotion_Module_Close close);
EAPI Eina_Bool _emotion_module_unregister(const char *name);

/* Webcam configuration lookup (implemented in emotion_main.c). */
EAPI const char *emotion_webcam_custom_get(const char *device);

/* In-flight pipeline accounting used by emotion_shutdown(). */
EAPI void _emotion_pending_object_ref(void);
EAPI void _emotion_pending_object_unref(void);
136
137#endif
diff --git a/src/lib/emotion/emotion_smart.c b/src/lib/emotion/emotion_smart.c
new file mode 100644
index 0000000000..709414459c
--- /dev/null
+++ b/src/lib/emotion/emotion_smart.c
@@ -0,0 +1,2133 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <Evas.h>
6#include <Ecore.h>
7
8#ifdef HAVE_EIO
9# include <math.h>
10# include <Eio.h>
11#endif
12
13#include "Emotion.h"
14#include "emotion_private.h"
15
16#ifdef _WIN32
17# define FMT_UCHAR "%c"
18#else
19# define FMT_UCHAR "%hhu"
20#endif
21
/* Fetch the smart data of 'o' into 'smart', returning from the CALLING
 * function when 'o' is NULL, has no smart data, or is not of smart type
 * 'type'. Use in void functions only. */
#define E_SMART_OBJ_GET(smart, o, type) \
     { \
	char *_e_smart_str; \
	\
	if (!o) return; \
	smart = evas_object_smart_data_get(o); \
	if (!smart) return; \
	_e_smart_str = (char *)evas_object_type_get(o); \
	if (!_e_smart_str) return; \
	if (strcmp(_e_smart_str, type)) return; \
     }

/* Same validation as E_SMART_OBJ_GET but returns 'ret' from the calling
 * function, for use in non-void functions. */
#define E_SMART_OBJ_GET_RETURN(smart, o, type, ret) \
   { \
      char *_e_smart_str; \
      \
      if (!o) return ret; \
      smart = evas_object_smart_data_get(o); \
      if (!smart) return ret; \
      _e_smart_str = (char *)evas_object_type_get(o); \
      if (!_e_smart_str) return ret; \
      if (strcmp(_e_smart_str, type)) return ret; \
   }

/* Logging shortcuts bound to this file's eina log domain. */
#define DBG(...) EINA_LOG_DOM_DBG(_log_domain, __VA_ARGS__)
#define INF(...) EINA_LOG_DOM_INFO(_log_domain, __VA_ARGS__)
#define WRN(...) EINA_LOG_DOM_WARN(_log_domain, __VA_ARGS__)
#define ERR(...) EINA_LOG_DOM_ERR(_log_domain, __VA_ARGS__)
#define CRITICAL(...) EINA_LOG_DOM_CRIT(_log_domain, __VA_ARGS__)

/* Smart class/type name of the emotion object. */
#define E_OBJ_NAME "emotion_object"
53
54typedef struct _Smart_Data Smart_Data;
55
56struct _Smart_Data
57{
58 EINA_REFCOUNT;
59 Emotion_Video_Module *module;
60 void *video_data;
61
62 char *module_name;
63
64 const char *file;
65 Evas_Object *obj;
66 Evas_Object *bg;
67
68 Ecore_Job *job;
69
70 char *title;
71
72#ifdef HAVE_EIO
73 Eio_File *load_xattr;
74 Eio_File *save_xattr;
75#endif
76
77 struct {
78 char *info;
79 double stat;
80 } progress;
81 struct {
82 char *file;
83 int num;
84 } ref;
85 struct {
86 int button_num;
87 int button;
88