author     doursse <doursse>                                        2005-10-02 07:34:36 +0000
committer  doursse <doursse@7cbeb6ba-43b4-40fd-8cce-4c39aea84d33>   2005-10-02 07:34:36 +0000
commit     bad4d6e848af68f23d207b55d1fd5d4c741336ac (patch)
tree       c33956c097a4c070700db976017b969b4741db52
parent     dc5e3e07afcc52d47b8d80f40ef06c95c819a4a3 (diff)
emotion can use gstreamer, now
SVN revision: 17103
-rw-r--r--  legacy/emotion/AUTHORS                          |    3
-rw-r--r--  legacy/emotion/TODO                             |   15
-rw-r--r--  legacy/emotion/configure.in                     |   57
-rw-r--r--  legacy/emotion/emotion.c.in                     |    2
-rw-r--r--  legacy/emotion/src/bin/emotion_test_main.c      |   26
-rw-r--r--  legacy/emotion/src/lib/Emotion.h                |   11
-rw-r--r--  legacy/emotion/src/lib/emotion_smart.c          |   42
-rw-r--r--  legacy/emotion/src/modules/Makefile.am          |   28
-rw-r--r--  legacy/emotion/src/modules/emotion_gstreamer.c  | 1395
-rw-r--r--  legacy/emotion/src/modules/emotion_gstreamer.h  |   76
10 files changed, 1625 insertions(+), 30 deletions(-)
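
The central API change in this commit is that the decoder backend is no longer hard-wired to xine: emotion_object_init() now takes the file name of the decoder module, and emotion_test gains -xine and -gstreamer switches that select it. As orientation before the hunks below, here is a minimal caller-side sketch; the surrounding Evas/Ecore canvas setup is assumed and omitted, and the module names are the ones installed by src/modules/Makefile.am.

/* Caller-side sketch of the changed API; evas is an already created canvas. */
#include <Evas.h>
#include "Emotion.h"

static Evas_Object *
open_video(Evas *evas, const char *filename)
{
   Evas_Object *o;

   o = emotion_object_add(evas);
   /* pick the decoder module explicitly;
    * "emotion_decoder_xine.so" selects the existing xine backend */
   if (!emotion_object_init(o, "emotion_decoder_gstreamer.so"))
     return NULL;
   emotion_object_file_set(o, filename);
   emotion_object_play_set(o, 1);
   evas_object_resize(o, 320, 240);
   evas_object_show(o);
   return o;
}

emotion_test wires exactly these calls to its new -xine/-gstreamer command-line switches, defaulting to the xine module.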
diff --git a/legacy/emotion/AUTHORS b/legacy/emotion/AUTHORS
index 7bf31c1234..9ba33c333e 100644
--- a/legacy/emotion/AUTHORS
+++ b/legacy/emotion/AUTHORS
@@ -1,2 +1,3 @@
 The Rasterman (Carsten Haitzler) <raster@rasterman.com>
-
+doursse (Vincent Torri) <torri@maths.univ-evry.fr>
+captainigloo
diff --git a/legacy/emotion/TODO b/legacy/emotion/TODO
index dad5800ef0..9751806e04 100644
--- a/legacy/emotion/TODO
+++ b/legacy/emotion/TODO
@@ -1,2 +1,17 @@
+
+Emotion smart object
+--------------------
+
+
+Xine module
+-----------
+
 * Fix seek and get_pos threads delete bugs (infinite loop)
 * Add support of visualizations for audio files (Goom...)
+
+
+Gstreamer module
+----------------
+
+* Add gststreamselector to support multiple video and audio streams.
+* Add support of visualizations for audio files (Goom...)
diff --git a/legacy/emotion/configure.in b/legacy/emotion/configure.in
index 864b6ab05c..3e5d8b7f7d 100644
--- a/legacy/emotion/configure.in
+++ b/legacy/emotion/configure.in
@@ -130,17 +130,40 @@ AC_PATH_GENERIC(embryo, 0.9.1,
 )
 
 AC_PATH_GENERIC(xine, 1.0.0,
-  [ ],
-  [ AC_MSG_ERROR(Xine isn't installed)]
+  [HAVE_XINE="yes"],
+  [HAVE_XINE="no"]
 )
 
 xine_plugins=`$XINE_CONFIG --plugindir`
 AC_SUBST(xine_plugins)
 
+dnl gstreamer
+gstreamer_cflags=
+gstreamer_libs=
+
+dnl Gstreamer version requirement
+GST_REQS=0.8.10
+GSTPLUG_REQS=0.8.10
+GST_MAJORMINOR=0.8
+
+PKG_CHECK_MODULES(GST, \
+   gstreamer-$GST_MAJORMINOR >= $GST_REQS,
+   HAVE_GSTREAMER="yes", HAVE_GSTREAMER="no")
+
+AC_SUBST(GST_CFLAGS)
+AC_SUBST(GST_LIBS)
+
+if test "${HAVE_XINE}" = "no" && test "${HAVE_GSTREAMER}" = "no" ; then
+   AC_MSG_ERROR([Xine or Gstreamer must be installed to build emotion])
+fi
+
+AM_CONDITIONAL([BUILD_XINE_MODULE], [test "$HAVE_XINE" = yes])
+AM_CONDITIONAL([BUILD_GSTREAMER_MODULE], [test "$HAVE_GSTREAMER" = yes])
+
 AC_CHECK_LIB(dl, dlopen, dlopen_libs=-ldl)
 AC_SUBST(dlopen_libs)
 
-requirements="eet evas ecore edje libxine"
+requirements="eet evas ecore edje libxine gstreamer"
 AC_SUBST(requirements)
 
 AC_OUTPUT([
@@ -156,3 +179,31 @@ emotion-config
 ],[
 chmod +x emotion-config
 ])
+
+
+#####################################################################
+## Info
+
+echo
+echo
+echo
+echo "------------------------------------------------------------------------"
+echo "$PACKAGE $VERSION"
+echo "------------------------------------------------------------------------"
+echo
+echo "Configuration Options Summary:"
+echo
+echo "  Modules:"
+echo "    Xine...............: $HAVE_XINE"
+echo "    Gstreamer..........: $HAVE_GSTREAMER"
+echo
+echo "  Compilation..........: make"
+echo
+echo "  Installation.........: make install"
+echo
+echo "    prefix.........: $prefix"
+echo "    binaries.......: $bindir"
+echo "    libraries......: $libdir"
+echo "    headers........: $includedir"
+echo
+
diff --git a/legacy/emotion/emotion.c.in b/legacy/emotion/emotion.c.in
index fd09dc6d4b..aa01486960 100644
--- a/legacy/emotion/emotion.c.in
+++ b/legacy/emotion/emotion.c.in
@@ -9,7 +9,7 @@ These routines are used for Emotion.
 
 @mainpage Emotion Library Documentation
 @image html emotion.png
-@version 0.0.1
+@version @VERSION@
 @author Carsten Haitzler <raster@rasterman.com>
 @date 2003-2004
 
diff --git a/legacy/emotion/src/bin/emotion_test_main.c b/legacy/emotion/src/bin/emotion_test_main.c
index d35ca808ca..2a137eac06 100644
--- a/legacy/emotion/src/bin/emotion_test_main.c
+++ b/legacy/emotion/src/bin/emotion_test_main.c
@@ -715,7 +715,7 @@ video_obj_signal_frame_move_cb(void *data, Evas_Object *o, const char *emission,
 
 
 static void
-init_video_object(char *file)
+init_video_object(char *module_filename, char *filename)
 {
    Evas_Object *o, *oe;
    int iw, ih;
@@ -724,8 +724,10 @@ init_video_object(char *file)
 
 
 /* basic video object setup */
    o = emotion_object_add(evas);
-   emotion_object_file_set(o, file);
+   if (!emotion_object_init(o, module_filename))
+     return;
+   emotion_object_file_set(o, filename);
    emotion_object_play_set(o, 1);
    evas_object_move(o, 0, 0);
    evas_object_resize(o, 320, 240);
@@ -806,11 +808,14 @@ enter_idle(void *data)
 int
 main(int argc, char **argv)
 {
+   char *module_filename;
    int i;
 
    if (main_start(argc, argv) < 1) return -1;
    bg_setup();
 
+   module_filename = "emotion_decoder_xine.so";
+
    for (i = 1; i < argc; i++)
      {
        if (((!strcmp(argv[i], "-g")) ||
@@ -822,7 +827,7 @@ main(int argc, char **argv)
             (!strcmp(argv[i], "--help"))))
          {
             printf("Usage:\n");
-            printf("  %s [-gl] [-g WxH] \n", argv[0]);
+            printf("  %s [-gl] [-g WxH] [-xine] [-gstreamer] filename\n", argv[0]);
             exit(-1);
          }
        else if (!strcmp(argv[i], "-gl"))
@@ -831,9 +836,18 @@ main(int argc, char **argv)
        else if (!strcmp(argv[i], "-fb"))
          {
          }
-       else
+       else if (!strcmp(argv[i], "-xine"))
+         {
+            module_filename = "emotion_decoder_xine.so";
+         }
+       else if (!strcmp(argv[i], "-gstreamer"))
+         {
+            module_filename = "emotion_decoder_gstreamer.so";
+         }
+       else
          {
-            init_video_object(argv[i]);
+            printf ("module : %s\n", module_filename);
+            init_video_object(module_filename, argv[i]);
          }
      }
 
diff --git a/legacy/emotion/src/lib/Emotion.h b/legacy/emotion/src/lib/Emotion.h
index 692abb90e2..cc3ba7fc8e 100644
--- a/legacy/emotion/src/lib/Emotion.h
+++ b/legacy/emotion/src/lib/Emotion.h
@@ -3,6 +3,12 @@
 
 #include <Evas.h>
 
+enum _Emotion_Module
+{
+  EMOTION_MODULE_XINE,
+  EMOTION_MODULE_GSTREAMER
+};
+
 enum _Emotion_Event
 {
    EMOTION_EVENT_MENU1, // Escape Menu
@@ -46,6 +52,7 @@ enum _Emotion_Meta_Info
    EMOTION_META_INFO_TRACK_DISC_ID
 };
 
+typedef enum _Emotion_Module    Emotion_Module;
 typedef enum _Emotion_Event     Emotion_Event;
 typedef enum _Emotion_Meta_Info Emotion_Meta_Info;
 
@@ -58,8 +65,8 @@ extern "C" {
 
 /* api calls available */
 Evas_Object *emotion_object_add      (Evas *evas);
-Evas_Bool    emotion_object_init     (Evas_Object *obj);
-void         emotion_object_file_set (Evas_Object *obj, const char *file);
+Evas_Bool    emotion_object_init     (Evas_Object *obj, const char *module_filename);
+void         emotion_object_file_set (Evas_Object *obj, const char *filename);
 const char  *emotion_object_file_get (Evas_Object *obj);
 void         emotion_object_play_set (Evas_Object *obj, Evas_Bool play);
 Evas_Bool    emotion_object_play_get (Evas_Object *obj);
diff --git a/legacy/emotion/src/lib/emotion_smart.c b/legacy/emotion/src/lib/emotion_smart.c
index b9c6be3ead..1d02acd9ed 100644
--- a/legacy/emotion/src/lib/emotion_smart.c
+++ b/legacy/emotion/src/lib/emotion_smart.c
@@ -35,6 +35,8 @@ struct _Smart_Data
    Emotion_Video_Module  *module;
    void                  *video;
 
+   char                  *module_name;
+
    char        *file;
    Evas_Object *obj;
    double       ratio;
@@ -94,12 +96,15 @@ _emotion_module_open(const char *name, Evas_Object *obj, Emotion_Video_Module **
    void *handle;
    char buf[4096];
 
-   snprintf(buf, sizeof(buf), "%s/%s", PACKAGE_LIB_DIR"/emotion/", name);
+   snprintf(buf, sizeof(buf), "%s%s", PACKAGE_LIB_DIR"/emotion/",
+            name);
+   printf ("module : %s\n", buf);
    handle = dlopen(buf, RTLD_NOW | RTLD_GLOBAL);
    if (handle)
      {
        unsigned char (*func_module_open)(Evas_Object *, Emotion_Video_Module **, void **);
 
+       printf ("module opened\n");
        func_module_open = dlsym(handle, "module_open");
        if (func_module_open)
          {
@@ -111,6 +116,12 @@ _emotion_module_open(const char *name, Evas_Object *obj, Emotion_Video_Module **
          }
        dlclose(handle);
      }
+   else
+     {
+        char *err;
+        err = dlerror();
+        printf ("no module: %s\n", err);
+     }
    return 0;
 }
 
@@ -129,6 +140,9 @@ _emotion_module_close(Emotion_Video_Module *mod, void *video)
 /*******************************/
 /* Externally accessible calls */
 /*******************************/
+
+
+
 Evas_Object *
 emotion_object_add(Evas *evas)
 {
@@ -137,7 +151,7 @@ emotion_object_add(Evas *evas)
 }
 
 Evas_Bool
-emotion_object_init(Evas_Object *obj)
+emotion_object_init(Evas_Object *obj, const char *module_filename)
 {
    Smart_Data *sd;
 
@@ -162,10 +176,14 @@ emotion_object_init(Evas_Object *obj)
 
    if (!sd->module || !sd->video)
      {
-       if (!_emotion_module_open("emotion_decoder_xine.so", obj, &sd->module, &sd->video))
+       if (!_emotion_module_open(module_filename, obj, &sd->module, &sd->video))
          return 0;
      }
 
+   if (!sd->module || !sd->video)
+     if (!_emotion_module_open(module_filename, obj, &sd->module, &sd->video))
+       return 0;
+
    return 1;
 }
 
@@ -177,27 +195,22 @@ emotion_object_file_set(Evas_Object *obj, const char *file)
    E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
 
    if ((file) && (sd->file) && (!strcmp(file, sd->file))) return;
-
    if ((file) && (strlen(file) > 0))
      {
        int w, h;
-
-       if (!emotion_object_init(obj))
-         return;
        sd->file = strdup(file);
        if (sd->module)
          {
            sd->module->file_close(sd->video);
            evas_object_image_size_set(sd->obj, 0, 0);
          }
       if (!sd->module->file_open(sd->file, obj, sd->video))
         return;
       sd->module->size_get(sd->video, &w, &h);
       evas_object_image_size_set(sd->obj, w, h);
       sd->ratio = sd->module->ratio_get(sd->video);
       sd->pos = 0.0;
-       if (sd->play)
-         sd->module->play(sd->video, 0.0);
+       if (sd->play) sd->module->play(sd->video, 0.0);
      }
    else
      {
@@ -238,8 +251,11 @@ emotion_object_play_get(Evas_Object *obj)
 {
    Smart_Data *sd;
 
+   printf ("play get\n");
    E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
+   printf ("play get1\n");
    if (!sd->video) return 0;
+   printf ("play get2\n");
    return sd->play;
 }
 
@@ -810,7 +826,7 @@ _emotion_frame_resize(Evas_Object *obj, int w, int h, double ratio)
    Smart_Data *sd;
    int iw, ih;
    int changed = 0;
-
+   printf ("frame resize %d %d\n", w, h);
    E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
    evas_object_image_size_get(sd->obj, &iw, &ih);
    if ((w != iw) || (h != ih))
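
For readers skimming the loader above: the module file name passed to emotion_object_init() is resolved under PACKAGE_LIB_DIR/emotion/ and its module_open symbol is looked up with dlopen()/dlsym(). A condensed sketch of that path follows; Emotion_Video_Module and PACKAGE_LIB_DIR come from the emotion build, and the debug printf()s and dlerror() branch are trimmed.

/* Condensed sketch of _emotion_module_open() above. */
#include <dlfcn.h>
#include <stdio.h>

static unsigned char
_module_load(const char *name, Evas_Object *obj,
             Emotion_Video_Module **mod, void **video)
{
   char  buf[4096];
   void *handle;
   unsigned char (*func_module_open)(Evas_Object *, Emotion_Video_Module **, void **);

   snprintf(buf, sizeof(buf), "%s%s", PACKAGE_LIB_DIR"/emotion/", name);
   handle = dlopen(buf, RTLD_NOW | RTLD_GLOBAL);
   if (!handle) return 0;
   func_module_open = dlsym(handle, "module_open");
   if (func_module_open && func_module_open(obj, mod, video))
     return 1;
   dlclose(handle);
   return 0;
}

Each decoder module is therefore just a shared object exporting module_open()/module_close(), which is exactly what the new emotion_gstreamer.c below provides.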
diff --git a/legacy/emotion/src/modules/Makefile.am b/legacy/emotion/src/modules/Makefile.am
index 5182d39c77..a8f2074ac2 100644
--- a/legacy/emotion/src/modules/Makefile.am
+++ b/legacy/emotion/src/modules/Makefile.am
@@ -1,6 +1,13 @@
 ## Process this file with automake to produce Makefile.in
 
+if BUILD_XINE_MODULE
 SUBDIRS = xine
+XINE_LIB_NAME=emotion_decoder_xine.la
+endif
+
+if BUILD_GSTREAMER_MODULE
+GSTREAMER_LIB_NAME = emotion_decoder_gstreamer.la
+endif
 
 #AUTOMAKE_OPTIONS = 1.4 foreign
 
@@ -9,15 +16,28 @@ MAINTAINERCLEANFILES = Makefile.in
 INCLUDES = -I$(top_srcdir) \
            -I$(top_srcdir)/src/lib \
            -I$(top_srcdir)/src/modules \
-           @EVAS_CFLAGS@ @ECORE_CFLAGS@ @XINE_CFLAGS@
+           @EVAS_CFLAGS@ @ECORE_CFLAGS@ @XINE_CFLAGS@ @GST_CFLAGS@
+
+pkgdir = $(libdir)/emotion
 
-pkgdir = $(libdir)/emotion
+pkg_LTLIBRARIES = $(XINE_LIB_NAME) $(GSTREAMER_LIB_NAME)
 
-pkg_LTLIBRARIES = emotion_decoder_xine.la
+if BUILD_XINE_MODULE
 emotion_decoder_xine_la_SOURCES = \
 emotion_xine.c \
 emotion_xine.h
 emotion_decoder_xine_la_LIBADD = @EVAS_LIBS@ @ECORE_LIBS@ @XINE_LIBS@
 emotion_decoder_xine_la_LDFLAGS = -module -avoid-version \
                        -L$(top_builddir)/src/lib -L$(top_builddir)/src/lib/.libs
 emotion_decoder_xine_la_DEPENDENCIES = $(top_builddir)/config.h
+endif
+
+if BUILD_GSTREAMER_MODULE
+emotion_decoder_gstreamer_la_SOURCES = \
+emotion_gstreamer.c \
+emotion_gstreamer.h
+emotion_decoder_gstreamer_la_LIBADD = @EVAS_LIBS@ @ECORE_LIBS@ @GST_LIBS@
+emotion_decoder_gstreamer_la_LDFLAGS = -module -avoid-version \
+                       -L$(top_builddir)/src/lib -L$(top_builddir)/src/lib/.libs
+emotion_decoder_gstreamer_la_DEPENDENCIES = $(top_builddir)/config.h
+endif
diff --git a/legacy/emotion/src/modules/emotion_gstreamer.c b/legacy/emotion/src/modules/emotion_gstreamer.c
new file mode 100644
index 0000000000..8cf3552bfa
--- /dev/null
+++ b/legacy/emotion/src/modules/emotion_gstreamer.c
@@ -0,0 +1,1395 @@
1#include <unistd.h>
2#include <fcntl.h>
3#include <math.h>
4
5#include "Emotion.h"
6#include "emotion_private.h"
7#include "emotion_gstreamer.h"
8
9
10static Emotion_Gstreamer_Video *em_v;
11
12static int id_new_pad;
13static int id_no_more_pad;
14static int count = 0;
15static int video_streams_count = 0;
16static int audio_streams_count = 0;
17
18
19static void _em_set_pipeline_info (Emotion_Gstreamer_Video *ev);
20
21/* Callbacks to get stream information */
22static void cb_end_of_stream (GstElement *thread,
23 gpointer data);
24
25static gboolean cb_idle_eos (gpointer data);
26
27static void cb_thread_error (GstElement *thread,
28 GstElement *source,
29 GError *error,
30 gchar *debug,
31 gpointer data);
32
33static void cb_new_pad (GstElement *decodebin,
34 GstPad *pad,
35 gboolean last,
36 gpointer data);
37
38static void cb_no_more_pad (GstElement *decodebin,
39 gpointer data);
40
41static void cb_caps_video_set (GObject *obj,
42 GParamSpec *pspec,
43 gpointer data);
44
45static void cb_caps_audio_set (GObject *obj,
46 GParamSpec *pspec,
47 gpointer data);
48
49/* Callbacks to display the frame content */
50
51static void cb_handoff (GstElement *fakesrc,
52 GstBuffer *buffer,
53 GstPad *pad,
54 gpointer user_data);
55
56static int _em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh);
57
58/* Interface */
59
60static unsigned char em_init(Evas_Object *obj,
61 void **emotion_video);
62static int em_shutdown(void *video);
63static unsigned char em_file_open(const char *file,
64 Evas_Object *obj,
65 void *video);
66static void em_file_close(void *video);
67static void em_play(void *video,
68 double pos);
69static void em_stop(void *video);
70static void em_size_get(void *video,
71 int *width,
72 int *height);
73static void em_pos_set(void *video,
74 double pos);
75static double em_len_get(void *video);
76static double em_fps_get(void *video);
77static double em_pos_get(void *video);
78static double em_ratio_get(void *video);
79
80static int em_seekable(void *video);
81static void em_frame_done(void *video);
82static Emotion_Format em_format_get(void *video);
83static void em_video_data_size_get(void *video, int *w, int *h);
84static int em_yuv_rows_get(void *video, int w, int h, unsigned char **yrows, unsigned char **urows, unsigned char **vrows);
85static int em_bgra_data_get(void *video, unsigned char **bgra_data);
86static void em_event_feed(void *video, int event);
87static void em_event_mouse_button_feed(void *video, int button, int x, int y);
88static void em_event_mouse_move_feed(void *video, int x, int y);
89
90static int em_video_channel_count(void *video);
91static void em_video_channel_set(void *video,
92 int channel);
93static int em_video_channel_get(void *video);
94static const char *em_video_channel_name_get(void *video,
95 int channel);
96static void em_video_channel_mute_set(void *video,
97 int mute);
98static int em_video_channel_mute_get(void *video);
99
100static int em_audio_channel_count(void *video);
101static void em_audio_channel_set(void *video,
102 int channel);
103static int em_audio_channel_get(void *video);
104static const char *em_audio_channel_name_get(void *video,
105 int channel);
106static void em_audio_channel_mute_set(void *video,
107 int mute);
108static int em_audio_channel_mute_get(void *video);
109static void em_audio_channel_volume_set(void *video,
110 double vol);
111static double em_audio_channel_volume_get(void *video);
112
113static int em_spu_channel_count(void *video);
114static void em_spu_channel_set(void *video, int channel);
115static int em_spu_channel_get(void *video);
116static const char *em_spu_channel_name_get(void *video, int channel);
117static void em_spu_channel_mute_set(void *video, int mute);
118static int em_spu_channel_mute_get(void *video);
119
120static int em_chapter_count(void *video);
121static void em_chapter_set(void *video, int chapter);
122static int em_chapter_get(void *video);
123static const char *em_chapter_name_get(void *video, int chapter);
124static void em_speed_set(void *video, double speed);
125static double em_speed_get(void *video);
126static int em_eject(void *video);
127static const char *em_meta_get(void *video, int meta);
128
129
130static unsigned char
131em_init(Evas_Object *obj,
132 void **emotion_video)
133{
134 Emotion_Gstreamer_Video *ev;
135 int fds[2];
136
137 if (!emotion_video)
138 return 0;
139
140 printf ("Init gstreamer...\n");
141
142 ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
143 if (!ev) return 0;
144 ev->obj = obj;
145 ev->obj_data = NULL;
146
147 em_v = ev;
148
149 /* Initialization of gstreamer */
150 gst_init (NULL, NULL);
151
152 /* Gstreamer pipeline */
153 ev->pipeline = gst_thread_new ("pipeline");
154 g_signal_connect (ev->pipeline, "eos", G_CALLBACK (cb_end_of_stream), ev);
155 g_signal_connect (ev->pipeline, "error", G_CALLBACK (cb_thread_error), ev);
156
157 /* We allocate the sinks lists */
158 ev->video_sinks = ecore_list_new ();
159 ev->audio_sinks = ecore_list_new ();
160
161 *emotion_video = ev;
162
163 /* Default values */
164 ev->width = 1;
165 ev->height = 1;
166 ev->ratio = 1.0;
167
168 ev->position = 0;
169
170 /* Create the file descriptors */
171 if (pipe(fds) == 0)
172 {
173 ev->fd_ev_read = fds[0];
174 ev->fd_ev_write = fds[1];
175 fcntl(ev->fd_ev_read, F_SETFL, O_NONBLOCK);
176 ev->fd_ev_handler = ecore_main_fd_handler_add(ev->fd_ev_read,
177 ECORE_FD_READ,
178 _em_fd_ev_active,
179 ev,
180 NULL, NULL);
181 ecore_main_fd_handler_active_set(ev->fd_ev_handler, ECORE_FD_READ);
182 }
183
184 return 1;
185}
186
187static int
188em_shutdown(void *video)
189{
190 Emotion_Gstreamer_Video *ev;
191
192 ev = (Emotion_Gstreamer_Video *)video;
193
194 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
195 gst_object_unref (GST_OBJECT (ev->pipeline));
196
197 ecore_main_fd_handler_del(ev->fd_ev_handler);
198 close(ev->fd_ev_write);
199 close(ev->fd_ev_read);
200
201 free(ev);
202
203 return 1;
204}
205
206static unsigned char
207em_file_open(const char *file,
208 Evas_Object *obj,
209 void *video)
210{
211 Emotion_Gstreamer_Video *ev;
212 GstElement *filesrc;
213 GstElement *decoder;
214
215 ev = (Emotion_Gstreamer_Video *)video;
216 printf ("Open file gstreamer...\n");
217
218 /* Evas Object */
219 ev->obj = obj;
220
221 /* Gstreamer elements */
222 filesrc = gst_element_factory_make ("filesrc", "source");
223 g_object_set (G_OBJECT (filesrc), "location", file, NULL);
224 decoder = gst_element_factory_make ("decodebin", "decoder");
225 gst_bin_add_many (GST_BIN (ev->pipeline), filesrc, decoder, NULL);
226 gst_element_link (filesrc, decoder);
227
228 gst_element_set_state (ev->pipeline, GST_STATE_READY);
229
230 /* Set the callback to get the sinks */
231 id_new_pad = g_signal_connect (G_OBJECT (decoder),
232 "new-decoded-pad",
233 G_CALLBACK (cb_new_pad), ev);
234 id_no_more_pad = g_signal_connect (G_OBJECT (decoder),
235 "no-more-pads",
236 G_CALLBACK (cb_no_more_pad), ev);
237 /* Get the sinks */
238 printf ("get sinks\n");
239 gst_element_set_state (ev->pipeline, GST_STATE_PLAYING);
240
241 /* we set the streams to the first frame */
242/* ev->get_poslen = 0; */
243/* ev->seek_to = 0; */
244
245 return 1;
246}
247
248static void
249em_file_close(void *video)
250{
251 Emotion_Gstreamer_Video *ev;
252
253 ev = (Emotion_Gstreamer_Video *)video;
254 if (!ev)
255 return;
256
257
258 printf("EX pause end...\n");
259 if (!emotion_object_play_get(ev->obj))
260 {
261 printf(" ... unpause\n");
262 /* FIXME: do something ? */
263 }
264 printf("EX stop\n");
265 if (ev->pipeline)
266 gst_element_set_state (ev->pipeline, GST_STATE_READY);
267}
268
269static void
270em_play(void *video,
271 double pos)
272{
273 Emotion_Gstreamer_Video *ev;
274
275 ev = (Emotion_Gstreamer_Video *)video;
276 gst_element_set_state (ev->pipeline, GST_STATE_PLAYING);
277}
278
279static void
280em_stop(void *video)
281{
282 Emotion_Gstreamer_Video *ev;
283
284 ev = (Emotion_Gstreamer_Video *)video;
285
286 gst_element_set_state (ev->pipeline, GST_STATE_PAUSED);
287 ev->play = 0;
288}
289
290static void
291em_size_get(void *video,
292 int *width,
293 int *height)
294{
295 Emotion_Gstreamer_Video *ev;
296
297 ev = (Emotion_Gstreamer_Video *)video;
298 if (width) *width = ev->width;
299 if (height) *height = ev->height;
300}
301
302static void
303em_pos_set(void *video,
304 double pos)
305{
306 Emotion_Gstreamer_Video *ev;
307 GstElement *vsink = NULL;
308 GstElement *asink = NULL;
309 GstSeekType type;
310 guint64 time;
311
312 ev = (Emotion_Gstreamer_Video *)video;
313
314 if (ev->seek_to_pos == pos) return;
315
316 type = (GstSeekType)(GST_FORMAT_TIME |
317 GST_SEEK_METHOD_SET |
318 GST_SEEK_FLAG_FLUSH);
319
320 if (ecore_list_current (ev->video_sinks))
321 vsink = ((Emotion_Video_Sink *)(ecore_list_current (ev->video_sinks)))->sink;
322 if (ecore_list_current (ev->audio_sinks))
323 asink = ((Emotion_Audio_Sink *)(ecore_list_current (ev->audio_sinks)))->sink;
324
325
326 time = (guint64)(floor (pos));
327
328 if (vsink)
329 gst_element_seek(vsink, type, time);
330 if (asink)
331 gst_element_seek(asink, type, time);
332}
333
334static double
335em_len_get(void *video)
336{
337 Emotion_Gstreamer_Video *ev;
338 Emotion_Video_Sink *vs;
339
340 ev = (Emotion_Gstreamer_Video *)video;
341
342 if (!ecore_list_is_empty(ev->video_sinks))
343 {
344 vs = (Emotion_Video_Sink *)ecore_list_current(ev->video_sinks);
345 return vs->length_time;
346 }
347
348 return 0;
349}
350
351static double
352em_fps_get(void *video)
353{
354 Emotion_Gstreamer_Video *ev;
355 Emotion_Video_Sink *vs;
356
357 ev = (Emotion_Gstreamer_Video *)video;
358
359 /* FIXME: Maybe we can just get the fps of the current stream */
360 if (!ecore_list_is_empty(ev->video_sinks))
361 {
362 vs = (Emotion_Video_Sink *)ecore_list_current(ev->video_sinks);
363 return vs->framerate;
364 }
365
366 return 0;
367}
368
369static double
370em_pos_get(void *video)
371{
372 Emotion_Gstreamer_Video *ev;
373
374 ev = (Emotion_Gstreamer_Video *)video;
375
376 return 0;//ev->pos;
377}
378
379static double
380em_ratio_get(void *video)
381{
382 Emotion_Gstreamer_Video *ev;
383
384 ev = (Emotion_Gstreamer_Video *)video;
385
386 return ev->ratio;
387}
388
389static int
390em_video_handled(void *video)
391{
392 Emotion_Gstreamer_Video *ev;
393
394 ev = (Emotion_Gstreamer_Video *)video;
395
396 return 0;
397}
398
399static int
400em_audio_handled(void *video)
401{
402 Emotion_Gstreamer_Video *ev;
403
404 ev = (Emotion_Gstreamer_Video *)video;
405
406 return 0;
407}
408
409static int
410em_seekable(void *video)
411{
412 Emotion_Gstreamer_Video *ev;
413
414 ev = (Emotion_Gstreamer_Video *)video;
415
416 return 0;
417}
418
419static void
420em_frame_done(void *video)
421{
422 Emotion_Gstreamer_Video *ev;
423
424 ev = (Emotion_Gstreamer_Video *)video;
425}
426
427static Emotion_Format
428em_format_get (void *video)
429{
430 Emotion_Gstreamer_Video *ev;
431
432 ev = (Emotion_Gstreamer_Video *)video;
433
434 return EMOTION_YV12;
435}
436
437static void
438em_video_data_size_get(void *video, int *w, int *h)
439{
440 Emotion_Gstreamer_Video *ev;
441
442 ev = (Emotion_Gstreamer_Video *)video;
443
444 *w= ev->width;
445 *h = ev->height;
446}
447
448static int
449em_yuv_rows_get(void *video,
450 int w,
451 int h,
452 unsigned char **yrows,
453 unsigned char **urows,
454 unsigned char **vrows)
455{
456 Emotion_Gstreamer_Video *ev;
457 int i;
458
459 ev = (Emotion_Gstreamer_Video *)video;
460
461 if (ev->obj_data)
462 {
463 for (i = 0; i < h; i++)
464 yrows[i] = &ev->obj_data[i * w];
465
466 for (i = 0; i < (h / 2); i++)
467 urows[i] = &ev->obj_data[h * w + i * (w / 2) ];
468
469 for (i = 0; i < (h / 2); i++)
470 vrows[i] = &ev->obj_data[h * w + h * (w /4) + i * (w / 2)];
471
472 return 1;
473 }
474
475 return 0;
476}
477
478static int
479em_bgra_data_get(void *video, unsigned char **bgra_data)
480{
481 Emotion_Gstreamer_Video *ev;
482
483 ev = (Emotion_Gstreamer_Video *)video;
484
485 return 1;
486}
487
488static void
489em_event_feed(void *video, int event)
490{
491 Emotion_Gstreamer_Video *ev;
492
493 ev = (Emotion_Gstreamer_Video *)video;
494}
495
496static void
497em_event_mouse_button_feed(void *video, int button, int x, int y)
498{
499 Emotion_Gstreamer_Video *ev;
500
501 ev = (Emotion_Gstreamer_Video *)video;
502}
503
504static void
505em_event_mouse_move_feed(void *video, int x, int y)
506{
507 Emotion_Gstreamer_Video *ev;
508
509 ev = (Emotion_Gstreamer_Video *)video;
510}
511
512/* Video channels */
513static int
514em_video_channel_count(void *video)
515{
516 Emotion_Gstreamer_Video *ev;
517
518 ev = (Emotion_Gstreamer_Video *)video;
519
520 return ecore_list_nodes(ev->video_sinks);
521}
522
523static void
524em_video_channel_set(void *video,
525 int channel)
526{
527 Emotion_Gstreamer_Video *ev;
528
529 ev = (Emotion_Gstreamer_Video *)video;
530
531 if (channel < 0) channel = 0;
532   /* FIXME: to do... */
533}
534
535static int
536em_video_channel_get(void *video)
537{
538 Emotion_Gstreamer_Video *ev;
539
540 ev = (Emotion_Gstreamer_Video *)video;
541
542 return ecore_list_index(ev->video_sinks);
543}
544
545static const char *
546em_video_channel_name_get(void *video,
547 int channel)
548{
549 Emotion_Gstreamer_Video *ev;
550
551 ev = (Emotion_Gstreamer_Video *)video;
552
553 return NULL;
554}
555
556static void
557em_video_channel_mute_set(void *video,
558 int mute)
559{
560 Emotion_Gstreamer_Video *ev;
561
562 ev = (Emotion_Gstreamer_Video *)video;
563 ev->video_mute = mute;
564}
565
566static int
567em_video_channel_mute_get(void *video)
568{
569 Emotion_Gstreamer_Video *ev;
570
571 ev = (Emotion_Gstreamer_Video *)video;
572
573 return ev->video_mute;
574}
575
576/* Audio channels */
577
578static int
579em_audio_channel_count(void *video)
580{
581 Emotion_Gstreamer_Video *ev;
582
583 ev = (Emotion_Gstreamer_Video *)video;
584
585 return ecore_list_nodes(ev->audio_sinks);
586}
587
588static void
589em_audio_channel_set(void *video,
590 int channel)
591{
592 Emotion_Gstreamer_Video *ev;
593
594 ev = (Emotion_Gstreamer_Video *)video;
595
596 if (channel < -1) channel = -1;
597   /* FIXME: to do... */
598}
599
600static int
601em_audio_channel_get(void *video)
602{
603 Emotion_Gstreamer_Video *ev;
604
605 ev = (Emotion_Gstreamer_Video *)video;
606
607 return ecore_list_index(ev->audio_sinks);
608}
609
610static const char *
611em_audio_channel_name_get(void *video,
612 int channel)
613{
614 Emotion_Gstreamer_Video *ev;
615
616 ev = (Emotion_Gstreamer_Video *)video;
617
618 return NULL;
619}
620
621static void
622em_audio_channel_mute_set(void *video,
623 int mute)
624{
625 Emotion_Gstreamer_Video *ev;
626
627 ev = (Emotion_Gstreamer_Video *)video;
628
629 ev->audio_mute = mute;
630   /* FIXME: to do ... */
631}
632
633static int
634em_audio_channel_mute_get(void *video)
635{
636 Emotion_Gstreamer_Video *ev;
637
638 ev = (Emotion_Gstreamer_Video *)video;
639
640 return ev->audio_mute;
641}
642
643static void
644em_audio_channel_volume_set(void *video,
645 double vol)
646{
647 Emotion_Gstreamer_Video *ev;
648
649 ev = (Emotion_Gstreamer_Video *)video;
650
651 if (vol < 0.0)
652 vol = 0.0;
653 if (vol > 100.0)
654 vol = 100.0;
655 g_object_set (G_OBJECT (ev->pipeline), "volume",
656 vol / 100.0, NULL);
657}
658
659static double
660em_audio_channel_volume_get(void *video)
661{
662 Emotion_Gstreamer_Video *ev;
663 double vol;
664
665 ev = (Emotion_Gstreamer_Video *)video;
666
667 g_object_get (G_OBJECT (ev->pipeline), "volume", &vol, NULL);
668
669 return vol*100.0;
670}
671
672/* spu stuff */
673
674static int
675em_spu_channel_count(void *video)
676{
677 Emotion_Gstreamer_Video *ev;
678
679 ev = (Emotion_Gstreamer_Video *)video;
680
681 return 0;
682}
683
684static void
685em_spu_channel_set(void *video, int channel)
686{
687 Emotion_Gstreamer_Video *ev;
688
689 ev = (Emotion_Gstreamer_Video *)video;
690}
691
692static int
693em_spu_channel_get(void *video)
694{
695 Emotion_Gstreamer_Video *ev;
696
697 ev = (Emotion_Gstreamer_Video *)video;
698
699 return 1;
700}
701
702static const char *
703em_spu_channel_name_get(void *video, int channel)
704{
705 Emotion_Gstreamer_Video *ev;
706
707 ev = (Emotion_Gstreamer_Video *)video;
708 return NULL;
709}
710
711static void
712em_spu_channel_mute_set(void *video, int mute)
713{
714 Emotion_Gstreamer_Video *ev;
715
716 ev = (Emotion_Gstreamer_Video *)video;
717}
718
719static int
720em_spu_channel_mute_get(void *video)
721{
722 Emotion_Gstreamer_Video *ev;
723
724 ev = (Emotion_Gstreamer_Video *)video;
725
726 return 0;
727}
728
729static int
730em_chapter_count(void *video)
731{
732 Emotion_Gstreamer_Video *ev;
733
734 ev = (Emotion_Gstreamer_Video *)video;
735 return 0;
736}
737
738static void
739em_chapter_set(void *video, int chapter)
740{
741 Emotion_Gstreamer_Video *ev;
742
743 ev = (Emotion_Gstreamer_Video *)video;
744}
745
746static int
747em_chapter_get(void *video)
748{
749 Emotion_Gstreamer_Video *ev;
750
751 ev = (Emotion_Gstreamer_Video *)video;
752
753 return 0;
754}
755
756static const char *
757em_chapter_name_get(void *video, int chapter)
758{
759 Emotion_Gstreamer_Video *ev;
760
761 ev = (Emotion_Gstreamer_Video *)video;
762
763 return NULL;
764}
765
766static void
767em_speed_set(void *video, double speed)
768{
769 Emotion_Gstreamer_Video *ev;
770
771 ev = (Emotion_Gstreamer_Video *)video;
772}
773
774static double
775em_speed_get(void *video)
776{
777 Emotion_Gstreamer_Video *ev;
778
779 ev = (Emotion_Gstreamer_Video *)video;
780
781 return 1.0;
782}
783
784static int
785em_eject(void *video)
786{
787 Emotion_Gstreamer_Video *ev;
788
789 ev = (Emotion_Gstreamer_Video *)video;
790
791 return 1;
792}
793
794static const char *
795em_meta_get(void *video, int meta)
796{
797 Emotion_Gstreamer_Video *ev;
798
799 ev = (Emotion_Gstreamer_Video *)video;
800 return NULL;
801}
802
803/* Module interface */
804
805static Emotion_Video_Module em_module =
806{
807 em_init, /* init */
808 em_shutdown, /* shutdown */
809 em_file_open, /* file_open */
810 em_file_close, /* file_close */
811 em_play, /* play */
812 em_stop, /* stop */
813 em_size_get, /* size_get */
814 em_pos_set, /* pos_set */
815 em_len_get, /* len_get */
816 em_fps_get, /* fps_get */
817 em_pos_get, /* pos_get */
818 em_ratio_get, /* ratio_get */
819 em_video_handled, /* video_handled */
820 em_audio_handled, /* audio_handled */
821 em_seekable, /* seekable */
822 em_frame_done, /* frame_done */
823 em_format_get, /* format_get */
824 em_video_data_size_get, /* video_data_size_get */
825 em_yuv_rows_get, /* yuv_rows_get */
826 em_bgra_data_get, /* bgra_data_get */
827 em_event_feed, /* event_feed */
828 em_event_mouse_button_feed, /* event_mouse_button_feed */
829 em_event_mouse_move_feed, /* event_mouse_move_feed */
830 em_video_channel_count, /* video_channel_count */
831 em_video_channel_set, /* video_channel_set */
832 em_video_channel_get, /* video_channel_get */
833 em_video_channel_name_get, /* video_channel_name_get */
834 em_video_channel_mute_set, /* video_channel_mute_set */
835 em_video_channel_mute_get, /* video_channel_mute_get */
836 em_audio_channel_count, /* audio_channel_count */
837 em_audio_channel_set, /* audio_channel_set */
838 em_audio_channel_get, /* audio_channel_get */
839 em_audio_channel_name_get, /* audio_channel_name_get */
840 em_audio_channel_mute_set, /* audio_channel_mute_set */
841 em_audio_channel_mute_get, /* audio_channel_mute_get */
842 em_audio_channel_volume_set, /* audio_channel_volume_set */
843 em_audio_channel_volume_get, /* audio_channel_volume_get */
844 em_spu_channel_count, /* spu_channel_count */
845 em_spu_channel_set, /* spu_channel_set */
846 em_spu_channel_get, /* spu_channel_get */
847 em_spu_channel_name_get, /* spu_channel_name_get */
848 em_spu_channel_mute_set, /* spu_channel_mute_set */
849 em_spu_channel_mute_get, /* spu_channel_mute_get */
850 em_chapter_count, /* chapter_count */
851 em_chapter_set, /* chapter_set */
852 em_chapter_get, /* chapter_get */
853 em_chapter_name_get, /* chapter_name_get */
854 em_speed_set, /* speed_set */
855 em_speed_get, /* speed_get */
856 em_eject, /* eject */
857 em_meta_get /* meta_get */
858};
859
860unsigned char
861module_open(Evas_Object *obj,
862 Emotion_Video_Module **module,
863 void **video)
864{
865 if (!module)
866 return 0;
867
868 if (!em_module.init(obj, video))
869 return 0;
870
871 *module = &em_module;
872 return 1;
873}
874
875void
876module_close(Emotion_Video_Module *module,
877 void *video)
878{
879 em_module.shutdown(video);
880}
881
882#if 0
883void
884em_debug(Emotion_Xine_Video *ev)
885{
886 int has_chapters = 0;
887 int max_spu = 0;
888 int max_audio = 0;
889 int video_channels = 0;
890 int video_streams = 0;
891 int video_seekable = 0;
892 char *title;
893 char *comment;
894 char *artist;
895 char *genre;
896 char *album;
897 char *year;
898 char *cdindex_discid;
899 int video_channel = 0;
900 int audio_channel = 0;
901 int spu_channel = 0;
902 int video_ratio = 0;
903 int audio_mode = 0;
904
905// return;
906 has_chapters = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_HAS_CHAPTERS);
907 max_spu = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_MAX_SPU_CHANNEL);
908 max_audio = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_MAX_AUDIO_CHANNEL);
909 video_channels = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_VIDEO_CHANNELS);
910 video_streams = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_VIDEO_STREAMS);
911 video_seekable = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_SEEKABLE);
912 title = xine_get_meta_info(ev->stream, XINE_META_INFO_TITLE);
913 comment = xine_get_meta_info(ev->stream, XINE_META_INFO_COMMENT);
914 artist = xine_get_meta_info(ev->stream, XINE_META_INFO_ARTIST);
915 genre = xine_get_meta_info(ev->stream, XINE_META_INFO_GENRE);
916 album = xine_get_meta_info(ev->stream, XINE_META_INFO_ALBUM);
917 year = xine_get_meta_info(ev->stream, XINE_META_INFO_YEAR);
918 cdindex_discid = xine_get_meta_info(ev->stream, XINE_META_INFO_CDINDEX_DISCID);
919 video_channel = xine_get_param(ev->stream, XINE_PARAM_VIDEO_CHANNEL);
920 audio_channel = xine_get_param(ev->stream, XINE_PARAM_AUDIO_CHANNEL_LOGICAL);
921 spu_channel = xine_get_param(ev->stream, XINE_PARAM_SPU_CHANNEL);
922 video_ratio = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_VIDEO_RATIO);
923 audio_mode = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_AUDIO_MODE);
924 printf("has_chapters = %i\n", has_chapters);
925 printf("max_spu = %i\n", max_spu);
926 printf("max_audio = %i\n", max_audio);
927 printf("video_channels = %i\n", video_channels);
928 printf("video_streams = %i\n", video_streams);
929 printf("video_seekable = %i\n", video_seekable);
930 printf("title = %s\n", title);
931 printf("comment = %s\n", comment);
932 printf("artist = %s\n", artist);
933 printf("genre = %s\n", genre);
934 printf("album = %s\n", album);
935 printf("year = %s\n", year);
936 printf("cdindex_discid = %s\n", cdindex_discid);
937 printf("video_channel = %i\n", video_channel);
938 printf("audio_channel = %i\n", audio_channel);
939 printf("spu_channels = %i\n", spu_channel);
940 printf("video_ratio = %i\n", video_ratio);
941 printf("audio_mode = %i\n", audio_mode);
942 {
943 int i;
944
945 for (i = 0; i <= max_audio; i++)
946 {
947 char lang[XINE_LANG_MAX + 1];
948
949 lang[0] = 0;
950 printf(" AUDIO %i = ", i);
951 if (xine_get_audio_lang(ev->stream, i, lang))
952 printf("%s\n", lang);
953 else
954 printf("NONE\n");
955 }
956 for (i = 0; i <= max_spu; i++)
957 {
958 char lang[XINE_LANG_MAX + 1];
959
960 lang[0] = 0;
961 printf(" SPU %i = ", i);
962 if (xine_get_spu_lang(ev->stream, i, lang))
963 printf("%s\n", lang);
964 else
965 printf("NONE\n");
966 }
967 }
968}
969#endif
970
971static void
972_em_set_pipeline_info (Emotion_Gstreamer_Video *ev)
973{
974 Emotion_Video_Sink *video_sink;
975 GstElement *misc;
976
977 video_sink = (Emotion_Video_Sink *)ecore_list_goto_first (ev->video_sinks);
978 ev->width = video_sink->width;
979 ev->height = video_sink->height;
980 ev->ratio = (double)ev->width / (double)ev->height;
981
982 _emotion_frame_new(ev->obj);
983 _emotion_frame_resize(ev->obj,
984 ev->width, ev->height, ev->ratio);
985
986 /* we set the streams to the first frame */
987 ev->get_poslen = 0;
988 ev->seek_to = 0;
989
990 /* we add hand-offs signal to display the video */
991 misc = video_sink->sink;
992
993 g_object_set (G_OBJECT (misc ),"signal-handoffs", TRUE, NULL);
994 g_signal_connect (misc, "handoff", G_CALLBACK (cb_handoff),ev);
995}
996
997
998/*
999 * Callbacks
1000 */
1001
1002static void
1003cb_end_of_stream (GstElement *thread,
1004 gpointer data)
1005{
1006 Emotion_Gstreamer_Video* ev;
1007
1008 ev = (Emotion_Gstreamer_Video *)data;
1009 printf ("Have eos in thread %p\n", g_thread_self ());
1010 g_idle_add ((GSourceFunc) cb_idle_eos, data);
1011}
1012
1013static gboolean
1014cb_idle_eos (gpointer data)
1015{
1016 Emotion_Gstreamer_Video* ev;
1017
1018 ev = (Emotion_Gstreamer_Video *)data;
1019
1020 printf ("Have idle-func in thread %p\n", g_thread_self ());
1021
1022 _emotion_playback_finished(ev->obj);
1023
1024 return 0;
1025}
1026
1027static void
1028cb_thread_error (GstElement *thread,
1029 GstElement *source,
1030 GError *error,
1031 gchar *debug,
1032 gpointer data)
1033{
1034 printf ("Error in thread %p: %s\n", g_thread_self (), error->message);
1035 g_idle_add ((GSourceFunc) cb_idle_eos, NULL);
1036}
1037
1038
1039/*
1040 * Add a sink to the pipeline for each new stream found by the
1041 * decodebin element.
1042 */
1043void
1044cb_new_pad (GstElement *decodebin,
1045 GstPad *pad,
1046 gboolean last,
1047 gpointer data)
1048{
1049 Emotion_Gstreamer_Video *ev;
1050 GstCaps *caps;
1051 GstStructure *str;
1052 const gchar *mimetype;
1053
1054 ev = (Emotion_Gstreamer_Video *)data;
1055
1056 caps = gst_pad_get_caps (pad);
1057 str = gst_caps_get_structure (caps, 0);
1058 mimetype = gst_structure_get_name (str);
1059
1060 /* test */
1061 printf ("\nNew Pad : %s\n", gst_structure_to_string (str));
1062 /* end test */
1063
1064 if (g_str_has_prefix (mimetype, "audio/"))
1065 {
1066 Emotion_Audio_Sink *asink;
1067
1068 GstElement *audio_thread;
1069 GstElement *audioqueue;
1070 GstElement *conv;
1071 GstElement *scale;
1072 GstPad *audiopad;
1073
1074 char buf[1024];
1075 char *namebin;
1076
1077 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
1078 if (!asink)
1079 return;
1080 if (!ecore_list_append(ev->audio_sinks, asink))
1081 {
1082 free (asink);
1083 return;
1084 }
1085
1086 sprintf (buf, "audio_thread%d", ecore_list_nodes (ev->audio_sinks));
1087 namebin = strdup (buf);
1088 audio_thread = gst_thread_new (namebin);
1089 free (namebin);
1090 audioqueue = gst_element_factory_make ("queue", namebin);
1091 conv = gst_element_factory_make ("audioconvert", NULL);
1092 audiopad = gst_element_get_pad (conv, "sink");
1093 scale = gst_element_factory_make ("audioscale", NULL);
1094 if (audio_streams_count == 0)
1095 asink->sink = gst_element_factory_make ("alsasink", NULL);
1096 else
1097 asink->sink = gst_element_factory_make ("fakesink", NULL);
1098 gst_bin_add_many (GST_BIN (audio_thread),
1099 audioqueue, conv, scale, asink->sink, NULL);
1100 gst_element_link_many (audioqueue, conv, scale, asink->sink, NULL);
1101
1102
1103 /* Ghost pads for the thread */
1104 gst_element_add_ghost_pad (audio_thread,
1105 gst_element_get_pad (audioqueue,
1106 "sink"),
1107 "sink");
1108
1109 gst_pad_link (pad,
1110 gst_element_get_pad (audioqueue, "sink"));
1111
1112 /* We add the thread in the pipeline */
1113 gst_bin_add (GST_BIN (ev->pipeline), audio_thread);
1114 gst_bin_sync_children_state (GST_BIN (ev->pipeline));
1115
1116/* audiopad = gst_element_get_pad (ai->sink, "sink"); */
1117 audiopad = gst_element_get_pad (gst_pad_get_real_parent (pad),
1118 "src");
1119 g_signal_connect (audiopad, "notify::caps",
1120 G_CALLBACK (cb_caps_audio_set), asink);
1121
1122 audio_streams_count++;
1123 }
1124 else
1125 {
1126 if (g_str_has_prefix (mimetype, "video/"))
1127 {
1128 Emotion_Video_Sink *vsink;
1129
1130 GstElement *video_thread;
1131 GstElement *videoqueue;
1132 GstElement *vcs;
1133 GstPad *videopad;
1134
1135 char buf[1024];
1136 char *name;
1137
1138 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
1139 if (!ev->video_sinks)
1140 printf ("DIABOLIC !!!\n");
1141 if (!vsink)
1142 return;
1143 if (!ecore_list_append(ev->video_sinks, vsink))
1144 {
1145 free (vsink);
1146 return;
1147 }
1148 printf ("video, sink added\n");
1149
1150 sprintf (buf, "video_thread%d", ecore_list_nodes (ev->video_sinks));
1151 name = strdup (buf);
1152 video_thread = gst_thread_new (name);
1153 free (name);
1154 videoqueue = gst_element_factory_make ("queue", NULL);
1155 vcs = gst_element_factory_make ("ffmpegcolorspace", NULL);
1156 videopad = gst_element_get_pad (vcs, "sink");
1157 vsink->sink = gst_element_factory_make ("fakesink", NULL);
1158 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
1159 gst_bin_add_many (GST_BIN (video_thread),
1160 videoqueue, vcs, vsink->sink, NULL);
1161 gst_element_link_many (videoqueue, vcs, vsink->sink, NULL);
1162
1163
1164 /* Ghost pads for the thread */
1165 gst_element_add_ghost_pad (video_thread,
1166 gst_element_get_pad (videoqueue,
1167 "sink"),
1168 "sink");
1169
1170 gst_pad_link (pad,
1171 gst_element_get_pad (videoqueue, "sink"));
1172
1173 /* We add the thread in the pipeline */
1174 gst_bin_add (GST_BIN (ev->pipeline), video_thread);
1175 gst_bin_sync_children_state (GST_BIN (ev->pipeline));
1176
1177 /* audiopad = gst_element_get_pad (ai->sink, "sink");
1178 */
1179 videopad = gst_element_get_pad (gst_pad_get_real_parent (pad),
1180 "src");
1181 g_signal_connect (videopad, "notify::caps",
1182 G_CALLBACK (cb_caps_video_set), vsink);
1183
1184 video_streams_count++;
1185 }
1186 else
1187 {
1188 printf ("Unknown stream type\n");
1189 }
1190 }
1191}
1192
1193/*
1194 * Stop listening for new pads once there are no more streams to find.
1195 */
1196void
1197cb_no_more_pad (GstElement *decodebin,
1198 gpointer data)
1199{
1200 printf ("no more pads\n");
1201
1202 if (g_signal_handler_is_connected (G_OBJECT (decodebin), id_new_pad))
1203 g_signal_handler_disconnect (G_OBJECT (decodebin), id_new_pad);
1204 if (g_signal_handler_is_connected (G_OBJECT (decodebin), id_no_more_pad))
1205 g_signal_handler_disconnect (G_OBJECT (decodebin), id_no_more_pad);
1206}
1207
1208/*
1209 * Fill in the information for each video stream
1210 */
1211void
1212cb_caps_video_set (GObject *obj,
1213 GParamSpec *pspec,
1214 gpointer data)
1215{
1216 GstStructure *str;
1217 GstPad *pad = GST_PAD (obj);
1218 Emotion_Video_Sink *vsink;
1219
1220 if (!GST_PAD_CAPS (pad))
1221 return;
1222
1223 vsink = (Emotion_Video_Sink *)data;
1224
1225 str = gst_caps_get_structure (GST_PAD_CAPS (pad), 0);
1226 if (str)
1227 {
1228 gdouble framerate;
1229 const GValue *val;
1230 gint width;
1231 gint height;
1232
1233 if ((gst_structure_get_int (str, "width", &width)) &&
1234 (gst_structure_get_int (str, "height", &height)))
1235 {
1236 vsink->width = (unsigned int)width;
1237 vsink->height = (unsigned int)height;
1238 }
1239
1240 if (gst_structure_get_double (str, "framerate", &framerate))
1241 vsink->framerate = framerate;
1242
1243 val = gst_structure_get_value (str, "pixel-aspect-ratio");
1244 if (val)
1245 {
1246 vsink->par_num = (unsigned int)gst_value_get_fraction_numerator (val);
1247 vsink->par_den = (unsigned int)gst_value_get_fraction_denominator (val);
1248 }
1249 printf ("width : %d\n", width);
1250 printf ("height : %d\n", height);
1251 printf ("frame rate : %f\n", framerate);
1252
1253 {
1254 GstFormat fmt;
1255 gint64 len;
1256 GstElement *sink;
1257
1258 sink = vsink->sink;
1259 fmt = GST_FORMAT_TIME;
1260 if (gst_element_query (sink, GST_QUERY_TOTAL, &fmt, &len))
1261 {
1262 vsink->length_time = (unsigned long long)len;
1263
1264 fmt = GST_FORMAT_DEFAULT;
1265 if (gst_element_query (sink, GST_QUERY_TOTAL, &fmt, &len))
1266 vsink->length_frames = (unsigned long long)len;
1267 else
1268 vsink->length_frames = vsink->framerate *
1269 (double)vsink->length_time / 1000000000.0;
1270
1271 printf ("frame count : %lld\n", vsink->length_frames);
1272 count ++;
1273 printf ("video, count : %d, vsc : %d asc : %d\n",
1274 count, video_streams_count, audio_streams_count);
1275 if (count == video_streams_count + audio_streams_count)
1276 {
1277 _em_set_pipeline_info (em_v);
1278 }
1279 }
1280 }
1281 }
1282}
1283
1284/*
1285 * Fill in the information for each audio stream
1286 */
1287void
1288cb_caps_audio_set (GObject *obj,
1289 GParamSpec *pspec,
1290 gpointer data)
1291{
1292 GstStructure *str;
1293 GstPad *pad = GST_PAD (obj);
1294 Emotion_Audio_Sink *asink;
1295
1296 if (!GST_PAD_CAPS (pad))
1297 return;
1298
1299 asink = (Emotion_Audio_Sink *)data;
1300
1301 str = gst_caps_get_structure (GST_PAD_CAPS (pad), 0);
1302 if (str)
1303 {
1304 gint channels;
1305 gint samplerate;
1306
1307 if (gst_structure_get_int (str, "channels", &channels))
1308 asink->channels = (unsigned int)channels;
1309
1310 if (gst_structure_get_int (str, "rate", &samplerate))
1311 asink->samplerate = (unsigned int)samplerate;
1312
1313 printf ("channels : %d\n", channels);
1314 printf ("sample rate : %d\n", samplerate);
1315
1316 {
1317 GstFormat fmt;
1318 gint64 len;
1319 GstElement *sink;
1320
1321 sink = asink->sink;
1322 fmt = GST_FORMAT_TIME;
1323 if (gst_element_query (sink, GST_QUERY_TOTAL, &fmt, &len))
1324 {
1325 asink->length_time = (unsigned long long)len;
1326
1327 fmt = GST_FORMAT_DEFAULT;
1328 if (gst_element_query (sink, GST_QUERY_TOTAL, &fmt, &len))
1329 asink->length_samples = (unsigned long long)len;
1330 else
1331 asink->length_samples = asink->samplerate *
1332 (double)asink->length_time / 1000000000.0;
1333
1334 printf ("sample count : %lld\n", asink->length_samples);
1335 count ++;
1336 printf ("audio, count : %d, vsc : %d asc : %d\n",
1337 count, video_streams_count, audio_streams_count);
1338 if (count == video_streams_count + audio_streams_count)
1339 {
1340 _em_set_pipeline_info (em_v);
1341 }
1342 }
1343 }
1344 }
1345}
1346
1347/* Send the video frame to the evas object */
1348static void
1349cb_handoff (GstElement *fakesrc,
1350 GstBuffer *buffer,
1351 GstPad *pad,
1352 gpointer user_data)
1353{
1354 void *buf[2];
1355
1356 Emotion_Gstreamer_Video *ev = ( Emotion_Gstreamer_Video *) user_data;
1357
1358
1359 if (!ev->obj_data)
1360 ev->obj_data = (void*) malloc (GST_BUFFER_SIZE(buffer) * sizeof(void));
1361
1362 memcpy ( ev->obj_data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
1363 buf[0] = GST_BUFFER_DATA(buffer);
1364 buf[1] = buffer;
1365 write(ev->fd_ev_write, buf, sizeof(buf));
1366
1367}
1368
1369static int
1370_em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh)
1371{
1372 int fd;
1373 int len;
1374   void *buf[2]; /* matches the two-pointer record written by cb_handoff() */
1375 unsigned char *frame_data;
1376 Emotion_Gstreamer_Video *ev;
1377 GstBuffer *buffer;
1378
1379 ev = data;
1380 fd = ecore_main_fd_handler_fd_get(fdh);
1381
1382 while ((len = read(fd, buf, sizeof(buf))) > 0)
1383 {
1384 if (len == sizeof(buf))
1385 {
1386 frame_data = buf[0];
1387 buffer = buf[1];
1388 _emotion_frame_new(ev->obj);
1389 len = ((Emotion_Video_Sink *)ecore_list_goto_first(ev->video_sinks))->length_time;
1390 _emotion_video_pos_update(ev->obj, ev->position, len);
1391
1392 }
1393 }
1394 return 1;
1395}
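
A note on the frame path implemented above, since it crosses threads: cb_handoff() runs in a GStreamer thread, copies the decoded YV12 buffer into ev->obj_data and writes a two-pointer record into a pipe; _em_fd_ev_active() then runs in the Ecore main loop, drains the pipe and announces each new frame to the smart object. Condensed, the hand-over looks like the sketch below; names are the ones used in the module above, headers as included there, and this is illustrative rather than a replacement for the module code.

/* GStreamer thread side: copy the frame and wake the main loop. */
static void
handoff_sketch(GstElement *fakesink, GstBuffer *buffer, GstPad *pad, gpointer data)
{
   Emotion_Gstreamer_Video *ev = data;
   void *buf[2];

   if (!ev->obj_data)
     ev->obj_data = malloc(GST_BUFFER_SIZE(buffer));
   memcpy(ev->obj_data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
   buf[0] = GST_BUFFER_DATA(buffer);
   buf[1] = buffer;
   write(ev->fd_ev_write, buf, sizeof(buf));   /* wake the Ecore main loop */
}

/* Ecore main loop side: one record per decoded frame. */
static int
fd_active_sketch(void *data, Ecore_Fd_Handler *fdh)
{
   Emotion_Gstreamer_Video *ev = data;
   void *buf[2];

   while (read(ecore_main_fd_handler_fd_get(fdh), buf, sizeof(buf)) == sizeof(buf))
     _emotion_frame_new(ev->obj);   /* evas then pulls the pixels via em_yuv_rows_get() */
   return 1;
}

Evas never touches the GstBuffer directly; it reads the copied planes back through em_yuv_rows_get().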
diff --git a/legacy/emotion/src/modules/emotion_gstreamer.h b/legacy/emotion/src/modules/emotion_gstreamer.h
new file mode 100644
index 0000000000..4b725bb362
--- /dev/null
+++ b/legacy/emotion/src/modules/emotion_gstreamer.h
@@ -0,0 +1,76 @@
1#ifndef __EMOTION_GSTREAMER_H__
2#define __EMOTION_GSTREAMER_H__
3
4#include <Ecore_Data.h>
5
6#include <gst/gst.h>
7
8typedef struct _Emotion_Video_Sink Emotion_Video_Sink;
9
10struct _Emotion_Video_Sink
11{
12 GstElement *sink;
13 unsigned long long length_time;
14 unsigned long long length_frames;
15 unsigned int width;
16 unsigned int height;
17 unsigned int par_num;
18 unsigned int par_den;
19 double framerate;
20};
21
22typedef struct _Emotion_Audio_Sink Emotion_Audio_Sink;
23
24struct _Emotion_Audio_Sink
25{
26 GstElement *sink;
27 unsigned long long length_time;
28 unsigned long long length_samples;
29 unsigned int channels;
30 unsigned int samplerate;
31};
32
33typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video;
34
35struct _Emotion_Gstreamer_Video
36{
37 /* Gstreamer elements */
38 GstElement *pipeline;
39
40 /* Sinks */
41 Ecore_List *video_sinks;
42 Ecore_List *audio_sinks;
43
44 /* Evas object */
45 Evas_Object *obj;
46 unsigned char *obj_data;
47
48 /* Characteristics */
49 int position;
50 int width;
51 int height;
52 double ratio;
53
54 volatile int seek_to;
55 volatile int get_poslen;
56 volatile double seek_to_pos;
57
58 int fd_ev_read;
59 int fd_ev_write;
60 Ecore_Fd_Handler *fd_ev_handler;
61
62
63 unsigned char play : 1;
64 unsigned char video_mute : 1;
65 unsigned char audio_mute : 1;
66};
67
68unsigned char module_open (Evas_Object *obj,
69 Emotion_Video_Module **module,
70 void **video);
71
72void module_close (Emotion_Video_Module *module,
73 void *video);
74
75
76#endif /* __EMOTION_GSTREAMER_H__ */