summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSebastian Dröge <sebastian@centricular.com>2013-12-23 10:50:53 +0900
committerCedric BAIL <cedric.bail@samsung.com>2013-12-23 10:59:42 +0900
commit9c752106f75c9a6415971cd965eedaf3d15fedc6 (patch)
tree52c05f04936d7832c689f354624eca9468afaf7f
parent574c5c1ba78d6a02f62b26e4070ef266b0d4bc8e (diff)
emotion: initial port of emotion to GStreamer 1.0
Some commits to port emotion to GStreamer 1.0 and implement some missing features, clean up code a bit and fix some bugs on the way. This works as good as the 0.10 code for me now with the emotion examples, just the Samsung hardware specific code is commented out. This should be ported by someone who has such hardware, and also in a clean way now that GStreamer since 1.0 has features to handle all this properly. There's still a lot of potential to clean things up and fix many bugs, and also to implement zerocopy rendering. But those are for later if there's actual interest in this at all. Commits: - Update configure checks and ecore example to GStreamer 1.0 - Initial port of emotion to GStreamer 1.0 - Samsung specific code commented out, should be ported by someone with the hardware. - Return GST_FLOW_FLUSHING when the sink is unlocked - Remove unused GSignal from the sink - Use GstVideoInfo to store the format details inside the sink - Add support for pixel-aspect-ratio - Store video format information in GstVideoInfo for the different video streams - Use GstAudioInfo to store the audio format information - Remove some unused defines - Header cleanup - Implement initial support for GstNavigation interface - Implement setting of audio/video channel Reviewers: cedric CC: cedric Differential Revision: https://phab.enlightenment.org/D387 Signed-off-by: Cedric BAIL <cedric.bail@samsung.com>
-rw-r--r--configure.ac29
-rw-r--r--m4/emotion_module.m413
-rw-r--r--src/Makefile_Emotion.am35
-rw-r--r--src/examples/ecore/Makefile.examples9
-rw-r--r--src/examples/ecore/ecore_pipe_gstreamer_example.c18
-rw-r--r--src/modules/emotion/gstreamer1/emotion_alloc.c73
-rw-r--r--src/modules/emotion/gstreamer1/emotion_convert.c160
-rw-r--r--src/modules/emotion/gstreamer1/emotion_fakeeos.c47
-rw-r--r--src/modules/emotion/gstreamer1/emotion_gstreamer.c2053
-rw-r--r--src/modules/emotion/gstreamer1/emotion_gstreamer.h295
-rw-r--r--src/modules/emotion/gstreamer1/emotion_sink.c1006
11 files changed, 3729 insertions, 9 deletions
diff --git a/configure.ac b/configure.ac
index 9f3dcf1c2f..0751b391c3 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1796,8 +1796,8 @@ AC_ARG_ENABLE([g-main-loop],
1796 [want_g_main_loop="no"]) 1796 [want_g_main_loop="no"])
1797 1797
1798AC_ARG_ENABLE([gstreamer], 1798AC_ARG_ENABLE([gstreamer],
1799 [AC_HELP_STRING([--disable-gstreamer], 1799 [AC_HELP_STRING([--enable-gstreamer],
1800 [disable gstreamer support. @<:@default=enabled@:>@])], 1800 [enable gstreamer 0.10 support. @<:@default=disabled@:>@])],
1801 [ 1801 [
1802 if test "x${enableval}" = "xyes" ; then 1802 if test "x${enableval}" = "xyes" ; then
1803 want_gstreamer="yes" 1803 want_gstreamer="yes"
@@ -1805,7 +1805,19 @@ AC_ARG_ENABLE([gstreamer],
1805 want_gstreamer="no" 1805 want_gstreamer="no"
1806 fi 1806 fi
1807 ], 1807 ],
1808 [want_gstreamer="yes"]) 1808 [want_gstreamer="no"])
1809
1810AC_ARG_ENABLE([gstreamer1],
1811 [AC_HELP_STRING([--disable-gstreamer1],
1812 [disable gstreamer 1.0 support. @<:@default=enabled@:>@])],
1813 [
1814 if test "x${enableval}" = "xyes" ; then
1815 want_gstreamer1="yes"
1816 else
1817 want_gstreamer1="no"
1818 fi
1819 ],
1820 [want_gstreamer1="yes"])
1809 1821
1810AC_ARG_ENABLE([tizen], 1822AC_ARG_ENABLE([tizen],
1811 [AC_HELP_STRING([--enable-tizen], 1823 [AC_HELP_STRING([--enable-tizen],
@@ -1882,10 +1894,17 @@ if test "x${want_g_main_loop}" = "xyes" ; then
1882fi 1894fi
1883 1895
1884# not EFL_OPTIONAL_DEPEND_PKG() because it's only used for ecore examples 1896# not EFL_OPTIONAL_DEPEND_PKG() because it's only used for ecore examples
1897if test "${want_gstreamer1}" = "yes" -a "${want_gstreamer}" = "yes"; then
1898 AC_MSG_ERROR([You can only enable either GStreamer 1.0 or GStreamer 0.10 support])
1899fi
1900
1901if test "${want_gstreamer1}" = "yes"; then
1902 PKG_CHECK_MODULES([GSTREAMER], [gstreamer-1.0])
1903fi
1885if test "${want_gstreamer}" = "yes"; then 1904if test "${want_gstreamer}" = "yes"; then
1886 PKG_CHECK_MODULES([GSTREAMER], [gstreamer-0.10]) 1905 PKG_CHECK_MODULES([GSTREAMER], [gstreamer-0.10])
1887fi 1906fi
1888AM_CONDITIONAL([HAVE_GSTREAMER], [test "${want_gstreamer}" = "yes"]) 1907AM_CONDITIONAL([HAVE_GSTREAMER], [test "${want_gstreamer}" = "yes" -o "${want_gstreamer1}" = "yes"])
1889 1908
1890EFL_EVAL_PKGS([ECORE]) 1909EFL_EVAL_PKGS([ECORE])
1891 1910
@@ -3663,10 +3682,12 @@ have_gst_xoverlay="no"
3663 3682
3664EMOTION_MODULE([xine], [${want_xine}]) 3683EMOTION_MODULE([xine], [${want_xine}])
3665EMOTION_MODULE([gstreamer], [${want_gstreamer}]) 3684EMOTION_MODULE([gstreamer], [${want_gstreamer}])
3685EMOTION_MODULE([gstreamer1], [${want_gstreamer1}])
3666EMOTION_MODULE([generic], [${want_emotion_generic}]) 3686EMOTION_MODULE([generic], [${want_emotion_generic}])
3667 3687
3668EFL_ADD_FEATURE([EMOTION], [xine]) 3688EFL_ADD_FEATURE([EMOTION], [xine])
3669EFL_ADD_FEATURE([EMOTION], [gstreamer]) 3689EFL_ADD_FEATURE([EMOTION], [gstreamer])
3690EFL_ADD_FEATURE([EMOTION], [gstreamer1])
3670EFL_ADD_FEATURE([EMOTION], [generic], [${want_emotion_generic}]) 3691EFL_ADD_FEATURE([EMOTION], [generic], [${want_emotion_generic}])
3671 3692
3672EFL_EVAL_PKGS([EMOTION]) 3693EFL_EVAL_PKGS([EMOTION])
diff --git a/m4/emotion_module.m4 b/m4/emotion_module.m4
index 7685f992c2..75884e78d4 100644
--- a/m4/emotion_module.m4
+++ b/m4/emotion_module.m4
@@ -45,6 +45,19 @@ AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GSTREAMER],
45 fi 45 fi
46]) 46])
47 47
dnl use: EMOTION_MODULE_DEP_CHECK_GSTREAMER1(want_static)
dnl where want_engine = yes or static
dnl Checks the GStreamer 1.0 core + base plugin packages needed by the
dnl emotion gstreamer1 module; adds them to EMOTION's deps when static.
AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GSTREAMER1],
[dnl
   GST_VER=1.0
   requirements="gstreamer-1.0 >= ${GST_VER} gstreamer-plugins-base-1.0 >= ${GST_VER} gstreamer-video-1.0 >= ${GST_VER} gstreamer-audio-1.0 >= ${GST_VER} gstreamer-tag-1.0 >= ${GST_VER}"
   if test "$1" = "static"; then
      EFL_DEPEND_PKG([EMOTION], [EMOTION_MODULE_GSTREAMER1], [${requirements}])
   else
      PKG_CHECK_MODULES([EMOTION_MODULE_GSTREAMER1], [${requirements}])
   fi
])
60
48dnl use: EMOTION_MODULE_DEP_CHECK_GENERIC(want_static) 61dnl use: EMOTION_MODULE_DEP_CHECK_GENERIC(want_static)
49dnl where want_engine = yes or static 62dnl where want_engine = yes or static
50AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GENERIC], 63AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GENERIC],
diff --git a/src/Makefile_Emotion.am b/src/Makefile_Emotion.am
index 4689f7d135..4796018113 100644
--- a/src/Makefile_Emotion.am
+++ b/src/Makefile_Emotion.am
@@ -52,7 +52,7 @@ modules_emotion_xine_module_la_LIBTOOLFLAGS = --tag=disable-static
52endif 52endif
53endif 53endif
54 54
55# Gstreamer 55# Gstreamer 0.10
56EMOTION_GSTREAMER_SOURCES = \ 56EMOTION_GSTREAMER_SOURCES = \
57modules/emotion/gstreamer/emotion_gstreamer.h \ 57modules/emotion/gstreamer/emotion_gstreamer.h \
58modules/emotion/gstreamer/emotion_gstreamer.c \ 58modules/emotion/gstreamer/emotion_gstreamer.c \
@@ -85,6 +85,39 @@ endif
85endif 85endif
86endif 86endif
87 87
88# Gstreamer 1.0
89EMOTION_GSTREAMER1_SOURCES = \
90modules/emotion/gstreamer1/emotion_gstreamer.h \
91modules/emotion/gstreamer1/emotion_gstreamer.c \
92modules/emotion/gstreamer1/emotion_alloc.c \
93modules/emotion/gstreamer1/emotion_convert.c \
94modules/emotion/gstreamer1/emotion_fakeeos.c \
95modules/emotion/gstreamer1/emotion_sink.c
96
97if EMOTION_STATIC_BUILD_GSTREAMER1
98lib_emotion_libemotion_la_SOURCES += $(EMOTION_GSTREAMER1_SOURCES)
99else
100if EMOTION_BUILD_GSTREAMER1
101emotionmodulegstreamer1dir = $(libdir)/emotion/modules/gstreamer1/$(MODULE_ARCH)
102emotionmodulegstreamer1_LTLIBRARIES = modules/emotion/gstreamer1/module.la
103modules_emotion_gstreamer1_module_la_SOURCES = $(EMOTION_GSTREAMER1_SOURCES)
104modules_emotion_gstreamer1_module_la_CPPFLAGS = -I$(top_builddir)/src/lib/efl \
105@EMOTION_CFLAGS@ \
106@EMOTION_MODULE_GSTREAMER1_CFLAGS@
107modules_emotion_gstreamer1_module_la_LIBADD = \
108@USE_EMOTION_LIBS@ \
109@EMOTION_MODULE_GSTREAMER1_LIBS@
110modules_emotion_gstreamer1_module_la_DEPENDENCIES = @USE_EMOTION_INTERNAL_LIBS@
111modules_emotion_gstreamer1_module_la_LDFLAGS = -module @EFL_LTMODULE_FLAGS@
112modules_emotion_gstreamer1_module_la_LIBTOOLFLAGS = --tag=disable-static
113if HAVE_ECORE_X
114modules_emotion_gstreamer1_module_la_CPPFLAGS += @ECORE_X_CFLAGS@ @ECORE_EVAS_CFLAGS@
115modules_emotion_gstreamer1_module_la_LIBADD += @USE_ECORE_X_LIBS@ @USE_ECORE_EVAS_LIBS@
116modules_emotion_gstreamer1_module_la_DEPENDENCIES += @USE_ECORE_X_INTERNAL_LIBS@ @USE_ECORE_EVAS_INTERNAL_LIBS@
117endif
118endif
119endif
120
88# Generic 121# Generic
89EMOTION_GENERIC_SOURCES = \ 122EMOTION_GENERIC_SOURCES = \
90modules/emotion/generic/emotion_generic.h \ 123modules/emotion/generic/emotion_generic.h \
diff --git a/src/examples/ecore/Makefile.examples b/src/examples/ecore/Makefile.examples
index f8ac82d50f..63e5f6b796 100644
--- a/src/examples/ecore/Makefile.examples
+++ b/src/examples/ecore/Makefile.examples
@@ -1,6 +1,13 @@
1CC=gcc 1CC=gcc
2
3if HAVE_GSTREAMER_1
4 GSTREAMER_DEP="gstreamer-1.0"
5else
6 GSTREAMER_DEP="gstreamer-0.10"
7endif
8
2COMMON_FLAGS=`pkg-config --libs --cflags eina,ecore,evas,ecore-evas,ecore-audio,ecore-con,ecore-file, \ 9COMMON_FLAGS=`pkg-config --libs --cflags eina,ecore,evas,ecore-evas,ecore-audio,ecore-con,ecore-file, \
3 eo,evas-software-buffer,gnutls,ecore-imf,ecore-imf-evas,gstreamer-0.10` -lm 10 eo,evas-software-buffer,gnutls,ecore-imf,ecore-imf-evas,$(GSTREAMER_DEP)` -lm
4 11
5EXAMPLES= ecore_animator_example \ 12EXAMPLES= ecore_animator_example \
6 ecore_audio_custom \ 13 ecore_audio_custom \
diff --git a/src/examples/ecore/ecore_pipe_gstreamer_example.c b/src/examples/ecore/ecore_pipe_gstreamer_example.c
index 008b96e5d8..20d3fa1163 100644
--- a/src/examples/ecore/ecore_pipe_gstreamer_example.c
+++ b/src/examples/ecore/ecore_pipe_gstreamer_example.c
@@ -1,4 +1,6 @@
1//Compile with: 1//Compile with:
2// gcc -o ecore_pipe_gstreamer_example ecore_pipe_gstreamer_example.c `pkg-config --libs --cflags ecore gstreamer-1.0`
3// or
2// gcc -o ecore_pipe_gstreamer_example ecore_pipe_gstreamer_example.c `pkg-config --libs --cflags ecore gstreamer-0.10` 4// gcc -o ecore_pipe_gstreamer_example ecore_pipe_gstreamer_example.c `pkg-config --libs --cflags ecore gstreamer-0.10`
3 5
4#include <gst/gst.h> 6#include <gst/gst.h>
@@ -98,23 +100,33 @@ new_decoded_pad_cb(GstElement *demuxer,
98 GstElement *decoder; 100 GstElement *decoder;
99 GstPad *pad; 101 GstPad *pad;
100 GstCaps *caps; 102 GstCaps *caps;
101 gchar *str; 103 GstStructure *s;
104 const gchar *str;
102 105
106#if GST_CHECK_VERSION(1,0,0)
107 caps = gst_pad_get_current_caps(new_pad);
108#else
103 caps = gst_pad_get_caps(new_pad); 109 caps = gst_pad_get_caps(new_pad);
104 str = gst_caps_to_string(caps); 110#endif
111 s = gst_caps_get_structure(caps, 0);
112 str = gst_structure_get_name(s);
105 113
106 if (g_str_has_prefix(str, "video/")) 114 if (g_str_has_prefix(str, "video/"))
107 { 115 {
108 decoder = GST_ELEMENT(user_data); 116 decoder = GST_ELEMENT(user_data);
109 117
118#if GST_CHECK_VERSION(1,0,0)
119 pad = gst_element_get_static_pad(decoder, "sink");
120#else
110 pad = gst_element_get_pad(decoder, "sink"); 121 pad = gst_element_get_pad(decoder, "sink");
122#endif
111 if (GST_PAD_LINK_FAILED(gst_pad_link(new_pad, pad))) 123 if (GST_PAD_LINK_FAILED(gst_pad_link(new_pad, pad)))
112 { 124 {
113 g_warning("Failed to link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(new_pad), 125 g_warning("Failed to link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(new_pad),
114 GST_DEBUG_PAD_NAME(pad)); 126 GST_DEBUG_PAD_NAME(pad));
115 } 127 }
128 gst_object_unref(pad);
116 } 129 }
117 g_free(str);
118 gst_caps_unref(caps); 130 gst_caps_unref(caps);
119} 131}
120 132
diff --git a/src/modules/emotion/gstreamer1/emotion_alloc.c b/src/modules/emotion/gstreamer1/emotion_alloc.c
new file mode 100644
index 0000000000..80d1160217
--- /dev/null
+++ b/src/modules/emotion/gstreamer1/emotion_alloc.c
@@ -0,0 +1,73 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include "emotion_gstreamer.h"
6
7Emotion_Gstreamer_Buffer *
8emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
9 GstBuffer *buffer,
10 Eina_Bool preroll)
11{
12 Emotion_Gstreamer_Buffer *send;
13
14 if (!sink->ev) return NULL;
15
16 send = malloc(sizeof (Emotion_Gstreamer_Buffer));
17 if (!send) return NULL;
18
19 send->sink = sink;
20 send->frame = gst_buffer_ref(buffer);
21 send->preroll = preroll;
22 send->force = EINA_FALSE;
23 sink->ev->out++;
24 send->ev = sink->ev;
25
26 return send;
27}
28
29void
30emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send)
31{
32 send->ev->in++;
33
34 if (send->ev->in == send->ev->out
35 && send->ev->threads == NULL
36 && send->ev->delete_me)
37 send->ev->api->del(send->ev);
38
39 gst_buffer_unref(send->frame);
40 free(send);
41}
42
43Emotion_Gstreamer_Message *
44emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
45 GstMessage *msg)
46{
47 Emotion_Gstreamer_Message *send;
48
49 if (!ev) return NULL;
50
51 send = malloc(sizeof (Emotion_Gstreamer_Message));
52 if (!send) return NULL;
53
54 ev->out++;
55 send->ev = ev;
56 send->msg = gst_message_ref(msg);
57
58 return send;
59}
60
61void
62emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send)
63{
64 send->ev->in++;
65
66 if (send->ev->in == send->ev->out
67 && send->ev->threads == NULL
68 && send->ev->delete_me)
69 send->ev->api->del(send->ev);
70
71 gst_message_unref(send->msg);
72 free(send);
73}
diff --git a/src/modules/emotion/gstreamer1/emotion_convert.c b/src/modules/emotion/gstreamer1/emotion_convert.c
new file mode 100644
index 0000000000..2a3aaa58ed
--- /dev/null
+++ b/src/modules/emotion/gstreamer1/emotion_convert.c
@@ -0,0 +1,160 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include "emotion_gstreamer.h"
6
/* Copy w*output_height packed BGR(x) pixels into 32-bit output, forcing
 * the fourth byte to opaque (255).  @step is the source pixel size in
 * bytes (3 for BGR, 4 for BGRx); the source is read contiguously. */
static inline void
_evas_video_bgrx_step(unsigned char *evas_data, const unsigned char *gst_data,
                      unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height, unsigned int step)
{
   unsigned int remaining = w * output_height;

   while (remaining--)
     {
        evas_data[0] = gst_data[0];
        evas_data[1] = gst_data[1];
        evas_data[2] = gst_data[2];
        evas_data[3] = 255;
        gst_data += step;
        evas_data += 4;
     }
}
27
/* BGR (24-bit) source: 3 bytes per source pixel, expanded to opaque 32-bit. */
static void
_evas_video_bgr(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 3);
}

/* BGRx (32-bit, padding byte ignored): 4 bytes per source pixel. */
static void
_evas_video_bgrx(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 4);
}
39
/* BGRA source: copy w*output_height pixels, premultiplying the color
 * channels by alpha (straight -> premultiplied, as the Evas ARGB8888
 * colorspace in this module expects). */
static void
_evas_video_bgra(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
{
   unsigned int row, col;

   for (row = 0; row < output_height; ++row)
     {
        for (col = 0; col < w; ++col)
          {
             const unsigned char a = gst_data[3];

             evas_data[0] = (gst_data[0] * a) / 255;
             evas_data[1] = (gst_data[1] * a) / 255;
             evas_data[2] = (gst_data[2] * a) / 255;
             evas_data[3] = a;
             gst_data += 4;
             evas_data += 4;
          }
     }
}
62
/* I420 (planar Y, U, V): evas_data is really a table of row pointers; fill
 * it so Evas can address each line of the three planes inside gst_data.
 * Strides follow GStreamer's defaults: Y rounded up to 4, U/V to half of
 * the width rounded up to 8. */
static void
_evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned char **rows = (const unsigned char **)evas_data;
   const unsigned int stride_y = GST_ROUND_UP_4(w);
   const unsigned int stride_uv = GST_ROUND_UP_8(w) / 2;
   unsigned int r = 0, k;

   /* Y plane rows. */
   for (k = 0; k < output_height; k++, r++)
     rows[r] = gst_data + k * stride_y;

   /* First chroma plane, right after the full-height (h rows) Y plane. */
   for (k = 0; k < output_height / 2; k++, r++)
     rows[r] = gst_data + h * stride_y + k * stride_uv;

   /* Second chroma plane, after the first one. */
   for (k = 0; k < output_height / 2; k++, r++)
     rows[r] = gst_data + h * stride_y
             + (output_height / 2) * stride_uv
             + k * stride_uv;
}
89
/* YV12 (planar, chroma planes swapped vs I420): fill the row-pointer
 * table so the plane that lies second in memory is exposed first, and
 * vice versa — same strides as _evas_video_i420(). */
static void
_evas_video_yv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned char **rows = (const unsigned char **)evas_data;
   const unsigned int stride_y = GST_ROUND_UP_4(w);
   const unsigned int stride_uv = GST_ROUND_UP_8(w) / 2;
   unsigned int r = 0, k;

   /* Y plane rows. */
   for (k = 0; k < output_height; k++, r++)
     rows[r] = gst_data + k * stride_y;

   /* Second chroma plane in memory comes first in the table. */
   for (k = 0; k < output_height / 2; k++, r++)
     rows[r] = gst_data + h * stride_y
             + (output_height / 2) * stride_uv
             + k * stride_uv;

   /* First chroma plane in memory comes last in the table. */
   for (k = 0; k < output_height / 2; k++, r++)
     rows[r] = gst_data + h * stride_y + k * stride_uv;
}
116
/* YUY2 (packed 4:2:2, single plane): one row pointer per output line,
 * stride of 2 bytes per pixel rounded up to 4. */
static void
_evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
{
   const unsigned char **rows = (const unsigned char **)evas_data;
   const unsigned int stride = GST_ROUND_UP_4(w * 2);
   unsigned int row;

   for (row = 0; row < output_height; row++)
     rows[row] = gst_data + row * stride;
}
131
/* NV12 (planar Y + interleaved UV): fill the row-pointer table with the Y
 * rows followed by the half-height UV rows.  NOTE(review): the UV plane
 * offset uses output_height rather than h (h is unused) — confirm this is
 * intended when output_height != h. */
static void
_evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
{
   const unsigned char **rows = (const unsigned char **)evas_data;
   unsigned int r = 0, k;

   /* Y plane rows. */
   for (k = 0; k < output_height; k++, r++)
     rows[r] = gst_data + k * w;

   /* Interleaved UV plane, directly after the Y plane. */
   for (k = 0; k < output_height / 2; k++, r++)
     rows[r] = gst_data + output_height * w + k * w;
}
149
/* Table mapping GStreamer video formats to the Evas colorspace and the
 * converter function that lays the frame out for Evas.  NULL-terminated.
 * NOTE(review): the trailing Eina_Bool is consumed elsewhere (not visible
 * here) — it is set for the 4:2:0 formats (I420/YV12/NV12) only; confirm
 * its exact meaning at the call site. */
const ColorSpace_Format_Convertion colorspace_format_convertion[] = {
   { "I420", GST_VIDEO_FORMAT_I420, EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE },
   { "YV12", GST_VIDEO_FORMAT_YV12, EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE },
   { "YUY2", GST_VIDEO_FORMAT_YUY2, EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE },
   { "NV12", GST_VIDEO_FORMAT_NV12, EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE },
   { "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr, EINA_FALSE },
   { "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx, EINA_FALSE },
   { "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra, EINA_FALSE },
   { NULL, 0, 0, NULL, 0 }
};
160
diff --git a/src/modules/emotion/gstreamer1/emotion_fakeeos.c b/src/modules/emotion/gstreamer1/emotion_fakeeos.c
new file mode 100644
index 0000000000..7e71e72940
--- /dev/null
+++ b/src/modules/emotion/gstreamer1/emotion_fakeeos.c
@@ -0,0 +1,47 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include "emotion_gstreamer.h"
6
/* A trivial GstBin subclass whose only job is to drop EOS messages coming
 * from its children, so the surrounding pipeline never sees end-of-stream
 * from the elements wrapped inside it. */
typedef struct _FakeEOSBin
{
   GstBin parent;
} FakeEOSBin;

typedef struct _FakeEOSBinClass
{
   GstBinClass parent;
} FakeEOSBinClass;

/* Registers the type and generates fakeeos_bin_parent_class used below. */
G_DEFINE_TYPE (FakeEOSBin, fakeeos_bin, GST_TYPE_BIN);

static void
fakeeos_bin_handle_message(GstBin * bin, GstMessage * message)
{
   /* FakeEOSBin *fakeeos = (FakeEOSBin *)(bin); */

   switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_EOS:
      /* what to do here ? just returning at the moment */
      return;
    default:
      break;
   }

   /* Everything except EOS is forwarded to the normal GstBin handling. */
   GST_BIN_CLASS(fakeeos_bin_parent_class)->handle_message(bin, message);
}

static void
fakeeos_bin_class_init(FakeEOSBinClass * klass)
{
   GstBinClass *gstbin_class = GST_BIN_CLASS(klass);

   /* Override message handling to intercept EOS (see above). */
   gstbin_class->handle_message =
     GST_DEBUG_FUNCPTR (fakeeos_bin_handle_message);
}

static void
fakeeos_bin_init(FakeEOSBin *src EINA_UNUSED)
{
}
diff --git a/src/modules/emotion/gstreamer1/emotion_gstreamer.c b/src/modules/emotion/gstreamer1/emotion_gstreamer.c
new file mode 100644
index 0000000000..c9ed86c403
--- /dev/null
+++ b/src/modules/emotion/gstreamer1/emotion_gstreamer.c
@@ -0,0 +1,2053 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include "emotion_gstreamer.h"
6
/* Module-wide state shared by the GStreamer 1.0 emotion backend. */
Eina_Bool window_manager_video = EINA_FALSE;
int _emotion_gstreamer_log_domain = -1;
Eina_Bool debug_fps = EINA_FALSE;
Eina_Bool _ecore_x_available = EINA_FALSE;

/* Idler used to restart a stream (deleted in em_cleanup()). */
static Ecore_Idler *restart_idler;
/* Pairs module init/shutdown calls. */
static int _emotion_init_count = 0;

/* Callbacks to get the eos */
static void _for_each_tag (GstTagList const* list, gchar const* tag, void *data);
static void _free_metadata (Emotion_Gstreamer_Metadata *m);

static GstBusSyncReply _eos_sync_fct(GstBus *bus,
                                     GstMessage *message,
                                     gpointer data);

static Eina_Bool _em_restart_stream(void *data);

/* Module interface */


/* Priority boost flag; spelling "overide" kept as-is (referenced elsewhere). */
static int priority_overide = 0;
29
30static Emotion_Video_Stream *
31emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
32{
33 Emotion_Video_Stream *vstream;
34
35 if (!ev) return NULL;
36
37 vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
38 if (!vstream) return NULL;
39
40 ev->video_streams = eina_list_append(ev->video_streams, vstream);
41 return vstream;
42}
43
44static const char *
45emotion_visualization_element_name_get(Emotion_Vis visualisation)
46{
47 switch (visualisation)
48 {
49 case EMOTION_VIS_NONE:
50 return NULL;
51 case EMOTION_VIS_GOOM:
52 return "goom";
53 case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
54 return "libvisual_bumpscope";
55 case EMOTION_VIS_LIBVISUAL_CORONA:
56 return "libvisual_corona";
57 case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
58 return "libvisual_dancingparticles";
59 case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
60 return "libvisual_gdkpixbuf";
61 case EMOTION_VIS_LIBVISUAL_G_FORCE:
62 return "libvisual_G-Force";
63 case EMOTION_VIS_LIBVISUAL_GOOM:
64 return "libvisual_goom";
65 case EMOTION_VIS_LIBVISUAL_INFINITE:
66 return "libvisual_infinite";
67 case EMOTION_VIS_LIBVISUAL_JAKDAW:
68 return "libvisual_jakdaw";
69 case EMOTION_VIS_LIBVISUAL_JESS:
70 return "libvisual_jess";
71 case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
72 return "libvisual_lv_analyzer";
73 case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
74 return "libvisual_lv_flower";
75 case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
76 return "libvisual_lv_gltest";
77 case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
78 return "libvisual_lv_scope";
79 case EMOTION_VIS_LIBVISUAL_MADSPIN:
80 return "libvisual_madspin";
81 case EMOTION_VIS_LIBVISUAL_NEBULUS:
82 return "libvisual_nebulus";
83 case EMOTION_VIS_LIBVISUAL_OINKSIE:
84 return "libvisual_oinksie";
85 case EMOTION_VIS_LIBVISUAL_PLASMA:
86 return "libvisual_plazma";
87 default:
88 return "goom";
89 }
90}
91
/* Release all GStreamer resources held by @ev: pending buffer, bus,
 * metadata, last frame, pipeline, tee pads, optional X window and the
 * stream lists.  @ev itself is NOT freed (em_del() does that). */
static void
em_cleanup(Emotion_Gstreamer_Video *ev)
{
   Emotion_Audio_Stream *astream;
   Emotion_Video_Stream *vstream;

   if (ev->send)
     {
        emotion_gstreamer_buffer_free(ev->send);
        ev->send = NULL;
     }

   if (ev->eos_bus)
     {
        gst_object_unref(GST_OBJECT(ev->eos_bus));
        ev->eos_bus = NULL;
     }

   if (ev->metadata)
     {
        _free_metadata(ev->metadata);
        ev->metadata = NULL;
     }

   if (ev->last_buffer)
     {
        gst_buffer_unref(ev->last_buffer);
        ev->last_buffer = NULL;
     }

   if (!ev->stream)
     {
        /* Detach the video surface from the image object before teardown. */
        evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
        ev->stream = EINA_TRUE;
     }

   if (ev->pipeline)
     {
        /* NOTE(review): called with a NULL uri — presumably resets the sink
         * before destroying the pipeline; confirm against
         * gstreamer_video_sink_new()'s contract. */
        gstreamer_video_sink_new(ev, ev->obj, NULL);

        g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
        g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
        gst_element_set_state(ev->pipeline, GST_STATE_NULL);
        gst_object_unref(ev->pipeline);

        ev->pipeline = NULL;
        ev->sink = NULL;

        if (ev->eteepad) gst_object_unref(ev->eteepad);
        ev->eteepad = NULL;
        if (ev->xvteepad) gst_object_unref(ev->xvteepad);
        ev->xvteepad = NULL;
        if (ev->xvpad) gst_object_unref(ev->xvpad);
        ev->xvpad = NULL;

        ev->src_width = 0;
        ev->src_height = 0;

#ifdef HAVE_ECORE_X
        INF("destroying window: %i", ev->win);
        if (ev->win) ecore_x_window_free(ev->win);
        ev->win = 0;
#endif
     }

   if (restart_idler)
     {
        ecore_idler_del(restart_idler);
        restart_idler = NULL;
     }

   EINA_LIST_FREE(ev->audio_streams, astream)
     free(astream);
   EINA_LIST_FREE(ev->video_streams, vstream)
     free(vstream);
}
168
169static void
170em_del(void *video)
171{
172 Emotion_Gstreamer_Video *ev = video;
173
174 if (ev->threads)
175 {
176 Ecore_Thread *t;
177
178 EINA_LIST_FREE(ev->threads, t)
179 ecore_thread_cancel(t);
180
181 ev->delete_me = EINA_TRUE;
182 return;
183 }
184
185 if (ev->in != ev->out)
186 {
187 ev->delete_me = EINA_TRUE;
188 return;
189 }
190
191 em_cleanup(ev);
192
193 free(ev);
194}
195
/* Open @file for playback.  Plain paths are turned into file:// URIs:
 * "./" is stripped, relative paths are made absolute with getcwd(), and a
 * leading '/' is added for Windows drive paths (file:///C:/...).
 * Creates the pipeline and installs the synchronous bus handler.
 * Returns EINA_TRUE on success. */
static Eina_Bool
em_file_open(void *video,
             const char *file)
{
   Emotion_Gstreamer_Video *ev = video;
   Eina_Strbuf *sbuf = NULL;
   const char *uri;

   if (!file) return EINA_FALSE;
   if (strstr(file, "://") == NULL)
     {
        /* No scheme present: build a file:// URI. */
        sbuf = eina_strbuf_new();
        eina_strbuf_append(sbuf, "file://");
        if (strncmp(file, "./", 2) == 0)
          file += 2;
        if (strstr(file, ":/") != NULL)
          { /* We absolutely need file:///C:/ under Windows, so adding it here */
             eina_strbuf_append(sbuf, "/");
          }
        else if (*file != '/')
          {
             char tmp[PATH_MAX];

             /* If getcwd() fails the path stays relative in the URI. */
             if (getcwd(tmp, PATH_MAX))
               {
                  eina_strbuf_append(sbuf, tmp);
                  eina_strbuf_append(sbuf, "/");
               }
          }
        eina_strbuf_append(sbuf, file);
     }

   ev->play_started = 0;
   ev->pipeline_parsed = 0;

   uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
   DBG("setting file to '%s'", uri);
   ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, uri);
   if (sbuf) eina_strbuf_free(sbuf);

   if (!ev->pipeline)
     return EINA_FALSE;

   ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
   if (!ev->eos_bus)
     {
        /* NOTE(review): the pipeline stays allocated on this path; it is
         * released by a later em_file_close()/em_cleanup() — confirm. */
        ERR("could not get the bus");
        return EINA_FALSE;
     }

   /* Watch bus messages synchronously (EOS, errors, tags, ...). */
   gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev, NULL);

   ev->position = 0.0;

   return 1;
}
252
253static void
254em_file_close(void *video)
255{
256 Emotion_Gstreamer_Video *ev;
257
258 ev = (Emotion_Gstreamer_Video *)video;
259 if (!ev)
260 return;
261
262 if (ev->threads)
263 {
264 Ecore_Thread *t;
265
266 EINA_LIST_FREE(ev->threads, t)
267 ecore_thread_cancel(t);
268 }
269
270 em_cleanup(ev);
271
272 ev->pipeline_parsed = EINA_FALSE;
273 ev->play_started = 0;
274}
275
276static void
277em_play(void *video,
278 double pos EINA_UNUSED)
279{
280 Emotion_Gstreamer_Video *ev;
281
282 ev = (Emotion_Gstreamer_Video *)video;
283 if (!ev->pipeline) return;
284
285 if (ev->pipeline_parsed)
286 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
287 ev->play = 1;
288 ev->play_started = 1;
289}
290
291static void
292em_stop(void *video)
293{
294 Emotion_Gstreamer_Video *ev;
295
296 ev = (Emotion_Gstreamer_Video *)video;
297
298 if (!ev->pipeline) return;
299
300 if (ev->pipeline_parsed)
301 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
302 ev->play = 0;
303}
304
305static void
306em_size_get(void *video,
307 int *width,
308 int *height)
309{
310 Emotion_Gstreamer_Video *ev;
311 Emotion_Video_Stream *vstream;
312
313 ev = (Emotion_Gstreamer_Video *)video;
314
315 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
316 goto on_error;
317
318 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
319 if (vstream)
320 {
321 if (width) *width = vstream->info.width;
322 if (height) *height = vstream->info.height;
323
324 return;
325 }
326
327 on_error:
328 if (width) *width = 0;
329 if (height) *height = 0;
330}
331
332static void
333em_pos_set(void *video,
334 double pos)
335{
336 Emotion_Gstreamer_Video *ev;
337
338 ev = (Emotion_Gstreamer_Video *)video;
339
340 if (!ev->pipeline) return;
341
342 if (ev->play)
343 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
344
345 gst_element_seek(ev->pipeline, 1.0,
346 GST_FORMAT_TIME,
347 GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
348 GST_SEEK_TYPE_SET,
349 (gint64)(pos * (double)GST_SECOND),
350 GST_SEEK_TYPE_NONE, -1);
351
352 if (ev->play)
353 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
354}
355
356static double
357em_len_get(void *video)
358{
359 Emotion_Gstreamer_Video *ev;
360 Emotion_Video_Stream *vstream;
361 Emotion_Audio_Stream *astream;
362 Eina_List *l;
363 gint64 val;
364 gboolean ret;
365
366 ev = video;
367
368 if (!ev->pipeline) return 0.0;
369
370 ret = gst_element_query_duration(ev->pipeline, GST_FORMAT_TIME, &val);
371 if (!ret)
372 goto fallback;
373
374 if (val <= 0.0)
375 goto fallback;
376
377 return val / 1000000000.0;
378
379 fallback:
380 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
381 return 0.0;
382
383 EINA_LIST_FOREACH(ev->audio_streams, l, astream)
384 if (astream->length_time >= 0)
385 return astream->length_time;
386
387 EINA_LIST_FOREACH(ev->video_streams, l, vstream)
388 if (vstream->length_time >= 0)
389 return vstream->length_time;
390
391 return 0.0;
392}
393
394static double
395em_buffer_size_get(void *video)
396{
397 Emotion_Gstreamer_Video *ev;
398
399 GstQuery *query;
400 gboolean busy;
401 gint percent;
402
403 ev = video;
404
405 if (!ev->pipeline) return 0.0;
406
407 query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
408 if (gst_element_query(ev->pipeline, query))
409 gst_query_parse_buffering_percent(query, &busy, &percent);
410 else
411 percent = 100;
412
413 gst_query_unref(query);
414 return ((float)(percent)) / 100.0;
415}
416
417static int
418em_fps_num_get(void *video)
419{
420 Emotion_Gstreamer_Video *ev;
421 Emotion_Video_Stream *vstream;
422
423 ev = (Emotion_Gstreamer_Video *)video;
424
425 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
426 return 0;
427
428 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
429 if (vstream)
430 return vstream->info.fps_n;
431
432 return 0;
433}
434
435static int
436em_fps_den_get(void *video)
437{
438 Emotion_Gstreamer_Video *ev;
439 Emotion_Video_Stream *vstream;
440
441 ev = (Emotion_Gstreamer_Video *)video;
442
443 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
444 return 1;
445
446 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
447 if (vstream)
448 return vstream->info.fps_d;
449
450 return 1;
451}
452
453static double
454em_fps_get(void *video)
455{
456 Emotion_Gstreamer_Video *ev;
457 Emotion_Video_Stream *vstream;
458
459 ev = (Emotion_Gstreamer_Video *)video;
460
461 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
462 return 0.0;
463
464 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
465 if (vstream)
466 return (double)vstream->info.fps_n / (double)vstream->info.fps_d;
467
468 return 0.0;
469}
470
471static double
472em_pos_get(void *video)
473{
474 Emotion_Gstreamer_Video *ev;
475 gint64 val;
476 gboolean ret;
477
478 ev = video;
479
480 if (!ev->pipeline) return 0.0;
481
482 ret = gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &val);
483 if (!ret)
484 return ev->position;
485
486 ev->position = val / 1000000000.0;
487 return ev->position;
488}
489
490static void
491em_vis_set(void *video,
492 Emotion_Vis vis)
493{
494 Emotion_Gstreamer_Video *ev;
495
496 ev = (Emotion_Gstreamer_Video *)video;
497
498 ev->vis = vis;
499}
500
501static Emotion_Vis
502em_vis_get(void *video)
503{
504 Emotion_Gstreamer_Video *ev;
505
506 ev = (Emotion_Gstreamer_Video *)video;
507
508 return ev->vis;
509}
510
511static Eina_Bool
512em_vis_supported(void *ef EINA_UNUSED, Emotion_Vis vis)
513{
514 const char *name;
515 GstElementFactory *factory;
516
517 if (vis == EMOTION_VIS_NONE)
518 return EINA_TRUE;
519
520 name = emotion_visualization_element_name_get(vis);
521 if (!name)
522 return EINA_FALSE;
523
524 factory = gst_element_factory_find(name);
525 if (!factory)
526 return EINA_FALSE;
527
528 gst_object_unref(factory);
529 return EINA_TRUE;
530}
531
532static double
533em_ratio_get(void *video)
534{
535 Emotion_Gstreamer_Video *ev;
536
537 ev = (Emotion_Gstreamer_Video *)video;
538
539 return ev->ratio;
540}
541
542static int
543em_video_handled(void *video)
544{
545 Emotion_Gstreamer_Video *ev;
546
547 ev = (Emotion_Gstreamer_Video *)video;
548
549 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
550
551 if (!eina_list_count(ev->video_streams))
552 return 0;
553
554 return 1;
555}
556
557static int
558em_audio_handled(void *video)
559{
560 Emotion_Gstreamer_Video *ev;
561
562 ev = (Emotion_Gstreamer_Video *)video;
563
564 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
565
566 if (!eina_list_count(ev->audio_streams))
567 return 0;
568
569 return 1;
570}
571
/* Report whether the stream is seekable; this backend always claims
 * support and lets the pipeline reject seeks it cannot perform. */
static int
em_seekable(void *video EINA_UNUSED)
{
   return 1;
}

/* Frontend notification that a frame was consumed; nothing to do here. */
static void
em_frame_done(void *video EINA_UNUSED)
{
}
582
583static Emotion_Format
584em_format_get(void *video)
585{
586 Emotion_Gstreamer_Video *ev;
587 Emotion_Video_Stream *vstream;
588
589 ev = (Emotion_Gstreamer_Video *)video;
590
591 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
592 return EMOTION_FORMAT_NONE;
593
594 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
595 if (vstream)
596 {
597 switch (vstream->info.finfo->format)
598 {
599 case GST_VIDEO_FORMAT_I420:
600 return EMOTION_FORMAT_I420;
601 case GST_VIDEO_FORMAT_YV12:
602 return EMOTION_FORMAT_YV12;
603 case GST_VIDEO_FORMAT_YUY2:
604 return EMOTION_FORMAT_YUY2;
605 case GST_VIDEO_FORMAT_ARGB:
606 /* FIXME: This will be wrong for big endian archs */
607 return EMOTION_FORMAT_BGRA;
608 default:
609 return EMOTION_FORMAT_NONE;
610 }
611 }
612 return EMOTION_FORMAT_NONE;
613}
614
615static void
616em_video_data_size_get(void *video, int *w, int *h)
617{
618 Emotion_Gstreamer_Video *ev;
619 Emotion_Video_Stream *vstream;
620
621 ev = (Emotion_Gstreamer_Video *)video;
622
623 if (ev->pipeline && (!ev->video_stream_nbr || !ev->video_streams))
624 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
625 goto on_error;
626
627 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
628 if (vstream)
629 {
630 *w = vstream->info.width;
631 *h = vstream->info.height;
632
633 return;
634 }
635
636 on_error:
637 *w = 0;
638 *h = 0;
639}
640
/* Direct YUV row access is not provided by this backend (frames flow
 * through the Emotion sink instead); always reports failure. */
static int
em_yuv_rows_get(void *video EINA_UNUSED,
                int w EINA_UNUSED,
                int h EINA_UNUSED,
                unsigned char **yrows EINA_UNUSED,
                unsigned char **urows EINA_UNUSED,
                unsigned char **vrows EINA_UNUSED)
{
   return 0;
}

/* Direct BGRA buffer access is likewise not provided; always fails. */
static int
em_bgra_data_get(void *video EINA_UNUSED, unsigned char **bgra_data EINA_UNUSED)
{
   return 0;
}
657
658static void
659em_event_feed(void *video, int event)
660{
661 Emotion_Gstreamer_Video *ev;
662 GstNavigationCommand command;
663
664 ev = (Emotion_Gstreamer_Video *)video;
665
666 switch (event)
667 {
668 case EMOTION_EVENT_MENU1:
669 command = GST_NAVIGATION_COMMAND_MENU1;
670 break;
671 case EMOTION_EVENT_MENU2:
672 command = GST_NAVIGATION_COMMAND_MENU2;
673 break;
674 case EMOTION_EVENT_MENU3:
675 command = GST_NAVIGATION_COMMAND_MENU3;
676 break;
677 case EMOTION_EVENT_MENU4:
678 command = GST_NAVIGATION_COMMAND_MENU4;
679 break;
680 case EMOTION_EVENT_MENU5:
681 command = GST_NAVIGATION_COMMAND_MENU5;
682 break;
683 case EMOTION_EVENT_MENU6:
684 command = GST_NAVIGATION_COMMAND_MENU6;
685 break;
686 case EMOTION_EVENT_MENU7:
687 command = GST_NAVIGATION_COMMAND_MENU7;
688 break;
689 case EMOTION_EVENT_UP:
690 command = GST_NAVIGATION_COMMAND_UP;
691 break;
692 case EMOTION_EVENT_DOWN:
693 command = GST_NAVIGATION_COMMAND_DOWN;
694 break;
695 case EMOTION_EVENT_LEFT:
696 command = GST_NAVIGATION_COMMAND_LEFT;
697 break;
698 case EMOTION_EVENT_RIGHT:
699 command = GST_NAVIGATION_COMMAND_RIGHT;
700 break;
701 case EMOTION_EVENT_SELECT:
702 command = GST_NAVIGATION_COMMAND_ACTIVATE;
703 break;
704 case EMOTION_EVENT_NEXT:
705 /* FIXME */
706 command = GST_NAVIGATION_COMMAND_RIGHT;
707 break;
708 case EMOTION_EVENT_PREV:
709 /* FIXME */
710 command = GST_NAVIGATION_COMMAND_LEFT;
711 break;
712 case EMOTION_EVENT_ANGLE_NEXT:
713 command = GST_NAVIGATION_COMMAND_NEXT_ANGLE;
714 break;
715 case EMOTION_EVENT_ANGLE_PREV:
716 command = GST_NAVIGATION_COMMAND_PREV_ANGLE;
717 break;
718 case EMOTION_EVENT_FORCE:
719 /* FIXME */
720 command = GST_NAVIGATION_COMMAND_ACTIVATE;
721 break;
722 case EMOTION_EVENT_0:
723 case EMOTION_EVENT_1:
724 case EMOTION_EVENT_2:
725 case EMOTION_EVENT_3:
726 case EMOTION_EVENT_4:
727 case EMOTION_EVENT_5:
728 case EMOTION_EVENT_6:
729 case EMOTION_EVENT_7:
730 case EMOTION_EVENT_8:
731 case EMOTION_EVENT_9:
732 case EMOTION_EVENT_10:
733 default:
734 return;
735 break;
736 }
737
738 gst_navigation_send_command (GST_NAVIGATION (ev->pipeline), command);
739}
740
741static void
742em_event_mouse_button_feed(void *video, int button, int x, int y)
743{
744 Emotion_Gstreamer_Video *ev;
745
746 ev = (Emotion_Gstreamer_Video *)video;
747 /* FIXME */
748 gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-button-press", button, x, y);
749 gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-button-release", button, x, y);
750}
751
752static void
753em_event_mouse_move_feed(void *video, int x, int y)
754{
755 Emotion_Gstreamer_Video *ev;
756
757 ev = (Emotion_Gstreamer_Video *)video;
758 gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-move", 0, x, y);
759}
760
761/* Video channels */
762static int
763em_video_channel_count(void *video)
764{
765 Emotion_Gstreamer_Video *ev;
766
767 ev = (Emotion_Gstreamer_Video *)video;
768
769 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
770
771 return eina_list_count(ev->video_streams);
772}
773
774static void
775em_video_channel_set(void *video,
776 int channel)
777{
778 Emotion_Gstreamer_Video *ev;
779
780 ev = (Emotion_Gstreamer_Video *)video;
781
782 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
783
784 if (channel < 0) channel = -1;
785
786 if (ev->pipeline)
787 g_object_set (ev->pipeline, "current-video", channel, NULL);
788}
789
790static int
791em_video_channel_get(void *video)
792{
793 Emotion_Gstreamer_Video *ev;
794
795 ev = (Emotion_Gstreamer_Video *)video;
796
797 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
798
799 return ev->video_stream_nbr;
800}
801
/* External subtitle files are not implemented for this backend yet;
 * the call is logged and ignored. */
static void
em_video_subtitle_file_set(void *video EINA_UNUSED,
                           const char *filepath EINA_UNUSED)
{
   DBG("video_subtitle_file_set not implemented for gstreamer yet.");
}

/* Matching getter for the unimplemented subtitle-file support; always
 * reports that no subtitle file is set. */
static const char *
em_video_subtitle_file_get(void *video EINA_UNUSED)
{
   DBG("video_subtitle_file_get not implemented for gstreamer yet.");
   return NULL;
}

/* Stream names are not exposed by this backend; always NULL. */
static const char *
em_video_channel_name_get(void *video EINA_UNUSED,
                          int channel EINA_UNUSED)
{
   return NULL;
}
822
823static void
824em_video_channel_mute_set(void *video,
825 int mute)
826{
827 Emotion_Gstreamer_Video *ev;
828
829 ev = (Emotion_Gstreamer_Video *)video;
830
831 ev->video_mute = mute;
832}
833
834static int
835em_video_channel_mute_get(void *video)
836{
837 Emotion_Gstreamer_Video *ev;
838
839 ev = (Emotion_Gstreamer_Video *)video;
840
841 return ev->video_mute;
842}
843
844/* Audio channels */
845
846static int
847em_audio_channel_count(void *video)
848{
849 Emotion_Gstreamer_Video *ev;
850
851 ev = (Emotion_Gstreamer_Video *)video;
852
853 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
854
855 return eina_list_count(ev->audio_streams);
856}
857
858static void
859em_audio_channel_set(void *video,
860 int channel)
861{
862 Emotion_Gstreamer_Video *ev;
863
864 ev = (Emotion_Gstreamer_Video *)video;
865
866 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
867
868 if (channel < 0) channel = -1;
869
870 if (ev->pipeline)
871 g_object_set (ev->pipeline, "current-audio", channel, NULL);
872}
873
874static int
875em_audio_channel_get(void *video)
876{
877 Emotion_Gstreamer_Video *ev;
878
879 ev = (Emotion_Gstreamer_Video *)video;
880
881 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
882
883 return ev->audio_stream_nbr;
884}
885
/* Audio stream names are not exposed by this backend; always NULL. */
static const char *
em_audio_channel_name_get(void *video EINA_UNUSED,
                          int channel EINA_UNUSED)
{
   return NULL;
}
892
893#define GST_PLAY_FLAG_AUDIO (1 << 1)
894
895static void
896em_audio_channel_mute_set(void *video,
897 int mute)
898{
899 /* NOTE: at first I wanted to completly shutdown the audio path on mute,
900 but that's not possible as the audio sink could be the clock source
901 for the pipeline (at least that's the case on some of the hardware
902 I have been tested emotion on.
903 */
904 Emotion_Gstreamer_Video *ev;
905
906 ev = (Emotion_Gstreamer_Video *)video;
907
908 if (!ev->pipeline) return;
909
910 ev->audio_mute = mute;
911
912 g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
913}
914
915static int
916em_audio_channel_mute_get(void *video)
917{
918 Emotion_Gstreamer_Video *ev;
919
920 ev = (Emotion_Gstreamer_Video *)video;
921
922 return ev->audio_mute;
923}
924
925static void
926em_audio_channel_volume_set(void *video,
927 double vol)
928{
929 Emotion_Gstreamer_Video *ev;
930
931 ev = (Emotion_Gstreamer_Video *)video;
932
933 if (!ev->pipeline) return;
934
935 if (vol < 0.0)
936 vol = 0.0;
937 if (vol > 1.0)
938 vol = 1.0;
939 ev->volume = vol;
940 g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
941}
942
943static double
944em_audio_channel_volume_get(void *video)
945{
946 Emotion_Gstreamer_Video *ev;
947
948 ev = (Emotion_Gstreamer_Video *)video;
949
950 return ev->volume;
951}
952
953/* spu stuff */
954
/* SPU (subpicture) channels are not supported by this backend; the
 * following stubs satisfy the engine vtable. */
static int
em_spu_channel_count(void *video EINA_UNUSED)
{
   return 0;
}

static void
em_spu_channel_set(void *video EINA_UNUSED, int channel EINA_UNUSED)
{
}

static int
em_spu_channel_get(void *video EINA_UNUSED)
{
   return 1;
}

static const char *
em_spu_channel_name_get(void *video EINA_UNUSED, int channel EINA_UNUSED)
{
   return NULL;
}

static void
em_spu_channel_mute_set(void *video EINA_UNUSED, int mute EINA_UNUSED)
{
}

static int
em_spu_channel_mute_get(void *video EINA_UNUSED)
{
   return 0;
}
988
/* Chapter navigation is not supported by this backend; the following
 * stubs satisfy the engine vtable. */
static int
em_chapter_count(void *video EINA_UNUSED)
{
   return 0;
}

static void
em_chapter_set(void *video EINA_UNUSED, int chapter EINA_UNUSED)
{
}

static int
em_chapter_get(void *video EINA_UNUSED)
{
   return 0;
}

static const char *
em_chapter_name_get(void *video EINA_UNUSED, int chapter EINA_UNUSED)
{
   return NULL;
}
1011
/* Playback-rate control is not implemented; requests are ignored. */
static void
em_speed_set(void *video EINA_UNUSED, double speed EINA_UNUSED)
{
}

/* Playback rate is always reported as normal speed. */
static double
em_speed_get(void *video EINA_UNUSED)
{
   return 1.0;
}

/* Media ejection is not applicable; always claims success. */
static int
em_eject(void *video EINA_UNUSED)
{
   return 1;
}
1028
1029static const char *
1030em_meta_get(void *video, int meta)
1031{
1032 Emotion_Gstreamer_Video *ev;
1033 const char *str = NULL;
1034
1035 ev = (Emotion_Gstreamer_Video *)video;
1036
1037 if (!ev || !ev->metadata) return NULL;
1038 switch (meta)
1039 {
1040 case META_TRACK_TITLE:
1041 str = ev->metadata->title;
1042 break;
1043 case META_TRACK_ARTIST:
1044 str = ev->metadata->artist;
1045 break;
1046 case META_TRACK_ALBUM:
1047 str = ev->metadata->album;
1048 break;
1049 case META_TRACK_YEAR:
1050 str = ev->metadata->year;
1051 break;
1052 case META_TRACK_GENRE:
1053 str = ev->metadata->genre;
1054 break;
1055 case META_TRACK_COMMENT:
1056 str = ev->metadata->comment;
1057 break;
1058 case META_TRACK_DISCID:
1059 str = ev->metadata->disc_id;
1060 break;
1061 default:
1062 break;
1063 }
1064
1065 return str;
1066}
1067
/* Switch "priority" (hardware) rendering on or off. A mode change with a
 * live pipeline cancels any pending decode threads, tears the pipeline
 * down and schedules a stream restart from an idler. */
static void
em_priority_set(void *video, Eina_Bool pri)
{
   Emotion_Gstreamer_Video *ev;

   ev = video;
   if (priority_overide > 3) return; /* If we failed too often to create that pipeline, don't waste our time anymore */

   if (ev->priority != pri && ev->pipeline)
     {
        if (ev->threads)
          {
             Ecore_Thread *t;

             /* cancel every in-flight worker before tearing down */
             EINA_LIST_FREE(ev->threads, t)
               ecore_thread_cancel(t);
          }
        em_cleanup(ev);
        restart_idler = ecore_idler_add(_em_restart_stream, ev);
     }
   ev->priority = pri;
}
1090
1091static Eina_Bool
1092em_priority_get(void *video)
1093{
1094 Emotion_Gstreamer_Video *ev;
1095
1096 ev = video;
1097 return !ev->stream;
1098}
1099
1100#ifdef HAVE_ECORE_X
1101static Eina_Bool
1102_ecore_event_x_destroy(void *data EINA_UNUSED, int type EINA_UNUSED, void *event EINA_UNUSED)
1103{
1104 Ecore_X_Event_Window_Destroy *ev = event;
1105
1106 INF("killed window: %x (%x).", ev->win, ev->event_win);
1107
1108 return EINA_TRUE;
1109}
1110
1111static void
1112gstreamer_ecore_x_check(void)
1113{
1114 Ecore_X_Window *roots;
1115 int num;
1116
1117 ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL);
1118
1119 /* Check if the window manager is able to handle our special Xv window. */
1120 roots = ecore_x_window_root_list(&num);
1121 if (roots && num > 0)
1122 {
1123 Ecore_X_Window win, twin;
1124 int nwins;
1125
1126 nwins = ecore_x_window_prop_window_get(roots[0],
1127 ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1128 &win, 1);
1129 if (nwins > 0)
1130 {
1131 nwins = ecore_x_window_prop_window_get(win,
1132 ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1133 &twin, 1);
1134 if (nwins > 0 && twin == win)
1135 {
1136 Ecore_X_Atom *supported;
1137 int supported_num;
1138 int i;
1139
1140 if (ecore_x_netwm_supported_get(roots[0], &supported, &supported_num))
1141 {
1142 Eina_Bool parent = EINA_FALSE;
1143 Eina_Bool video_position = EINA_FALSE;
1144
1145 for (i = 0; i < supported_num; ++i)
1146 {
1147 if (supported[i] == ECORE_X_ATOM_E_VIDEO_PARENT)
1148 parent = EINA_TRUE;
1149 else if (supported[i] == ECORE_X_ATOM_E_VIDEO_POSITION)
1150 video_position = EINA_TRUE;
1151 if (parent && video_position)
1152 break;
1153 }
1154
1155 if (parent && video_position)
1156 {
1157 window_manager_video = EINA_TRUE;
1158 }
1159 }
1160 free(supported);
1161 }
1162 }
1163 }
1164 free(roots);
1165}
1166#endif
1167
1168static void *
1169em_add(const Emotion_Engine *api,
1170 Evas_Object *obj,
1171 const Emotion_Module_Options *opt EINA_UNUSED)
1172{
1173 Emotion_Gstreamer_Video *ev;
1174
1175 ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
1176 EINA_SAFETY_ON_NULL_RETURN_VAL(ev, NULL);
1177
1178 ev->api = api;
1179 ev->obj = obj;
1180
1181 /* Default values */
1182 ev->ratio = 1.0;
1183 ev->vis = EMOTION_VIS_NONE;
1184 ev->volume = 0.8;
1185 ev->play_started = 0;
1186 ev->delete_me = EINA_FALSE;
1187 ev->threads = NULL;
1188
1189 return ev;
1190}
1191
/* Engine vtable binding the generic Emotion module API to this GStreamer
 * 1.0 backend; entry order must match the Emotion_Engine definition. */
static const Emotion_Engine em_engine =
{
   EMOTION_ENGINE_API_VERSION,
   EMOTION_ENGINE_PRIORITY_DEFAULT,
   "gstreamer1",
   em_add, /* add */
   em_del, /* del */
   em_file_open, /* file_open */
   em_file_close, /* file_close */
   em_play, /* play */
   em_stop, /* stop */
   em_size_get, /* size_get */
   em_pos_set, /* pos_set */
   em_len_get, /* len_get */
   em_buffer_size_get, /* buffer_size_get */
   em_fps_num_get, /* fps_num_get */
   em_fps_den_get, /* fps_den_get */
   em_fps_get, /* fps_get */
   em_pos_get, /* pos_get */
   em_vis_set, /* vis_set */
   em_vis_get, /* vis_get */
   em_vis_supported, /* vis_supported */
   em_ratio_get, /* ratio_get */
   em_video_handled, /* video_handled */
   em_audio_handled, /* audio_handled */
   em_seekable, /* seekable */
   em_frame_done, /* frame_done */
   em_format_get, /* format_get */
   em_video_data_size_get, /* video_data_size_get */
   em_yuv_rows_get, /* yuv_rows_get */
   em_bgra_data_get, /* bgra_data_get */
   em_event_feed, /* event_feed */
   em_event_mouse_button_feed, /* event_mouse_button_feed */
   em_event_mouse_move_feed, /* event_mouse_move_feed */
   em_video_channel_count, /* video_channel_count */
   em_video_channel_set, /* video_channel_set */
   em_video_channel_get, /* video_channel_get */
   em_video_subtitle_file_set, /* video_subtitle_file_set */
   em_video_subtitle_file_get, /* video_subtitle_file_get */
   em_video_channel_name_get, /* video_channel_name_get */
   em_video_channel_mute_set, /* video_channel_mute_set */
   em_video_channel_mute_get, /* video_channel_mute_get */
   em_audio_channel_count, /* audio_channel_count */
   em_audio_channel_set, /* audio_channel_set */
   em_audio_channel_get, /* audio_channel_get */
   em_audio_channel_name_get, /* audio_channel_name_get */
   em_audio_channel_mute_set, /* audio_channel_mute_set */
   em_audio_channel_mute_get, /* audio_channel_mute_get */
   em_audio_channel_volume_set, /* audio_channel_volume_set */
   em_audio_channel_volume_get, /* audio_channel_volume_get */
   em_spu_channel_count, /* spu_channel_count */
   em_spu_channel_set, /* spu_channel_set */
   em_spu_channel_get, /* spu_channel_get */
   em_spu_channel_name_get, /* spu_channel_name_get */
   em_spu_channel_mute_set, /* spu_channel_mute_set */
   em_spu_channel_mute_get, /* spu_channel_mute_get */
   em_chapter_count, /* chapter_count */
   em_chapter_set, /* chapter_set */
   em_chapter_get, /* chapter_get */
   em_chapter_name_get, /* chapter_name_get */
   em_speed_set, /* speed_set */
   em_speed_get, /* speed_get */
   em_eject, /* eject */
   em_meta_get, /* meta_get */
   em_priority_set, /* priority_set */
   em_priority_get /* priority_get */
};
1259
1260Eina_Bool
1261gstreamer_module_init(void)
1262{
1263 GError *error;
1264
1265 if (_emotion_init_count > 0)
1266 {
1267 _emotion_pending_ecore_begin();
1268 return EINA_TRUE;
1269 }
1270
1271 if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
1272
1273 eina_threads_init();
1274 eina_log_threads_enable();
1275 _emotion_gstreamer_log_domain = eina_log_domain_register
1276 ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1277 if (_emotion_gstreamer_log_domain < 0)
1278 {
1279 EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1280 return EINA_FALSE;
1281 }
1282
1283 if (!gst_init_check(0, NULL, &error))
1284 {
1285 EINA_LOG_CRIT("Could not init GStreamer");
1286 goto error_gst_init;
1287 }
1288
1289#ifdef HAVE_ECORE_X
1290 if (ecore_x_init(NULL) > 0)
1291 {
1292 _ecore_x_available = EINA_TRUE;
1293 gstreamer_ecore_x_check();
1294 }
1295#endif
1296
1297 if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1298 "emotion-sink",
1299 "video sink plugin for Emotion",
1300 gstreamer_plugin_init,
1301 VERSION,
1302 "LGPL",
1303 "Enlightenment",
1304 PACKAGE,
1305 "http://www.enlightenment.org/") == FALSE)
1306 {
1307 EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1308 goto error_gst_plugin;
1309 }
1310
1311 if (!_emotion_module_register(&em_engine))
1312 {
1313 ERR("Could not register module %p", &em_engine);
1314 goto error_register;
1315 }
1316
1317 _emotion_init_count = 1;
1318 return EINA_TRUE;
1319
1320 error_register:
1321 error_gst_plugin:
1322#ifdef HAVE_ECORE_X
1323 if (_ecore_x_available)
1324 {
1325 ecore_x_shutdown();
1326 _ecore_x_available = EINA_FALSE;
1327 window_manager_video = EINA_FALSE;
1328 }
1329#endif
1330
1331 gst_deinit();
1332
1333 error_gst_init:
1334 eina_log_domain_unregister(_emotion_gstreamer_log_domain);
1335 _emotion_gstreamer_log_domain = -1;
1336
1337 return EINA_FALSE;
1338}
1339
/* Module exit point: drop one init reference; on the last reference,
 * unregister the engine and tear down Ecore_X, logging and GStreamer in
 * reverse order of initialization. */
void
gstreamer_module_shutdown(void)
{
   if (_emotion_init_count > 1)
     {
        /* still referenced by another user */
        _emotion_init_count--;
        return;
     }
   else if (_emotion_init_count == 0)
     {
        EINA_LOG_ERR("too many gstreamer_module_shutdown()");
        return;
     }
   _emotion_init_count = 0;

   _emotion_module_unregister(&em_engine);

#ifdef HAVE_ECORE_X
   if (_ecore_x_available)
     {
        ecore_x_shutdown();
        _ecore_x_available = EINA_FALSE;
        window_manager_video = EINA_FALSE;
     }
#endif

   eina_log_domain_unregister(_emotion_gstreamer_log_domain);
   _emotion_gstreamer_log_domain = -1;

   gst_deinit();
}
1371
1372#ifndef EMOTION_STATIC_BUILD_GSTREAMER
1373
1374EINA_MODULE_INIT(gstreamer_module_init);
1375EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
1376
1377#endif
1378
1379static void
1380_for_each_tag(GstTagList const* list,
1381 gchar const* tag,
1382 void *data)
1383{
1384 Emotion_Gstreamer_Video *ev;
1385 int i;
1386 int count;
1387
1388
1389 ev = (Emotion_Gstreamer_Video*)data;
1390
1391 if (!ev || !ev->metadata) return;
1392
1393 /* FIXME: Should use the GStreamer tag merging functions */
1394 count = gst_tag_list_get_tag_size(list, tag);
1395
1396 for (i = 0; i < count; i++)
1397 {
1398 if (!strcmp(tag, GST_TAG_TITLE))
1399 {
1400 char *str;
1401 g_free(ev->metadata->title);
1402 if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1403 ev->metadata->title = str;
1404 else
1405 ev->metadata->title = NULL;
1406 break;
1407 }
1408 if (!strcmp(tag, GST_TAG_ALBUM))
1409 {
1410 gchar *str;
1411 g_free(ev->metadata->album);
1412 if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1413 ev->metadata->album = str;
1414 else
1415 ev->metadata->album = NULL;
1416 break;
1417 }
1418 if (!strcmp(tag, GST_TAG_ARTIST))
1419 {
1420 gchar *str;
1421 g_free(ev->metadata->artist);
1422 if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1423 ev->metadata->artist = str;
1424 else
1425 ev->metadata->artist = NULL;
1426 break;
1427 }
1428 if (!strcmp(tag, GST_TAG_GENRE))
1429 {
1430 gchar *str;
1431 g_free(ev->metadata->genre);
1432 if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1433 ev->metadata->genre = str;
1434 else
1435 ev->metadata->genre = NULL;
1436 break;
1437 }
1438 if (!strcmp(tag, GST_TAG_COMMENT))
1439 {
1440 gchar *str;
1441 g_free(ev->metadata->comment);
1442 if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1443 ev->metadata->comment = str;
1444 else
1445 ev->metadata->comment = NULL;
1446 break;
1447 }
1448 if (!strcmp(tag, GST_TAG_DATE))
1449 {
1450 gchar *str;
1451 const GValue *date;
1452 g_free(ev->metadata->year);
1453 date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1454 if (date)
1455 str = g_strdup_value_contents(date);
1456 else
1457 str = NULL;
1458 ev->metadata->year = str;
1459 break;
1460 }
1461
1462 if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1463 {
1464 gchar *str;
1465 const GValue *track;
1466 g_free(ev->metadata->count);
1467 track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1468 if (track)
1469 str = g_strdup_value_contents(track);
1470 else
1471 str = NULL;
1472 ev->metadata->count = str;
1473 break;
1474 }
1475
1476 if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1477 {
1478 gchar *str;
1479 const GValue *discid;
1480 g_free(ev->metadata->disc_id);
1481 discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1482 if (discid)
1483 str = g_strdup_value_contents(discid);
1484 else
1485 str = NULL;
1486 ev->metadata->disc_id = str;
1487 break;
1488 }
1489 }
1490
1491}
1492
1493static void
1494_free_metadata(Emotion_Gstreamer_Metadata *m)
1495{
1496 if (!m) return;
1497
1498 g_free(m->title);
1499 g_free(m->album);
1500 g_free(m->artist);
1501 g_free(m->genre);
1502 g_free(m->comment);
1503 g_free(m->year);
1504 g_free(m->count);
1505 g_free(m->disc_id);
1506
1507 free(m);
1508}
1509
1510static Eina_Bool
1511_em_restart_stream(void *data)
1512{
1513 Emotion_Gstreamer_Video *ev;
1514
1515 ev = data;
1516
1517 ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri);
1518
1519 if (ev->pipeline)
1520 {
1521 ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
1522 if (!ev->eos_bus)
1523 {
1524 ERR("could not get the bus");
1525 return EINA_FALSE;
1526 }
1527
1528 gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev, NULL);
1529 }
1530
1531 restart_idler = NULL;
1532
1533 return ECORE_CALLBACK_CANCEL;
1534}
1535
1536static Eina_Bool
1537_video_size_get(GstElement *elem, int *width, int *height)
1538{
1539 GstIterator *itr = NULL;
1540 GstCaps *caps;
1541 GValue v = G_VALUE_INIT;
1542 GstPad *pad;
1543 Eina_Bool ret = EINA_FALSE;
1544 Eina_Bool done = EINA_FALSE;
1545 GstVideoInfo info;
1546
1547 itr = gst_element_iterate_src_pads(elem);
1548
1549 while (!done && !ret)
1550 {
1551 switch(gst_iterator_next(itr, &v))
1552 {
1553 case GST_ITERATOR_OK:
1554 pad = GST_PAD(g_value_get_object(&v));
1555 caps = gst_pad_get_current_caps(pad);
1556 if (gst_video_info_from_caps(&info, caps))
1557 {
1558 *width = info.width;
1559 *height = info.height;
1560 ret = EINA_TRUE;
1561 done = EINA_TRUE;
1562 }
1563 gst_caps_unref(caps);
1564 g_value_reset(&v);
1565 break;
1566 case GST_ITERATOR_RESYNC:
1567 gst_iterator_resync(itr);
1568 done = ret = EINA_FALSE;
1569 break;
1570 case GST_ITERATOR_ERROR:
1571 case GST_ITERATOR_DONE:
1572 done = TRUE;
1573 break;
1574 }
1575 }
1576 g_value_unset(&v);
1577 gst_iterator_free(itr);
1578
1579 return ret;
1580}
1581
1582static void
1583_main_frame_resize(void *data)
1584{
1585 Emotion_Gstreamer_Video *ev = data;
1586 double ratio;
1587
1588 ratio = (double)ev->src_width / (double)ev->src_height;
1589 _emotion_frame_resize(ev->obj, ev->src_width, ev->src_height, ratio);
1590 _emotion_pending_ecore_end();
1591}
1592
1593static void
1594_no_more_pads(GstElement *decodebin, gpointer data)
1595{
1596 GstIterator *itr = NULL;
1597 Emotion_Gstreamer_Video *ev = data;
1598 GValue v = G_VALUE_INIT;
1599 GstElement *elem;
1600 Eina_Bool done = EINA_FALSE;
1601
1602 itr = gst_bin_iterate_elements(GST_BIN(decodebin));
1603
1604 while (!done)
1605 {
1606 switch(gst_iterator_next(itr, &v))
1607 {
1608 case GST_ITERATOR_OK:
1609 elem = GST_ELEMENT(g_value_get_object(&v));
1610 if(_video_size_get(elem, &ev->src_width, &ev->src_height))
1611 {
1612 _emotion_pending_ecore_begin();
1613 ecore_main_loop_thread_safe_call_async(_main_frame_resize, ev);
1614 g_value_reset(&v);
1615 done = EINA_TRUE;
1616 break;
1617 }
1618 g_value_reset(&v);
1619 break;
1620 case GST_ITERATOR_RESYNC:
1621 gst_iterator_resync(itr);
1622 done = EINA_FALSE;
1623 break;
1624 case GST_ITERATOR_ERROR:
1625 case GST_ITERATOR_DONE:
1626 done = TRUE;
1627 break;
1628 }
1629 }
1630 g_value_unset(&v);
1631 gst_iterator_free(itr);
1632}
1633
/* Main-loop half of the bus handling: consumes a message forwarded from
 * _eos_sync_fct() and translates it into Emotion frontend callbacks.
 * Always frees the message wrapper and releases the pending-ecore
 * reference taken by the sync handler. */
static void
_eos_main_fct(void *data)
{
   Emotion_Gstreamer_Message *send;
   Emotion_Gstreamer_Video *ev;
   GstMessage *msg;

   send = data;
   ev = send->ev;
   msg = send->msg;

   /* deliver the deferred "playback started" notification once */
   if (ev->play_started && !ev->delete_me)
     {
        _emotion_playback_started(ev->obj);
        ev->play_started = 0;
     }

   switch (GST_MESSAGE_TYPE(msg))
     {
      case GST_MESSAGE_EOS:
        /* end of stream: stop decoding and notify the frontend */
        if (!ev->delete_me)
          {
             ev->play = 0;
             _emotion_decode_stop(ev->obj);
             _emotion_playback_finished(ev->obj);
          }
        break;
      case GST_MESSAGE_TAG:
        /* harvest metadata tags into ev->metadata */
        if (!ev->delete_me)
          {
             GstTagList *new_tags;
             gst_message_parse_tag(msg, &new_tags);
             if (new_tags)
               {
                  gst_tag_list_foreach(new_tags,
                                       (GstTagForeachFunc)_for_each_tag,
                                       ev);
                  gst_tag_list_free(new_tags);
               }
          }
        break;
      case GST_MESSAGE_ASYNC_DONE:
        /* a seek (or other async state change) completed */
        if (!ev->delete_me) _emotion_seek_done(ev->obj);
        break;
      case GST_MESSAGE_STREAM_STATUS:
        break;
      case GST_MESSAGE_STATE_CHANGED:
        if (!ev->delete_me)
          {
             /* FIXME: This is conceptionally broken */
             /* (re)attach the no-more-pads callback exactly once per src */
             if (!g_signal_handlers_disconnect_by_func(msg->src, _no_more_pads, ev))
               g_signal_connect(msg->src, "no-more-pads", G_CALLBACK(_no_more_pads), ev);
          }
        break;
      case GST_MESSAGE_ERROR:
        /* tear the pipeline down; if priority (HW) rendering was active,
         * fall back to canvas rendering and schedule a restart */
        em_cleanup(ev);

        if (ev->priority)
          {
             ERR("Switching back to canvas rendering.");
             ev->priority = EINA_FALSE;
             priority_overide++;

             restart_idler = ecore_idler_add(_em_restart_stream, ev);
          }
        break;
      default:
        ERR("bus say: %s [%i - %s]",
            GST_MESSAGE_SRC_NAME(msg),
            GST_MESSAGE_TYPE(msg),
            GST_MESSAGE_TYPE_NAME(msg));
        break;
     }

   emotion_gstreamer_message_free(send);
   _emotion_pending_ecore_end();
}
1711
/* Bus sync handler (runs on a GStreamer streaming thread): filter the
 * messages Emotion cares about and bounce them to the main loop via
 * _eos_main_fct(). Every message is consumed here (GST_BUS_DROP). */
static GstBusSyncReply
_eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
{
   Emotion_Gstreamer_Video *ev = data;
   Emotion_Gstreamer_Message *send;

   switch (GST_MESSAGE_TYPE(msg))
     {
      case GST_MESSAGE_EOS:
      case GST_MESSAGE_TAG:
      case GST_MESSAGE_ASYNC_DONE:
      case GST_MESSAGE_STREAM_STATUS:
        INF("bus say: %s [%i - %s]",
            GST_MESSAGE_SRC_NAME(msg),
            GST_MESSAGE_TYPE(msg),
            GST_MESSAGE_TYPE_NAME(msg));
        /* defer to the main loop; the pending-ecore ref is released there */
        send = emotion_gstreamer_message_alloc(ev, msg);

        if (send)
          {
             _emotion_pending_ecore_begin();
             ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
          }

        break;

      case GST_MESSAGE_STATE_CHANGED:
        {
           GstState old_state, new_state;

           gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
           INF("Element %s changed state from %s to %s.",
               GST_OBJECT_NAME(msg->src),
               gst_element_state_get_name(old_state),
               gst_element_state_get_name(new_state));

           /* FIXME: This is broken */
           /* only decodebin READY transitions are forwarded, matched by
            * element name prefix */
           if (!strncmp(GST_OBJECT_NAME(msg->src), "decodebin", 9) && new_state == GST_STATE_READY)
             {
                send = emotion_gstreamer_message_alloc(ev, msg);

                if (send)
                  {
                     _emotion_pending_ecore_begin();
                     ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
                  }
             }
           break;
        }
      case GST_MESSAGE_ERROR:
        {
           GError *error;
           gchar *debug;

           gst_message_parse_error(msg, &error, &debug);
           ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
           ERR("Debugging info: %s", (debug) ? debug : "none");
           g_error_free(error);
           g_free(debug);

           /* FIXME: This is broken */
           /* only xvimagesink failures trigger the canvas fallback path */
           if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
             {
                send = emotion_gstreamer_message_alloc(ev, msg);

                if (send)
                  {
                     _emotion_pending_ecore_begin();
                     ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
                  }
             }
           break;
        }
      case GST_MESSAGE_WARNING:
        {
           GError *error;
           gchar *debug;

           gst_message_parse_warning(msg, &error, &debug);
           WRN("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
           WRN("Debugging info: %s", (debug) ? debug : "none");
           g_error_free(error);
           g_free(debug);
           break;
        }
      default:
        WRN("bus say: %s [%i - %s]",
            GST_MESSAGE_SRC_NAME(msg),
            GST_MESSAGE_TYPE(msg),
            GST_MESSAGE_TYPE_NAME(msg));
        break;
     }

   /* sync handler owns the ref when returning GST_BUS_DROP */
   gst_message_unref(msg);

   return GST_BUS_DROP;
}
1809
1810Eina_Bool
1811_emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
1812 Eina_Bool force)
1813{
1814 gboolean res;
1815 int i;
1816
1817 if (ev->pipeline_parsed)
1818 return EINA_TRUE;
1819
1820 if (force && ev->threads)
1821 {
1822 Ecore_Thread *t;
1823
1824 EINA_LIST_FREE(ev->threads, t)
1825 ecore_thread_cancel(t);
1826 }
1827
1828 if (ev->threads)
1829 return EINA_FALSE;
1830
1831 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1832 if (res == GST_STATE_CHANGE_NO_PREROLL)
1833 {
1834 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1835
1836 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1837 }
1838
1839 /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1840 /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1841 if (getenv("EMOTION_GSTREAMER_DOT"))
1842 GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1843 GST_DEBUG_GRAPH_SHOW_ALL,
1844 getenv("EMOTION_GSTREAMER_DOT"));
1845
1846 if (!(res == GST_STATE_CHANGE_SUCCESS
1847 || res == GST_STATE_CHANGE_NO_PREROLL))
1848 {
1849 ERR("Unable to get GST_CLOCK_TIME_NONE.");
1850 return EINA_FALSE;
1851 }
1852
1853 g_object_get(G_OBJECT(ev->pipeline),
1854 "n-audio", &ev->audio_stream_nbr,
1855 "n-video", &ev->video_stream_nbr,
1856 NULL);
1857
1858 if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1859 {
1860 ERR("No audio nor video stream found");
1861 return EINA_FALSE;
1862 }
1863
1864 /* video stream */
1865 for (i = 0; i < ev->video_stream_nbr; i++)
1866 {
1867 Emotion_Video_Stream *vstream;
1868 GstPad *pad = NULL;
1869 GstCaps *caps;
1870 GstQuery *query;
1871
1872 gdouble length_time = 0.0;
1873 GstVideoInfo info;
1874
1875 g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1876 if (!pad)
1877 continue;
1878
1879 caps = gst_pad_get_current_caps(pad);
1880 if (!caps)
1881 goto unref_pad_v;
1882
1883 if (!gst_video_info_from_caps(&info, caps))
1884 goto unref_caps_v;
1885
1886 query = gst_query_new_duration(GST_FORMAT_TIME);
1887 if (gst_pad_peer_query(pad, query))
1888 {
1889 gint64 t;
1890
1891 gst_query_parse_duration(query, NULL, &t);
1892 length_time = (double)t / (double)GST_SECOND;
1893 }
1894 else
1895 goto unref_query_v;
1896
1897 vstream = emotion_video_stream_new(ev);
1898 if (!vstream) goto unref_query_v;
1899
1900 vstream->length_time = length_time;
1901 vstream->info = info;
1902 vstream->index = i;
1903
1904 unref_query_v:
1905 gst_query_unref(query);
1906 unref_caps_v:
1907 gst_caps_unref(caps);
1908 unref_pad_v:
1909 gst_object_unref(pad);
1910 }
1911
1912 /* Audio streams */
1913 for (i = 0; i < ev->audio_stream_nbr; i++)
1914 {
1915 Emotion_Audio_Stream *astream;
1916 GstPad *pad;
1917 GstCaps *caps;
1918 GstAudioInfo info;
1919 GstQuery *query;
1920
1921 gdouble length_time = 0.0;
1922
1923 g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1924 if (!pad)
1925 continue;
1926
1927 caps = gst_pad_get_current_caps(pad);
1928 if (!caps)
1929 goto unref_pad_a;
1930
1931 if (!gst_audio_info_from_caps(&info, caps))
1932 goto unref_caps_a;
1933
1934 query = gst_query_new_duration(GST_FORMAT_TIME);
1935 if (gst_pad_peer_query(pad, query))
1936 {
1937 gint64 t;
1938
1939 gst_query_parse_duration(query, NULL, &t);
1940 length_time = (double)t / (double)GST_SECOND;
1941 }
1942 else
1943 goto unref_query_a;
1944
1945 astream = calloc(1, sizeof(Emotion_Audio_Stream));
1946 if (!astream) continue;
1947 ev->audio_streams = eina_list_append(ev->audio_streams, astream);
1948
1949 astream->length_time = length_time;
1950 astream->info = info;
1951
1952 unref_query_a:
1953 gst_query_unref(query);
1954 unref_caps_a:
1955 gst_caps_unref(caps);
1956 unref_pad_a:
1957 gst_object_unref(pad);
1958 }
1959
1960 /* Visualization sink */
1961 if (ev->video_stream_nbr == 0)
1962 {
1963 GstElement *vis = NULL;
1964 Emotion_Video_Stream *vstream;
1965 Emotion_Audio_Stream *astream;
1966 gint flags;
1967 const char *vis_name;
1968
1969 if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1970 {
1971 WRN("pb vis name %d", ev->vis);
1972 goto finalize;
1973 }
1974
1975 astream = eina_list_data_get(ev->audio_streams);
1976
1977 vis = gst_element_factory_make(vis_name, "vissink");
1978 vstream = emotion_video_stream_new(ev);
1979 if (!vstream)
1980 goto finalize;
1981 else
1982 DBG("could not create visualization stream");
1983
1984 vstream->length_time = astream->length_time;
1985 gst_video_info_init (&vstream->info);
1986 gst_video_info_set_format (&vstream->info, 320, 200, GST_VIDEO_FORMAT_ARGB);
1987 vstream->info.fps_n = 25;
1988 vstream->info.fps_d = 1;
1989
1990 g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1991 g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
1992 flags |= 0x00000008;
1993 g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
1994 }
1995
1996 finalize:
1997
1998 ev->video_stream_nbr = eina_list_count(ev->video_streams);
1999 ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
2000
2001 if (ev->video_stream_nbr == 1)
2002 {
2003 Emotion_Video_Stream *vstream;
2004
2005 vstream = eina_list_data_get(ev->video_streams);
2006 ev->ratio = (double)vstream->info.width / (double)vstream->info.height;
2007 ev->ratio *= (double)vstream->info.par_n / (double)vstream->info.par_d;
2008 _emotion_frame_resize(ev->obj, vstream->info.width, vstream->info.height, ev->ratio);
2009 }
2010
2011 {
2012 /* on recapitule : */
2013 Emotion_Video_Stream *vstream;
2014 Emotion_Audio_Stream *astream;
2015
2016 vstream = eina_list_data_get(ev->video_streams);
2017 if (vstream)
2018 {
2019 DBG("video size=%dx%d, fps=%d/%d, "
2020 "format=%s, length=%"GST_TIME_FORMAT,
2021 vstream->info.width, vstream->info.height, vstream->info.fps_n, vstream->info.fps_d,
2022 gst_video_format_to_string(vstream->info.finfo->format),
2023 GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
2024 }
2025
2026 astream = eina_list_data_get(ev->audio_streams);
2027 if (astream)
2028 {
2029 DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
2030 astream->info.channels, astream->info.rate,
2031 GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
2032 }
2033 }
2034
2035 if (ev->metadata)
2036 _free_metadata(ev->metadata);
2037 ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
2038
2039 ev->pipeline_parsed = EINA_TRUE;
2040
2041 em_audio_channel_volume_set(ev, ev->volume);
2042 em_audio_channel_mute_set(ev, ev->audio_mute);
2043
2044 if (ev->play_started)
2045 {
2046 _emotion_playback_started(ev->obj);
2047 ev->play_started = 0;
2048 }
2049
2050 _emotion_open_done(ev->obj);
2051
2052 return EINA_TRUE;
2053}
diff --git a/src/modules/emotion/gstreamer1/emotion_gstreamer.h b/src/modules/emotion/gstreamer1/emotion_gstreamer.h
new file mode 100644
index 0000000000..7ba9050cf0
--- /dev/null
+++ b/src/modules/emotion/gstreamer1/emotion_gstreamer.h
@@ -0,0 +1,295 @@
1#ifndef __EMOTION_GSTREAMER_H__
2#define __EMOTION_GSTREAMER_H__
3
4#include "emotion_modules.h"
5
6#include <unistd.h>
7#include <fcntl.h>
8
9#include <Eina.h>
10#include <Evas.h>
11#include <Ecore.h>
12
13#include <glib.h>
14#include <gst/gst.h>
15#include <glib-object.h>
16#include <gst/video/gstvideosink.h>
17#include <gst/video/video.h>
18#include <gst/audio/audio.h>
19#include <gst/tag/tag.h>
20
21// forcibly disable x overlay window.. broken badly.
22#undef HAVE_ECORE_X
23
24#ifdef HAVE_ECORE_X
25# include <Ecore_X.h>
26# include <gst/video/videooverlay.h>
27#endif
28
29typedef void (*Evas_Video_Convert_Cb)(unsigned char *evas_data,
30 const unsigned char *gst_data,
31 unsigned int w,
32 unsigned int h,
33 unsigned int output_height);
34
35typedef struct _EvasVideoSinkPrivate EvasVideoSinkPrivate;
36typedef struct _EvasVideoSink EvasVideoSink;
37typedef struct _EvasVideoSinkClass EvasVideoSinkClass;
38typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video;
39typedef struct _Emotion_Audio_Stream Emotion_Audio_Stream;
40typedef struct _Emotion_Gstreamer_Metadata Emotion_Gstreamer_Metadata;
41typedef struct _Emotion_Gstreamer_Buffer Emotion_Gstreamer_Buffer;
42typedef struct _Emotion_Gstreamer_Message Emotion_Gstreamer_Message;
43typedef struct _Emotion_Video_Stream Emotion_Video_Stream;
44
/* A video stream discovered on the playbin pipeline. */
struct _Emotion_Video_Stream
{
   gdouble length_time; /* stream duration in seconds */
   GstVideoInfo info;   /* size/par/fps/format parsed from the pad caps */
   int index;           /* playbin stream index (0-based) */
};

/* An audio stream discovered on the playbin pipeline. */
struct _Emotion_Audio_Stream
{
   gdouble length_time; /* stream duration in seconds */
   GstAudioInfo info;   /* channels/rate/format parsed from the pad caps */
};

/* Tag metadata extracted from the media; all strings are owned by this
 * struct and freed together with it. */
struct _Emotion_Gstreamer_Metadata
{
   char *title;
   char *album;
   char *artist;
   char *genre;
   char *comment;
   char *year;
   char *count;
   char *disc_id;
};

/* Per-emotion-object engine state: the playbin pipeline, the sinks hung
 * off it, discovered stream lists and playback bookkeeping. */
struct _Emotion_Gstreamer_Video
{
   const Emotion_Engine *api;

   /* Gstreamer elements */
   GstElement *pipeline;
   GstElement *sink;
   GstElement *esink;
   GstElement *xvsink;
   GstElement *tee;
   GstElement *convert;

   GstPad *eteepad;
   GstPad *xvteepad;
   GstPad *xvpad;
   Eina_List *threads;   /* pending Ecore_Thread state-change jobs */

   /* eos */
   GstBus *eos_bus;

   /* Streams */
   Eina_List *video_streams;  /* list of Emotion_Video_Stream* */
   Eina_List *audio_streams;  /* list of Emotion_Audio_Stream* */

   int video_stream_nbr;
   int audio_stream_nbr;

   /* We need to keep a copy of the last inserted buffer as evas doesn't copy YUV data around */
   GstBuffer *last_buffer;

   /* Evas object */
   Evas_Object *obj;

   /* Characteristics of stream */
   double position;   /* last known playback position, seconds */
   double ratio;      /* display aspect ratio */
   double volume;

   volatile int seek_to;
   volatile int get_poslen;

   Emotion_Gstreamer_Metadata *metadata;

#ifdef HAVE_ECORE_X
   Ecore_X_Window win;
#endif

   const char *uri;

   /* pending frame kept back for _video_update_pixels() */
   Emotion_Gstreamer_Buffer *send;

   EvasVideoSinkPrivate *sink_data;

   Emotion_Vis vis;   /* requested visualization for audio-only media */

   int in;
   int out;

   /* FPS debugging counters (see _update_emotion_fps) */
   int frames;
   int flapse;
   double rtime;
   double rlapse;

   struct
   {
      double width;
      double height;
   } fill;

   Eina_Bool play : 1;
   Eina_Bool play_started : 1;
   Eina_Bool video_mute : 1;
   Eina_Bool audio_mute : 1;
   Eina_Bool pipeline_parsed : 1;
   Eina_Bool delete_me : 1;
   Eina_Bool kill_buffer : 1;
   Eina_Bool stream : 1;
   Eina_Bool priority : 1;

   int src_width;
   int src_height;
};

struct _EvasVideoSink {
    /*< private >*/
    GstVideoSink parent;
    EvasVideoSinkPrivate *priv;
};

struct _EvasVideoSinkClass {
    /*< private >*/
    GstVideoSinkClass parent_class;
};

/* Private data of the Evas video sink element. */
struct _EvasVideoSinkPrivate {
   EINA_REFCOUNT;

   Evas_Object *o;              /* target Evas image object (NULL once gone) */

   Emotion_Gstreamer_Video *ev;

   Evas_Video_Convert_Cb func; /* colorspace conversion for the current caps */

   GstVideoInfo info;          /* negotiated caps details */
   unsigned int eheight;       /* height as exposed to Evas (may be rounded) */
   Evas_Colorspace eformat;    /* matching Evas colorspace */

   Eina_Lock m;
   Eina_Condition c;

   // If this is TRUE all processing should finish ASAP
   // This is necessary because there could be a race between
   // unlock() and render(), where unlock() wins, signals the
   // GCond, then render() tries to render a frame although
   // everything else isn't running anymore. This will lead
   // to deadlocks because render() holds the stream lock.
   //
   // Protected by the buffer mutex
   Eina_Bool unlocked : 1;
};

/* A frame handed from the streaming thread to the main loop. */
struct _Emotion_Gstreamer_Buffer
{
   Emotion_Gstreamer_Video *ev;
   EvasVideoSinkPrivate *sink;

   GstBuffer *frame;

   Eina_Bool preroll : 1;  /* frame came from preroll, not render */
   Eina_Bool force : 1;    /* forced synchronous render (update_pixels) */
};

/* A bus message forwarded from the sync handler to the main loop. */
struct _Emotion_Gstreamer_Message
{
   Emotion_Gstreamer_Video *ev;

   GstMessage *msg;
};
208
209extern Eina_Bool window_manager_video;
210extern Eina_Bool debug_fps;
211extern int _emotion_gstreamer_log_domain;
212extern Eina_Bool _ecore_x_available;
213
214#ifdef DBG
215#undef DBG
216#endif
217#define DBG(...) EINA_LOG_DOM_DBG(_emotion_gstreamer_log_domain, __VA_ARGS__)
218
219#ifdef INF
220#undef INF
221#endif
222#define INF(...) EINA_LOG_DOM_INFO(_emotion_gstreamer_log_domain, __VA_ARGS__)
223
224#ifdef WRN
225#undef WRN
226#endif
227#define WRN(...) EINA_LOG_DOM_WARN(_emotion_gstreamer_log_domain, __VA_ARGS__)
228
229#ifdef ERR
230#undef ERR
231#endif
232#define ERR(...) EINA_LOG_DOM_ERR(_emotion_gstreamer_log_domain, __VA_ARGS__)
233
234#ifdef CRITICAL
235#undef CRITICAL
236#endif
237#define CRITICAL(...) EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__)
238
239#define EVAS_TYPE_VIDEO_SINK evas_video_sink_get_type()
240
241GType fakeeos_bin_get_type(void);
242
243#define EVAS_VIDEO_SINK(obj) \
244 (G_TYPE_CHECK_INSTANCE_CAST((obj), \
245 EVAS_TYPE_VIDEO_SINK, EvasVideoSink))
246
247#define EVAS_VIDEO_SINK_CLASS(klass) \
248 (G_TYPE_CHECK_CLASS_CAST((klass), \
249 EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
250
251#define EVAS_IS_VIDEO_SINK(obj) \
252 (G_TYPE_CHECK_INSTANCE_TYPE((obj), \
253 EVAS_TYPE_VIDEO_SINK))
254
255#define EVAS_IS_VIDEO_SINK_CLASS(klass) \
256 (G_TYPE_CHECK_CLASS_TYPE((klass), \
257 EVAS_TYPE_VIDEO_SINK))
258
259#define EVAS_VIDEO_SINK_GET_CLASS(obj) \
260 (G_TYPE_INSTANCE_GET_CLASS((obj), \
261 EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
262
263#define GST_TYPE_FAKEEOS_BIN fakeeos_bin_get_type()
264
265GstElement *gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
266 Evas_Object *obj,
267 const char *uri);
268
269gboolean gstreamer_plugin_init(GstPlugin *plugin);
270
271Emotion_Gstreamer_Buffer *emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
272 GstBuffer *buffer,
273 Eina_Bool preroll);
274void emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send);
275
276Emotion_Gstreamer_Message *emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
277 GstMessage *msg);
278void emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send);
279Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
280 Eina_Bool force);
281
typedef struct _ColorSpace_Format_Convertion ColorSpace_Format_Convertion;

/* One entry of the GstVideoFormat -> Evas_Colorspace conversion table
 * (terminated by a NULL name; see evas_video_sink_set_caps). */
struct _ColorSpace_Format_Convertion
{
   const char *name;            /* human-readable format name for logs */
   GstVideoFormat format;       /* GStreamer source format */
   Evas_Colorspace eformat;     /* matching Evas colorspace */
   Evas_Video_Convert_Cb func;  /* pixel conversion routine */
   Eina_Bool force_height;      /* round height down to even (YUV subsampling) */
};
292
293extern const ColorSpace_Format_Convertion colorspace_format_convertion[];
294
295#endif /* __EMOTION_GSTREAMER_H__ */
diff --git a/src/modules/emotion/gstreamer1/emotion_sink.c b/src/modules/emotion/gstreamer1/emotion_sink.c
new file mode 100644
index 0000000000..3a061b6dd8
--- /dev/null
+++ b/src/modules/emotion/gstreamer1/emotion_sink.c
@@ -0,0 +1,1006 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include "emotion_gstreamer.h"
6
/* Caps the sink accepts: exactly the layouts the colorspace conversion
 * table knows how to turn into Evas colorspaces. */
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
                                                                   GST_PAD_SINK, GST_PAD_ALWAYS,
                                                                   GST_STATIC_CAPS(GST_VIDEO_CAPS_MAKE("{ I420, YV12, YUY2, NV12, BGRx, BGR, BGRA }")));

GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug

/* No signals are emitted by this sink. */
enum {
  LAST_SIGNAL
};

/* GObject property ids. */
enum {
  PROP_0,
  PROP_EVAS_OBJECT,
  PROP_WIDTH,
  PROP_HEIGHT,
  PROP_EV,
  PROP_LAST
};

#define _do_init                                        \
   GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,       \
                           "emotion-sink",		\
                           0,                           \
                           "emotion video sink")

#define parent_class evas_video_sink_parent_class
G_DEFINE_TYPE_WITH_CODE(EvasVideoSink,
                        evas_video_sink,
                        GST_TYPE_VIDEO_SINK,
                        _do_init);


static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
static void evas_video_sink_main_render(void *data);
42
43static void
44evas_video_sink_init(EvasVideoSink* sink)
45{
46 EvasVideoSinkPrivate* priv;
47
48 INF("sink init");
49 sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
50 priv->o = NULL;
51 priv->info.width = 0;
52 priv->info.height = 0;
53 priv->eheight = 0;
54 priv->func = NULL;
55 priv->eformat = EVAS_COLORSPACE_ARGB8888;
56 eina_lock_new(&priv->m);
57 eina_condition_new(&priv->c, &priv->m);
58 priv->unlocked = EINA_FALSE;
59}
60
61/**** Object methods ****/
62static void
63_cleanup_priv(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED)
64{
65 EvasVideoSinkPrivate* priv;
66
67 priv = data;
68
69 eina_lock_take(&priv->m);
70 if (priv->o == obj)
71 priv->o = NULL;
72 eina_lock_release(&priv->m);
73}
74
/* GObject property setter: accepts the target Evas object and the
 * engine instance pointer; both writes are done under the buffer lock. */
static void
evas_video_sink_set_property(GObject * object, guint prop_id,
                             const GValue * value, GParamSpec * pspec)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK (object);
   priv = sink->priv;

   switch (prop_id) {
    case PROP_EVAS_OBJECT:
      eina_lock_take(&priv->m);
      /* Re-target: move the deletion callback to the new object. */
      if (priv->o)
        evas_object_event_callback_del(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv);
      priv->o = g_value_get_pointer (value);
      INF("sink set Evas_Object %p.", priv->o);
      if (priv->o)
        evas_object_event_callback_add(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv, priv);
      eina_lock_release(&priv->m);
      break;
    case PROP_EV:
      INF("sink set ev.");
      eina_lock_take(&priv->m);
      priv->ev = g_value_get_pointer (value);
      eina_lock_release(&priv->m);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      ERR("invalid property");
      break;
   }
}
108
109static void
110evas_video_sink_get_property(GObject * object, guint prop_id,
111 GValue * value, GParamSpec * pspec)
112{
113 EvasVideoSink* sink;
114 EvasVideoSinkPrivate* priv;
115
116 sink = EVAS_VIDEO_SINK (object);
117 priv = sink->priv;
118
119 switch (prop_id) {
120 case PROP_EVAS_OBJECT:
121 INF("sink get property.");
122 eina_lock_take(&priv->m);
123 g_value_set_pointer(value, priv->o);
124 eina_lock_release(&priv->m);
125 break;
126 case PROP_WIDTH:
127 INF("sink get width.");
128 eina_lock_take(&priv->m);
129 g_value_set_int(value, priv->info.width);
130 eina_lock_release(&priv->m);
131 break;
132 case PROP_HEIGHT:
133 INF("sink get height.");
134 eina_lock_take(&priv->m);
135 g_value_set_int (value, priv->eheight);
136 eina_lock_release(&priv->m);
137 break;
138 case PROP_EV:
139 INF("sink get ev.");
140 eina_lock_take(&priv->m);
141 g_value_set_pointer (value, priv->ev);
142 eina_lock_release(&priv->m);
143 break;
144 default:
145 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
146 ERR("invalide property");
147 break;
148 }
149}
150
151static void
152evas_video_sink_dispose(GObject* object)
153{
154 EvasVideoSink* sink;
155 EvasVideoSinkPrivate* priv;
156
157 INF("dispose.");
158
159 sink = EVAS_VIDEO_SINK(object);
160 priv = sink->priv;
161
162 eina_lock_free(&priv->m);
163 eina_condition_free(&priv->c);
164
165 G_OBJECT_CLASS(parent_class)->dispose(object);
166}
167
168
169/**** BaseSink methods ****/
170
/* BaseSink set_caps: parse the negotiated caps into priv->info and look
 * up the matching Evas colorspace + conversion routine in the
 * colorspace_format_convertion table. Returns FALSE on unparsable caps
 * or a format the table does not cover. */
gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;
   GstVideoInfo info;
   unsigned int i;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   if (!gst_video_info_from_caps(&info, caps))
     {
        ERR("Unable to parse caps.");
        return FALSE;
     }

   priv->info = info;
   priv->eheight = info.height;

   for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
     if (info.finfo->format == colorspace_format_convertion[i].format)
       {
          DBG("Found '%s'", colorspace_format_convertion[i].name);
          priv->eformat = colorspace_format_convertion[i].eformat;
          priv->func = colorspace_format_convertion[i].func;
          if (colorspace_format_convertion[i].force_height)
            {
               /* Round height down to even for subsampled YUV layouts. */
               priv->eheight = (priv->eheight >> 1) << 1;
            }
          /* New format: the cached last_buffer must not be reused. */
          if (priv->ev)
            priv->ev->kill_buffer = EINA_TRUE;
          return TRUE;
       }

   ERR("unsupported : %s\n", gst_video_format_to_string(info.finfo->format));
   return FALSE;
}
208
209static gboolean
210evas_video_sink_start(GstBaseSink* base_sink)
211{
212 EvasVideoSinkPrivate* priv;
213 gboolean res = TRUE;
214
215 INF("sink start");
216
217 priv = EVAS_VIDEO_SINK(base_sink)->priv;
218 eina_lock_take(&priv->m);
219 if (!priv->o)
220 res = FALSE;
221 else
222 priv->unlocked = EINA_FALSE;
223 eina_lock_release(&priv->m);
224 return res;
225}
226
227static gboolean
228evas_video_sink_stop(GstBaseSink* base_sink)
229{
230 EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
231
232 INF("sink stop");
233
234 unlock_buffer_mutex(priv);
235 return TRUE;
236}
237
/* BaseSink unlock: flush request from GStreamer — set the unlocked flag
 * and signal the condition so render() returns, then chain up. */
static gboolean
evas_video_sink_unlock(GstBaseSink* object)
{
   EvasVideoSink* sink;

   INF("sink unlock");

   sink = EVAS_VIDEO_SINK(object);

   unlock_buffer_mutex(sink->priv);

   /* Chain to the parent implementation, defaulting to TRUE if the
    * parent class does not implement unlock. */
   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
                                       (object), TRUE);
}
252
253static gboolean
254evas_video_sink_unlock_stop(GstBaseSink* object)
255{
256 EvasVideoSink* sink;
257 EvasVideoSinkPrivate* priv;
258
259 sink = EVAS_VIDEO_SINK(object);
260 priv = sink->priv;
261
262 INF("sink unlock stop");
263
264 eina_lock_take(&priv->m);
265 priv->unlocked = FALSE;
266 eina_lock_release(&priv->m);
267
268 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
269 (object), TRUE);
270}
271
/* BaseSink preroll: post the first frame to the main loop for display.
 * Unlike render(), this does not block waiting for the main loop. */
static GstFlowReturn
evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
{
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;
   EvasVideoSink *sink;

   INF("sink preroll %p [%" G_GSIZE_FORMAT "]", buffer, gst_buffer_get_size(buffer));

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   if (gst_buffer_get_size(buffer) <= 0)
     {
        WRN("empty buffer");
        return GST_FLOW_OK;
     }

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);

   if (send)
     {
        /* Balanced by _emotion_pending_ecore_end() in main_render. */
        _emotion_pending_ecore_begin();
        ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
     }

   return GST_FLOW_OK;
}
300
/* BaseSink render: hand the frame to the main loop and block on the
 * condition until main_render signals completion (or the sink is
 * unlocked, in which case we return FLUSHING immediately). Runs on the
 * GStreamer streaming thread. */
static GstFlowReturn
evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
{
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;
   EvasVideoSink *sink;

   INF("sink render %p", buffer);

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   eina_lock_take(&priv->m);

   if (priv->unlocked) {
      ERR("LOCKED");
      eina_lock_release(&priv->m);
      /* Sink is flushing: tell upstream, do not push the frame. */
      return GST_FLOW_FLUSHING;
   }

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
   if (!send) {
      eina_lock_release(&priv->m);
      return GST_FLOW_ERROR;
   }

   /* Balanced by _emotion_pending_ecore_end() in main_render. */
   _emotion_pending_ecore_begin();
   ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);

   /* Wait for the main loop to finish with the frame. */
   eina_condition_wait(&priv->c);
   eina_lock_release(&priv->m);

   return GST_FLOW_OK;
}
335
336static void
337_update_emotion_fps(Emotion_Gstreamer_Video *ev)
338{
339 double tim;
340
341 if (!debug_fps) return;
342
343 tim = ecore_time_get();
344 ev->frames++;
345
346 if (ev->rlapse == 0.0)
347 {
348 ev->rlapse = tim;
349 ev->flapse = ev->frames;
350 }
351 else if ((tim - ev->rlapse) >= 0.5)
352 {
353 printf("FRAME: %i, FPS: %3.1f\n",
354 ev->frames,
355 (ev->frames - ev->flapse) / (tim - ev->rlapse));
356 ev->rlapse = tim;
357 ev->flapse = ev->frames;
358 }
359}
360
/* Runs in the Ecore main loop (posted from render()/preroll() via
 * ecore_main_loop_thread_safe_call_async): converts the frame into the
 * Evas image object, updates playback position/ratio bookkeeping, and
 * finally signals the streaming thread blocked in render(). */
static void
evas_video_sink_main_render(void *data)
{
   Emotion_Gstreamer_Buffer *send;
   Emotion_Gstreamer_Video *ev = NULL;
   Emotion_Video_Stream *vstream;
   EvasVideoSinkPrivate *priv = NULL;
   GstBuffer *buffer;
   GstMapInfo map;
   unsigned char *evas_data;
   gint64 pos;
   Eina_Bool preroll = EINA_FALSE;

   send = data;

   if (!send) goto exit_point;

   priv = send->sink;
   buffer = send->frame;
   preroll = send->preroll;
   ev = send->ev;

   /* frame after cleanup */
   if (!preroll && !ev->last_buffer)
     {
        priv = NULL;
        goto exit_point;
     }

   if (!priv || !priv->o || priv->unlocked)
     goto exit_point;

   /* A newer frame superseded the one held for _video_update_pixels(). */
   if (ev->send && send != ev->send)
     {
        emotion_gstreamer_buffer_free(ev->send);
        ev->send = NULL;
     }

   /* Video-surface path: keep the buffer and defer the pixel upload
    * until Evas pulls it via _video_update_pixels() (send->force). */
   if (!ev->stream && !send->force)
     {
        ev->send = send;
        _emotion_frame_new(ev->obj);
        evas_object_image_data_update_add(priv->o, 0, 0, priv->info.width, priv->eheight);
        goto exit_stream;
     }

   if (!gst_buffer_map(buffer, &map, GST_MAP_READ))
     goto exit_stream;

   _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);

   INF("sink main render [%i, %i] (source height: %i)", priv->info.width, priv->eheight, priv->info.height);

   evas_object_image_alpha_set(priv->o, 0);
   evas_object_image_colorspace_set(priv->o, priv->eformat);
   evas_object_image_size_set(priv->o, priv->info.width, priv->eheight);

   evas_data = evas_object_image_data_get(priv->o, 1);

   /* Convert the mapped GStreamer frame into the Evas colorspace. */
   if (priv->func)
     priv->func(evas_data, map.data, priv->info.width, priv->info.height, priv->eheight);
   else
     WRN("No way to decode %x colorspace !", priv->eformat);

   gst_buffer_unmap(buffer, &map);

   evas_object_image_data_set(priv->o, evas_data);
   evas_object_image_data_update_add(priv->o, 0, 0, priv->info.width, priv->eheight);
   evas_object_image_pixels_dirty_set(priv->o, 0);

   _update_emotion_fps(ev);

   if (!preroll && ev->play_started)
     {
        _emotion_playback_started(ev->obj);
        ev->play_started = 0;
     }

   if (!send->force)
     {
        _emotion_frame_new(ev->obj);
     }

   gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &pos);
   ev->position = (double)pos / (double)GST_SECOND;

   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);

   if (vstream)
     {
        vstream->info.width = priv->info.width;
        vstream->info.height = priv->eheight;
        _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
     }

   /* Display ratio = storage ratio corrected by the pixel aspect ratio. */
   ev->ratio = (double) priv->info.width / (double) priv->eheight;
   ev->ratio *= (double) priv->info.par_n / (double) priv->info.par_d;

   _emotion_frame_resize(ev->obj, priv->info.width, priv->eheight, ev->ratio);

   /* Keep a ref on the last frame: Evas does not copy YUV data around. */
   buffer = gst_buffer_ref(buffer);
   if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
   ev->last_buffer = buffer;

 exit_point:
   if (send) emotion_gstreamer_buffer_free(send);

 exit_stream:
   if (priv)
     {
        if (preroll || !priv->o)
          {
             _emotion_pending_ecore_end();
             return;
          }

        /* Release the streaming thread blocked in render(). */
        if (!priv->unlocked)
          eina_condition_signal(&priv->c);
     }
   _emotion_pending_ecore_end();
}
482
/* Mark the sink as flushing and wake any thread waiting in render().
 * NOTE(review): the flag is written and the condition signalled without
 * priv->m held — presumably relying on callers/render() ordering; confirm
 * whether eina_condition_signal() requires the associated lock here. */
static void
unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
{
   priv->unlocked = EINA_TRUE;

   eina_condition_signal(&priv->c);
}
490
/* GObject class init: install properties, register the pad template and
 * element metadata, and hook up the GstBaseSink virtual methods. */
static void
evas_video_sink_class_init(EvasVideoSinkClass* klass)
{
   GObjectClass* gobject_class;
   GstElementClass* gstelement_class;
   GstBaseSinkClass* gstbase_sink_class;

   gobject_class = G_OBJECT_CLASS(klass);
   gstelement_class = GST_ELEMENT_CLASS(klass);
   gstbase_sink_class = GST_BASE_SINK_CLASS(klass);

   g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));

   gobject_class->set_property = evas_video_sink_set_property;
   gobject_class->get_property = evas_video_sink_get_property;

   g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
                                    g_param_spec_pointer ("evas-object", "Evas Object",
                                                          "The Evas object where the display of the video will be done",
                                                          G_PARAM_READWRITE));

   /* width/height are read-only: they reflect the negotiated caps. */
   g_object_class_install_property (gobject_class, PROP_WIDTH,
                                    g_param_spec_int ("width", "Width",
                                                      "The width of the video",
                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

   g_object_class_install_property (gobject_class, PROP_HEIGHT,
                                    g_param_spec_int ("height", "Height",
                                                      "The height of the video",
                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
   g_object_class_install_property (gobject_class, PROP_EV,
                                    g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
                                                          "The internal data of the emotion object",
                                                          G_PARAM_READWRITE));

   gobject_class->dispose = evas_video_sink_dispose;

   gst_element_class_add_pad_template(gstelement_class, gst_static_pad_template_get(&sinktemplate));
   gst_element_class_set_static_metadata(gstelement_class, "Evas video sink",
                                         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
                                         "Vincent Torri <vtorri@univ-evry.fr>");

   gstbase_sink_class->set_caps = evas_video_sink_set_caps;
   gstbase_sink_class->stop = evas_video_sink_stop;
   gstbase_sink_class->start = evas_video_sink_start;
   gstbase_sink_class->unlock = evas_video_sink_unlock;
   gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
   gstbase_sink_class->render = evas_video_sink_render;
   gstbase_sink_class->preroll = evas_video_sink_preroll;
}
541
542gboolean
543gstreamer_plugin_init (GstPlugin * plugin)
544{
545 return gst_element_register (plugin,
546 "emotion-sink",
547 GST_RANK_NONE,
548 EVAS_TYPE_VIDEO_SINK);
549}
550
551static void
552_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
553{
554 Emotion_Gstreamer_Video *ev = data;
555 gboolean res;
556
557 if (ecore_thread_check(thread) || !ev->pipeline) return;
558
559 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
560 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
561 if (res == GST_STATE_CHANGE_NO_PREROLL)
562 {
563 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
564 gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
565 }
566}
567
568static void
569_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
570{
571 Emotion_Gstreamer_Video *ev = data;
572
573 ev->threads = eina_list_remove(ev->threads, thread);
574
575 if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
576
577 if (ev->in == ev->out && ev->delete_me)
578 ev->api->del(ev);
579}
580
/* Ecore_Thread end callback: unregister the thread, restore PLAYING if
 * playback was requested, then either finish a pending delete or
 * (re)parse the pipeline streams. */
static void
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;

   ev->threads = eina_list_remove(ev->threads, thread);

   if (ev->play)
     {
        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
        ev->play_started = 1;
     }

   if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

   /* Deferred deletion takes priority over parsing. */
   if (ev->in == ev->out && ev->delete_me)
     ev->api->del(ev);
   else
     _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
}
601
602static void
603_video_resize(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED,
604 Evas_Coord w, Evas_Coord h)
605{
606#ifdef HAVE_ECORE_X
607 Emotion_Gstreamer_Video *ev = data;
608
609 ecore_x_window_resize(ev->win, w, h);
610 DBG("resize: %i, %i", w, h);
611#else
612 if (data)
613 {
614 DBG("resize: %i, %i (fake)", w, h);
615 }
616#endif
617}
618
619static void
620_video_move(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED,
621 Evas_Coord x, Evas_Coord y)
622{
623#ifdef HAVE_ECORE_X
624 Emotion_Gstreamer_Video *ev = data;
625 unsigned int pos[2];
626
627 DBG("move: %i, %i", x, y);
628 pos[0] = x; pos[1] = y;
629 ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
630#else
631 if (data)
632 {
633 DBG("move: %i, %i (fake)", x, y);
634 }
635#endif
636}
637
#if 0
/* Much better idea to always feed the XvImageSink and let him handle optimizing the rendering as we do */
/* Disabled GStreamer-0.10-era pad blocking: dynamically unlink/relink
 * the Xv branch of the tee. 1.0 replaced gst_pad_set_blocked_async()
 * with gst_pad_add_probe(); kept for reference only. */
static void
_block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
{
   if (blocked)
     {
        Emotion_Gstreamer_Video *ev = user_data;
        GstEvent *gev;

        gst_pad_unlink(ev->xvteepad, ev->xvpad);
        gev = gst_event_new_eos();
        gst_pad_send_event(ev->xvpad, gev);
        gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
     }
}

static void
_block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
{
   if (blocked)
     {
        Emotion_Gstreamer_Video *ev = user_data;

        gst_pad_link(ev->xvteepad, ev->xvpad);
        if (ev->play)
          gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
        else
          gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
        gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
     }
}
#endif
671
672static void
673_video_show(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
674{
675#ifdef HAVE_ECORE_X
676 Emotion_Gstreamer_Video *ev = data;
677
678 DBG("show xv");
679 ecore_x_window_show(ev->win);
680#else
681 if (data)
682 {
683 DBG("show xv (fake)");
684 }
685#endif
686 /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_link_cb, ev); */
687}
688
689static void
690_video_hide(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
691{
692#ifdef HAVE_ECORE_X
693 Emotion_Gstreamer_Video *ev = data;
694
695 DBG("hide xv");
696 ecore_x_window_hide(ev->win);
697#else
698 if (data)
699 {
700 DBG("hide xv (fake)");
701 }
702#endif
703 /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_unlink_cb, ev); */
704}
705
706static void
707_video_update_pixels(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
708{
709 Emotion_Gstreamer_Video *ev = data;
710 Emotion_Gstreamer_Buffer *send;
711
712 if (!ev->send) return;
713
714 send = ev->send;
715 send->force = EINA_TRUE;
716 ev->send = NULL;
717
718 _emotion_pending_ecore_begin();
719 evas_video_sink_main_render(send);
720}
721
722
/* Placeholder resize callback for the image object; intentionally empty. */
static void
_image_resize(void *data EINA_UNUSED, Evas *e EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event_info EINA_UNUSED)
{
}
727
728GstElement *
729gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
730 Evas_Object *o,
731 const char *uri)
732{
733 GstElement *playbin;
734 GstElement *bin = NULL;
735 GstElement *esink = NULL;
736 GstElement *xvsink = NULL;
737 GstElement *tee = NULL;
738 GstElement *queue = NULL;
739 Evas_Object *obj;
740 GstPad *pad;
741 GstPad *teepad;
742 int flags;
743 const char *launch;
744#if defined HAVE_ECORE_X
745 const char *engine = NULL;
746 Eina_List *engines;
747#endif
748
749 obj = emotion_object_image_get(o);
750 if (!obj)
751 {