summaryrefslogtreecommitdiff
path: root/src/modules
diff options
context:
space:
mode:
authorStefan Schmidt <s.schmidt@samsung.com>2019-10-30 17:09:20 +0100
committerCedric BAIL <cedric.bail@free.fr>2019-12-04 12:21:25 -0800
commitb8dc80c144fec54a521987535c57b995748ccece (patch)
tree525452784e313429732ff12f1f556485e159a19e /src/modules
parentf3d9b8ee703621cfbb52a03d346335148318af7a (diff)
emotion & evas: remove gstreamer 0.10 support
We have had Gstreamer 1.x support for a long time already. We used to keep this around as a fallback. By now Linux distributions start to actually no longer ship the Gstreamer 0.10.x packages and upstream has not seen a release in 5 years. Time to remove it on our side as well. Signed-off-by: Stefan Schmidt <s.schmidt@samsung.com> Reviewed-by: Cedric BAIL <cedric.bail@free.fr> Differential Revision: https://phab.enlightenment.org/D10779
Diffstat (limited to 'src/modules')
-rw-r--r--src/modules/emotion/gstreamer/emotion_alloc.c90
-rw-r--r--src/modules/emotion/gstreamer/emotion_convert.c251
-rw-r--r--src/modules/emotion/gstreamer/emotion_fakeeos.c70
-rw-r--r--src/modules/emotion/gstreamer/emotion_gstreamer.c2018
-rw-r--r--src/modules/emotion/gstreamer/emotion_gstreamer.h352
-rw-r--r--src/modules/emotion/gstreamer/emotion_sink.c1461
-rw-r--r--src/modules/emotion/gstreamer/meson.build18
-rw-r--r--src/modules/emotion/meson.build1
8 files changed, 0 insertions, 4261 deletions
diff --git a/src/modules/emotion/gstreamer/emotion_alloc.c b/src/modules/emotion/gstreamer/emotion_alloc.c
deleted file mode 100644
index c4aae047b7..0000000000
--- a/src/modules/emotion/gstreamer/emotion_alloc.c
+++ /dev/null
@@ -1,90 +0,0 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <Eina.h>
6#include <Evas.h>
7#include <Ecore.h>
8
9#include <glib.h>
10#include <gst/gst.h>
11#include <gst/video/video.h>
12#include <gst/video/gstvideosink.h>
13
14#ifdef HAVE_ECORE_X
15# include <Ecore_X.h>
16# ifdef HAVE_XOVERLAY_H
17# include <gst/interfaces/xoverlay.h>
18# endif
19#endif
20
21#include "Emotion.h"
22#include "emotion_gstreamer.h"
23
24Emotion_Gstreamer_Buffer *
25emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
26 GstBuffer *buffer,
27 Eina_Bool preroll)
28{
29 Emotion_Gstreamer_Buffer *send;
30
31 if (!sink->ev) return NULL;
32
33 send = malloc(sizeof (Emotion_Gstreamer_Buffer));
34 if (!send) return NULL;
35
36 send->sink = sink;
37 send->frame = gst_buffer_ref(buffer);
38 send->preroll = preroll;
39 send->force = EINA_FALSE;
40 sink->ev->out++;
41 send->ev = sink->ev;
42
43 return send;
44}
45
46void
47emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send)
48{
49 send->ev->in++;
50
51 if (send->ev->in == send->ev->out
52 && send->ev->threads == NULL
53 && send->ev->delete_me)
54 send->ev->api->del(send->ev);
55
56 gst_buffer_unref(send->frame);
57 free(send);
58}
59
60Emotion_Gstreamer_Message *
61emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
62 GstMessage *msg)
63{
64 Emotion_Gstreamer_Message *send;
65
66 if (!ev) return NULL;
67
68 send = malloc(sizeof (Emotion_Gstreamer_Message));
69 if (!send) return NULL;
70
71 ev->out++;
72 send->ev = ev;
73 send->msg = gst_message_ref(msg);
74
75 return send;
76}
77
78void
79emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send)
80{
81 send->ev->in++;
82
83 if (send->ev->in == send->ev->out
84 && send->ev->threads == NULL
85 && send->ev->delete_me)
86 send->ev->api->del(send->ev);
87
88 gst_message_unref(send->msg);
89 free(send);
90}
diff --git a/src/modules/emotion/gstreamer/emotion_convert.c b/src/modules/emotion/gstreamer/emotion_convert.c
deleted file mode 100644
index 2664d28be6..0000000000
--- a/src/modules/emotion/gstreamer/emotion_convert.c
+++ /dev/null
@@ -1,251 +0,0 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <Eina.h>
6#include <Evas.h>
7
8#include <glib.h>
9#include <gst/gst.h>
10#include <gst/video/video.h>
11#include <gst/video/gstvideosink.h>
12
13#ifdef HAVE_ECORE_X
14# include <Ecore_X.h>
15# ifdef HAVE_XOVERLAY_H
16# include <gst/interfaces/xoverlay.h>
17# endif
18#endif
19
20#include "Emotion.h"
21#include "emotion_gstreamer.h"
22
/* Copy packed BGR (step == 3) or BGRx (step == 4) pixels from
 * @gst_data into ARGB memory at @evas_data, forcing every alpha byte
 * to fully opaque. Converts @output_height rows of @w pixels; the
 * full source height @h is not needed. */
static inline void
_evas_video_bgrx_step(unsigned char *evas_data, const unsigned char *gst_data,
                      unsigned int w, unsigned int h, unsigned int output_height, unsigned int step)
{
   unsigned int row, col;

   (void)h;

   for (row = 0; row < output_height; ++row)
     for (col = 0; col < w; ++col)
       {
          evas_data[0] = gst_data[0];
          evas_data[1] = gst_data[1];
          evas_data[2] = gst_data[2];
          evas_data[3] = 255; /* opaque */
          gst_data += step;
          evas_data += 4;
       }
}
43
/* 24-bit BGR -> ARGB8888: delegate to the generic stepper with a
 * 3-byte-per-pixel source stride. */
static void
_evas_video_bgr(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned int src_bpp = 3;

   _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, src_bpp);
}
49
/* 32-bit BGRx -> ARGB8888: delegate to the generic stepper with a
 * 4-byte-per-pixel source stride (the x byte is discarded). */
static void
_evas_video_bgrx(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned int src_bpp = 4;

   _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, src_bpp);
}
55
/* 32-bit BGRA -> premultiplied ARGB8888: Evas expects color channels
 * premultiplied by alpha, so each component is scaled by a/255. */
static void
_evas_video_bgra(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   unsigned int row, col;

   (void)h;

   for (row = 0; row < output_height; ++row)
     for (col = 0; col < w; ++col)
       {
          const unsigned char a = gst_data[3];

          evas_data[0] = (gst_data[0] * a) / 255;
          evas_data[1] = (gst_data[1] * a) / 255;
          evas_data[2] = (gst_data[2] * a) / 255;
          evas_data[3] = a;
          gst_data += 4;
          evas_data += 4;
       }
}
78
/* Planar I420: fill the row-pointer table Evas expects (all Y rows,
 * then U rows, then V rows) with pointers into the GStreamer frame.
 * Strides follow GStreamer's I420 alignment rules. */
static void
_evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned char **rows = (const unsigned char **)evas_data;
   const unsigned int stride_y = GST_ROUND_UP_4(w);
   const unsigned int stride_uv = GST_ROUND_UP_8(w) / 2;
   const unsigned int half = output_height / 2;
   unsigned int n = 0, r;

   /* luma plane */
   for (r = 0; r < output_height; r++, n++)
     rows[n] = gst_data + r * stride_y;

   /* U plane follows all h luma rows */
   for (r = 0; r < half; r++, n++)
     rows[n] = gst_data + h * stride_y + r * stride_uv;

   /* V plane follows the U plane */
   for (r = 0; r < half; r++, n++)
     rows[n] = gst_data + h * stride_y + half * stride_uv + r * stride_uv;
}
105
/* Planar YV12: identical layout to I420 except the V plane precedes
 * the U plane in memory, so the chroma offsets are swapped relative
 * to _evas_video_i420. */
static void
_evas_video_yv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned char **rows = (const unsigned char **)evas_data;
   const unsigned int stride_y = GST_ROUND_UP_4(w);
   const unsigned int stride_uv = GST_ROUND_UP_8(w) / 2;
   const unsigned int half = output_height / 2;
   unsigned int n = 0, r;

   /* luma plane */
   for (r = 0; r < output_height; r++, n++)
     rows[n] = gst_data + r * stride_y;

   /* U rows: second chroma plane in memory (V is stored first) */
   for (r = 0; r < half; r++, n++)
     rows[n] = gst_data + h * stride_y + half * stride_uv + r * stride_uv;

   /* V rows: first chroma plane in memory */
   for (r = 0; r < half; r++, n++)
     rows[n] = gst_data + h * stride_y + r * stride_uv;
}
132
/* Packed YUY2 (4:2:2): one row pointer per output row into the packed
 * frame; two bytes per pixel, rows padded to a 4-byte boundary. */
static void
_evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned char **rows = (const unsigned char **)evas_data;
   const unsigned int stride = GST_ROUND_UP_4(w * 2);
   unsigned int r;

   (void)h;

   for (r = 0; r < output_height; r++)
     rows[r] = gst_data + r * stride;
}
147
/* Two-plane NV12: Y rows first, then the half-height interleaved UV
 * plane which starts right after output_height luma rows. */
static void
_evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned char **rows = (const unsigned char **)evas_data;
   unsigned int n = 0, r;

   (void)h;

   /* luma plane: one pointer per row */
   for (r = 0; r < output_height; r++, n++)
     rows[n] = gst_data + r * w;

   /* interleaved UV plane, half the height */
   for (r = 0; r < output_height / 2; r++, n++)
     rows[n] = gst_data + output_height * w + r * w;
}
165
/* Tiled NV12 (TM12/MT12 macroblock layout, single buffer): build the
 * Evas row-pointer table. Table entries step over pairs of 32-line
 * luma tile rows; the chroma plane starts at offset h * w and uses
 * 16-line tile rows at half width.
 * NOTE(review): exact tile geometry is assumed from the matching
 * decoder output — confirm against the producing codec. */
static void
_evas_video_mt12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED)
{
   const unsigned char **rows;
   unsigned int i;
   unsigned int j;

   rows = (const unsigned char **)evas_data;

   /* luma: one pointer per pair of 32-line tile rows */
   for (i = 0; i < (h / 32) / 2; i++)
     rows[i] = &gst_data[i * w * 2 * 32];

   /* odd leftover luma tile row */
   if ((h / 32) % 2)
     {
        rows[i] = &gst_data[i * w * 2 * 32];
        i++;
     }

   /* chroma rows follow the full luma plane */
   for (j = 0; j < ((h / 2) / 32) / 2; ++j, ++i)
     rows[i] = &gst_data[h * w + j * (w / 2) * 2 * 16];
}
187
/* Tiled ST12 frame delivered as a GstMultiPlaneImageBuffer:
 * plane 0 (uaddr[0]) carries luma, plane 1 (uaddr[1]) the interleaved
 * chroma. Builds the Evas row-pointer table, stepping over pairs of
 * 32-line luma tile rows and 16-line chroma tile rows, each with a
 * trailing odd-row fixup.
 * NOTE(review): tile geometry mirrors _evas_video_mt12 — confirm
 * against the producing decoder. */
void
_evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED)
{
   const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) gst_data;
   const unsigned char **rows;
   unsigned int i;
   unsigned int j;

   rows = (const unsigned char **)evas_data;

   /* luma: one pointer per pair of 32-line tile rows */
   for (i = 0; i < (h / 32) / 2; i++)
     rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
   /* odd leftover luma tile row */
   if ((h / 32) % 2)
     {
        rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
        i++;
     }

   /* chroma from plane 1: pairs of 16-line tile rows */
   for (j = 0; j < ((h / 2) / 16) / 2; j++, i++)
     {
        rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
     }
   /* odd leftover chroma tile row */
   if (((h / 2) / 16) % 2)
     rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
}
213
/* Tiled ST12 frame delivered as a Samsung SCMN_IMGB descriptor: plane
 * addresses and strides come from the descriptor rather than being
 * derived from @w. Same pairing of 32-line tile rows as the other
 * tiled converters, with odd-row fixups; the chroma row count is
 * driven by the descriptor's elevation[1].
 * NOTE(review): SCMN_IMGB field semantics (stride/elevation) assumed
 * from this usage — confirm against the vendor header. */
void
_evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w EINA_UNUSED, unsigned int h, unsigned int output_height EINA_UNUSED)
{
   const SCMN_IMGB *imgb = (const SCMN_IMGB *) gst_data;
   const unsigned char **rows;
   unsigned int i, j;

   rows = (const unsigned char **)evas_data;

   /* luma rows from plane 0, descriptor stride */
   for (i = 0; i < (h / 32) / 2; i++)
     rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
   if ((h / 32) % 2)
     {
        rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
        i++;
     }

   /* chroma rows from plane 1 */
   for (j = 0; j < (unsigned int) imgb->elevation[1] / 32 / 2; j++, i++)
     rows[i] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
   if ((imgb->elevation[1] / 32) % 2)
     rows[i++] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
}
236
/* FourCC-keyed converter table for raw YUV caps. Columns: name,
 * fourcc, matching Evas colorspace, converter function, and a final
 * flag (meaning not visible in this file — presumably "planar with
 * subsampled chroma"; TODO confirm against the header). NULL row
 * terminates the table. */
const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[] = {
   { "I420", GST_MAKE_FOURCC('I', '4', '2', '0'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE },
   { "YV12", GST_MAKE_FOURCC('Y', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE },
   { "YUY2", GST_MAKE_FOURCC('Y', 'U', 'Y', '2'), EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE },
   { "NV12", GST_MAKE_FOURCC('N', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE },
   { "TM12", GST_MAKE_FOURCC('T', 'M', '1', '2'), EVAS_COLORSPACE_YCBCR420TM12601_PL, _evas_video_mt12, EINA_TRUE },
   { NULL, 0, 0, NULL, 0 } /* sentinel */
};
245
/* GstVideoFormat-keyed converter table for packed RGB caps; every
 * entry maps to Evas ARGB8888. NULL row terminates the table. */
const ColorSpace_Format_Convertion colorspace_format_convertion[] = {
   { "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr },
   { "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx },
   { "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra },
   { NULL, 0, 0, NULL } /* sentinel */
};
diff --git a/src/modules/emotion/gstreamer/emotion_fakeeos.c b/src/modules/emotion/gstreamer/emotion_fakeeos.c
deleted file mode 100644
index fc6dc0f989..0000000000
--- a/src/modules/emotion/gstreamer/emotion_fakeeos.c
+++ /dev/null
@@ -1,70 +0,0 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <Eina.h>
6#include <Evas.h>
7
8#include <glib.h>
9#include <gst/gst.h>
10#include <gst/video/video.h>
11#include <gst/video/gstvideosink.h>
12
13#ifdef HAVE_ECORE_X
14# include <Ecore_X.h>
15# ifdef HAVE_XOVERLAY_H
16# include <gst/interfaces/xoverlay.h>
17# endif
18#endif
19
20#include "Emotion.h"
21#include "emotion_gstreamer.h"
22
/* Minimal GstBin subclass whose only purpose is to intercept and drop
 * EOS messages from its children (see fakeeos_bin_handle_message). */
typedef struct _FakeEOSBin
{
   GstBin parent;
} FakeEOSBin;

typedef struct _FakeEOSBinClass
{
   GstBinClass parent;
} FakeEOSBinClass;

/* GStreamer 0.10 boilerplate: declares fakeeos_bin_get_type() and the
 * parent_class pointer used by the handlers below. */
GST_BOILERPLATE(FakeEOSBin, fakeeos_bin, GstBin,
                GST_TYPE_BIN);
35
36static void
37fakeeos_bin_handle_message(GstBin * bin, GstMessage * message)
38{
39 /* FakeEOSBin *fakeeos = (FakeEOSBin *)(bin); */
40
41 switch (GST_MESSAGE_TYPE(message)) {
42 case GST_MESSAGE_EOS:
43 /* what to do here ? just returning at the moment */
44 return;
45 default:
46 break;
47 }
48
49 GST_BIN_CLASS(parent_class)->handle_message(bin, message);
50}
51
/* No per-class base initialization needed; GST_BOILERPLATE requires
 * the symbol to exist. */
static void
fakeeos_bin_base_init(gpointer g_class EINA_UNUSED)
{
}
56
57static void
58fakeeos_bin_class_init(FakeEOSBinClass * klass)
59{
60 GstBinClass *gstbin_class = GST_BIN_CLASS(klass);
61
62 gstbin_class->handle_message =
63 GST_DEBUG_FUNCPTR (fakeeos_bin_handle_message);
64}
65
/* No per-instance state to initialize. */
static void
fakeeos_bin_init(FakeEOSBin *src EINA_UNUSED,
                 FakeEOSBinClass *klass EINA_UNUSED)
{
}
diff --git a/src/modules/emotion/gstreamer/emotion_gstreamer.c b/src/modules/emotion/gstreamer/emotion_gstreamer.c
deleted file mode 100644
index b37f5ce639..0000000000
--- a/src/modules/emotion/gstreamer/emotion_gstreamer.c
+++ /dev/null
@@ -1,2018 +0,0 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <unistd.h>
6#include <fcntl.h>
7
8#ifdef _WIN32
9# include <direct.h> /* getcwd */
10#endif
11
12#include <Eina.h>
13#include <Evas.h>
14#include <Ecore.h>
15
16#define HTTP_STREAM 0
17#define RTSP_STREAM 1
18#include <glib.h>
19#include <gst/gst.h>
20#include <glib-object.h>
21#include <gst/video/gstvideosink.h>
22#include <gst/video/video.h>
23
24// forcibly disable x overlay window.. broken badly.
25#undef HAVE_ECORE_X
26
27#ifdef HAVE_ECORE_X
28# include <Ecore_X.h>
29# ifdef HAVE_XOVERLAY_H
30# include <gst/interfaces/xoverlay.h>
31# endif
32#endif
33
34#include "emotion_modules.h"
35#include "emotion_gstreamer.h"
36
/* Set when a window manager capable of video overlay is detected. */
Eina_Bool window_manager_video = EINA_FALSE;
/* Eina log domain for this module; -1 until registered. */
int _emotion_gstreamer_log_domain = -1;
/* When set, frame-rate debugging output is enabled. */
Eina_Bool debug_fps = EINA_FALSE;
/* Whether an X connection is usable for overlay output. */
Eina_Bool _ecore_x_available = EINA_FALSE;

/* Idler scheduled to restart a broken stream (see _em_restart_stream);
 * cancelled in em_cleanup(). */
static Ecore_Idler *restart_idler;
/* Module init/shutdown reference count. */
static int _emotion_init_count = 0;

/* Callbacks to get the eos */
static void _for_each_tag (GstTagList const* list, gchar const* tag, void *data);
static void _free_metadata (Emotion_Gstreamer_Metadata *m);

static GstBusSyncReply _eos_sync_fct(GstBus *bus,
                                     GstMessage *message,
                                     gpointer data);

static Eina_Bool _em_restart_stream(void *data);

/* Module interface */

static int priority_overide = 0;
59
60static Emotion_Video_Stream *
61emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
62{
63 Emotion_Video_Stream *vstream;
64
65 if (!ev) return NULL;
66
67 vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
68 if (!vstream) return NULL;
69
70 ev->video_streams = eina_list_append(ev->video_streams, vstream);
71 return vstream;
72}
73
/* Map an Emotion_Vis value onto the name of the GStreamer element that
 * implements it. Returns NULL for EMOTION_VIS_NONE; unknown values
 * fall back to "goom". */
static const char *
emotion_visualization_element_name_get(Emotion_Vis visualisation)
{
   switch (visualisation)
     {
      case EMOTION_VIS_NONE:
        return NULL;
      case EMOTION_VIS_GOOM:
        return "goom";
      case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
        return "libvisual_bumpscope";
      case EMOTION_VIS_LIBVISUAL_CORONA:
        return "libvisual_corona";
      case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
        return "libvisual_dancingparticles";
      case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
        return "libvisual_gdkpixbuf";
      case EMOTION_VIS_LIBVISUAL_G_FORCE:
        return "libvisual_G-Force";
      case EMOTION_VIS_LIBVISUAL_GOOM:
        return "libvisual_goom";
      case EMOTION_VIS_LIBVISUAL_INFINITE:
        return "libvisual_infinite";
      case EMOTION_VIS_LIBVISUAL_JAKDAW:
        return "libvisual_jakdaw";
      case EMOTION_VIS_LIBVISUAL_JESS:
        return "libvisual_jess";
      case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
        return "libvisual_lv_analyzer";
      case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
        return "libvisual_lv_flower";
      case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
        return "libvisual_lv_gltest";
      case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
        return "libvisual_lv_scope";
      case EMOTION_VIS_LIBVISUAL_MADSPIN:
        return "libvisual_madspin";
      case EMOTION_VIS_LIBVISUAL_NEBULUS:
        return "libvisual_nebulus";
      case EMOTION_VIS_LIBVISUAL_OINKSIE:
        return "libvisual_oinksie";
      case EMOTION_VIS_LIBVISUAL_PLASMA:
        return "libvisual_plazma"; /* note: element name uses 'z' */
      default:
        return "goom";
     }
}
121
/* Tear down everything attached to @ev: the pending buffer, EOS bus,
 * metadata, last decoded frame, the pipeline with its pads and tee
 * pads, and the per-stream descriptors. The teardown order matters:
 * the sink is detached from the evas object before the pipeline is
 * unreffed. Safe to call when parts are already gone. */
static void
em_cleanup(Emotion_Gstreamer_Video *ev)
{
   Emotion_Audio_Stream *astream;
   Emotion_Video_Stream *vstream;

   if (ev->send)
     {
        emotion_gstreamer_buffer_free(ev->send);
        ev->send = NULL;
     }

   if (ev->eos_bus)
     {
        gst_object_unref(GST_OBJECT(ev->eos_bus));
        ev->eos_bus = NULL;
     }

   if (ev->metadata)
     {
        _free_metadata(ev->metadata);
        ev->metadata = NULL;
     }

   if (ev->last_buffer)
     {
        gst_buffer_unref(ev->last_buffer);
        ev->last_buffer = NULL;
     }

   if (!ev->stream)
     {
        /* detach the video surface from the evas image object */
        evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
        ev->stream = EINA_TRUE;
     }

   if (ev->pipeline)
     {
        /* NOTE(review): called with a NULL uri, presumably for its
         * side effects on the sink state — confirm in
         * gstreamer_video_sink_new() */
        gstreamer_video_sink_new(ev, ev->obj, NULL);

        /* break the sink's back-references before dropping the pipeline */
        g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
        g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
        gst_element_set_state(ev->pipeline, GST_STATE_NULL);
        gst_object_unref(ev->pipeline);

        ev->pipeline = NULL;
        ev->sink = NULL;

        if (ev->eteepad) gst_object_unref(ev->eteepad);
        ev->eteepad = NULL;
        if (ev->xvteepad) gst_object_unref(ev->xvteepad);
        ev->xvteepad = NULL;
        if (ev->xvpad) gst_object_unref(ev->xvpad);
        ev->xvpad = NULL;

        ev->src_width = 0;
        ev->src_height = 0;

#ifdef HAVE_ECORE_X
        INF("destroying window: %i", ev->win);
        if (ev->win) ecore_x_window_free(ev->win);
        ev->win = 0;
#endif
     }

   if (restart_idler)
     {
        ecore_idler_del(restart_idler);
        restart_idler = NULL;
     }

   EINA_LIST_FREE(ev->audio_streams, astream)
     free(astream);
   EINA_LIST_FREE(ev->video_streams, vstream)
     free(vstream);
}
198
199static void
200em_del(void *video)
201{
202 Emotion_Gstreamer_Video *ev = video;
203
204 if (ev->threads)
205 {
206 Ecore_Thread *t;
207
208 EINA_LIST_FREE(ev->threads, t)
209 ecore_thread_cancel(t);
210
211 ev->delete_me = EINA_TRUE;
212 return;
213 }
214
215 if (ev->in != ev->out)
216 {
217 ev->delete_me = EINA_TRUE;
218 return;
219 }
220
221 em_cleanup(ev);
222
223 free(ev);
224}
225
226static Eina_Bool
227em_file_open(void *video,
228 const char *file)
229{
230 Emotion_Gstreamer_Video *ev = video;
231 Eina_Strbuf *sbuf = NULL;
232 const char *uri;
233
234 if (!file) return EINA_FALSE;
235 if (strstr(file, "://") == NULL)
236 {
237 sbuf = eina_strbuf_new();
238 eina_strbuf_append(sbuf, "file://");
239 if (strncmp(file, "./", 2) == 0)
240 file += 2;
241 if (strstr(file, ":/") != NULL)
242 { /* We absolutely need file:///C:/ under Windows, so adding it here */
243 eina_strbuf_append(sbuf, "/");
244 }
245 else if (*file != '/')
246 {
247 char tmp[PATH_MAX];
248
249 if (getcwd(tmp, PATH_MAX))
250 {
251 eina_strbuf_append(sbuf, tmp);
252 eina_strbuf_append(sbuf, "/");
253 }
254 }
255 eina_strbuf_append(sbuf, file);
256 }
257
258 ev->play_started = 0;
259 ev->pipeline_parsed = 0;
260
261 uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
262 DBG("setting file to '%s'", uri);
263 ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, uri);
264 if (sbuf) eina_strbuf_free(sbuf);
265
266 if (!ev->pipeline)
267 return EINA_FALSE;
268
269 ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
270 if (!ev->eos_bus)
271 {
272 ERR("could not get the bus");
273 return EINA_FALSE;
274 }
275
276 gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
277
278 ev->position = 0.0;
279
280 return 1;
281}
282
283static void
284em_file_close(void *video)
285{
286 Emotion_Gstreamer_Video *ev;
287
288 ev = (Emotion_Gstreamer_Video *)video;
289 if (!ev)
290 return;
291
292 if (ev->threads)
293 {
294 Ecore_Thread *t;
295
296 EINA_LIST_FREE(ev->threads, t)
297 ecore_thread_cancel(t);
298 }
299
300 em_cleanup(ev);
301
302 ev->pipeline_parsed = EINA_FALSE;
303 ev->play_started = 0;
304}
305
306static void
307em_play(void *video,
308 double pos EINA_UNUSED)
309{
310 Emotion_Gstreamer_Video *ev;
311
312 ev = (Emotion_Gstreamer_Video *)video;
313 if (!ev->pipeline) return;
314
315 if (ev->pipeline_parsed)
316 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
317 ev->play = 1;
318 ev->play_started = 1;
319}
320
321static void
322em_stop(void *video)
323{
324 Emotion_Gstreamer_Video *ev;
325
326 ev = (Emotion_Gstreamer_Video *)video;
327
328 if (!ev->pipeline) return;
329
330 if (ev->pipeline_parsed)
331 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
332 ev->play = 0;
333}
334
335static void
336em_size_get(void *video,
337 int *width,
338 int *height)
339{
340 Emotion_Gstreamer_Video *ev;
341 Emotion_Video_Stream *vstream;
342
343 ev = (Emotion_Gstreamer_Video *)video;
344
345 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
346 goto on_error;
347
348 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
349 if (vstream)
350 {
351 if (width) *width = vstream->width;
352 if (height) *height = vstream->height;
353
354 return;
355 }
356
357 on_error:
358 if (width) *width = 0;
359 if (height) *height = 0;
360}
361
362static void
363em_pos_set(void *video,
364 double pos)
365{
366 Emotion_Gstreamer_Video *ev;
367
368 ev = (Emotion_Gstreamer_Video *)video;
369
370 if (!ev->pipeline) return;
371
372 if (ev->play)
373 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
374
375 gst_element_seek(ev->pipeline, 1.0,
376 GST_FORMAT_TIME,
377 GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
378 GST_SEEK_TYPE_SET,
379 (gint64)(pos * (double)GST_SECOND),
380 GST_SEEK_TYPE_NONE, -1);
381
382 if (ev->play)
383 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
384}
385
/**
 * Returns stream duration in seconds.
 *
 * Queries the pipeline directly first; when the query fails, answers
 * in the wrong format, or reports a non-positive value, falls back to
 * the per-stream lengths collected during pipeline parsing (audio
 * streams checked before video). Returns 0.0 when nothing is known.
 */
static double
em_len_get(void *video)
{
   Emotion_Gstreamer_Video *ev;
   Emotion_Video_Stream *vstream;
   Emotion_Audio_Stream *astream;
   Eina_List *l;
   GstFormat fmt;
   gint64 val;
   gboolean ret;

   ev = video;
   fmt = GST_FORMAT_TIME;

   if (!ev->pipeline) return 0.0;

   ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
   if (!ret)
     goto fallback;

   /* the element may answer in a different format than requested */
   if (fmt != GST_FORMAT_TIME)
     {
        DBG("requested duration in time, but got %s instead.",
            gst_format_get_name(fmt));
        goto fallback;
     }

   /* NOTE(review): val is gint64 compared against a double literal;
    * works, but a plain `0` would be clearer */
   if (val <= 0.0)
     goto fallback;

   return GST_TIME_AS_SECONDS(val);

 fallback:
   if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
     return 0.0;

   EINA_LIST_FOREACH(ev->audio_streams, l, astream)
     if (astream->length_time >= 0)
       return astream->length_time;

   EINA_LIST_FOREACH(ev->video_streams, l, vstream)
     if (vstream->length_time >= 0)
       return vstream->length_time;

   return 0.0;
}
435
436static double
437em_buffer_size_get(void *video)
438{
439 Emotion_Gstreamer_Video *ev;
440
441 GstQuery *query;
442 gboolean busy;
443 gint percent;
444
445 ev = video;
446
447 if (!ev->pipeline) return 0.0;
448
449 query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
450 if (gst_element_query(ev->pipeline, query))
451 gst_query_parse_buffering_percent(query, &busy, &percent);
452 else
453 percent = 100;
454
455 gst_query_unref(query);
456 return ((float)(percent)) / 100.0;
457}
458
459static int
460em_fps_num_get(void *video)
461{
462 Emotion_Gstreamer_Video *ev;
463 Emotion_Video_Stream *vstream;
464
465 ev = (Emotion_Gstreamer_Video *)video;
466
467 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
468 return 0;
469
470 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
471 if (vstream)
472 return vstream->fps_num;
473
474 return 0;
475}
476
477static int
478em_fps_den_get(void *video)
479{
480 Emotion_Gstreamer_Video *ev;
481 Emotion_Video_Stream *vstream;
482
483 ev = (Emotion_Gstreamer_Video *)video;
484
485 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
486 return 1;
487
488 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
489 if (vstream)
490 return vstream->fps_den;
491
492 return 1;
493}
494
495static double
496em_fps_get(void *video)
497{
498 Emotion_Gstreamer_Video *ev;
499 Emotion_Video_Stream *vstream;
500
501 ev = (Emotion_Gstreamer_Video *)video;
502
503 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
504 return 0.0;
505
506 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
507 if (vstream)
508 return (double)vstream->fps_num / (double)vstream->fps_den;
509
510 return 0.0;
511}
512
513/**
514 * Returns stream position in seconds
515 */
516static double
517em_pos_get(void *video)
518{
519 Emotion_Gstreamer_Video *ev;
520 GstFormat fmt;
521 gint64 val;
522 gboolean ret;
523
524 ev = video;
525 fmt = GST_FORMAT_TIME;
526
527 if (!ev->pipeline) return 0.0;
528
529 ret = gst_element_query_position(ev->pipeline, &fmt, &val);
530 if (!ret)
531 return ev->position;
532
533 if (fmt != GST_FORMAT_TIME)
534 {
535 ERR("requested position in time, but got %s instead.",
536 gst_format_get_name(fmt));
537 return ev->position;
538 }
539
540 ev->position = GST_TIME_AS_SECONDS(val);
541 return ev->position;
542}
543
544static void
545em_vis_set(void *video,
546 Emotion_Vis vis)
547{
548 Emotion_Gstreamer_Video *ev;
549
550 ev = (Emotion_Gstreamer_Video *)video;
551
552 ev->vis = vis;
553}
554
555static Emotion_Vis
556em_vis_get(void *video)
557{
558 Emotion_Gstreamer_Video *ev;
559
560 ev = (Emotion_Gstreamer_Video *)video;
561
562 return ev->vis;
563}
564
565static Eina_Bool
566em_vis_supported(void *ef EINA_UNUSED, Emotion_Vis vis)
567{
568 const char *name;
569 GstElementFactory *factory;
570
571 if (vis == EMOTION_VIS_NONE)
572 return EINA_TRUE;
573
574 name = emotion_visualization_element_name_get(vis);
575 if (!name)
576 return EINA_FALSE;
577
578 factory = gst_element_factory_find(name);
579 if (!factory)
580 return EINA_FALSE;
581
582 gst_object_unref(factory);
583 return EINA_TRUE;
584}
585
586static double
587em_ratio_get(void *video)
588{
589 Emotion_Gstreamer_Video *ev;
590
591 ev = (Emotion_Gstreamer_Video *)video;
592
593 return ev->ratio;
594}
595
596static int
597em_video_handled(void *video)
598{
599 Emotion_Gstreamer_Video *ev;
600
601 ev = (Emotion_Gstreamer_Video *)video;
602
603 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
604
605 if (!eina_list_count(ev->video_streams))
606 return 0;
607
608 return 1;
609}
610
611static int
612em_audio_handled(void *video)
613{
614 Emotion_Gstreamer_Video *ev;
615
616 ev = (Emotion_Gstreamer_Video *)video;
617
618 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
619
620 if (!eina_list_count(ev->audio_streams))
621 return 0;
622
623 return 1;
624}
625
/* This backend always reports media as seekable. */
static int
em_seekable(void *video EINA_UNUSED)
{
   return 1;
}
631
/* Frame-consumed notification from the frontend; nothing to do here. */
static void
em_frame_done(void *video EINA_UNUSED)
{
}
636
637static Emotion_Format
638em_format_get(void *video)
639{
640 Emotion_Gstreamer_Video *ev;
641 Emotion_Video_Stream *vstream;
642
643 ev = (Emotion_Gstreamer_Video *)video;
644
645 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
646 return EMOTION_FORMAT_NONE;
647
648 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
649 if (vstream)
650 {
651 switch (vstream->fourcc)
652 {
653 case GST_MAKE_FOURCC('I', '4', '2', '0'):
654 return EMOTION_FORMAT_I420;
655 case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
656 return EMOTION_FORMAT_YV12;
657 case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
658 return EMOTION_FORMAT_YUY2;
659 case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
660 return EMOTION_FORMAT_BGRA;
661 default:
662 return EMOTION_FORMAT_NONE;
663 }
664 }
665 return EMOTION_FORMAT_NONE;
666}
667
668static void
669em_video_data_size_get(void *video, int *w, int *h)
670{
671 Emotion_Gstreamer_Video *ev;
672 Emotion_Video_Stream *vstream;
673
674 ev = (Emotion_Gstreamer_Video *)video;
675
676 if (ev->pipeline && (!ev->video_stream_nbr || !ev->video_streams))
677 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
678 goto on_error;
679
680 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
681 if (vstream)
682 {
683 *w = vstream->width;
684 *h = vstream->height;
685
686 return;
687 }
688
689 on_error:
690 *w = 0;
691 *h = 0;
692}
693
/* The raw-data accessors below are unsupported by this backend —
 * decoded frames reach Evas through the video sink instead — so they
 * report "no data available". */
static int
em_yuv_rows_get(void *video EINA_UNUSED,
                int w EINA_UNUSED,
                int h EINA_UNUSED,
                unsigned char **yrows EINA_UNUSED,
                unsigned char **urows EINA_UNUSED,
                unsigned char **vrows EINA_UNUSED)
{
   return 0;
}

static int
em_bgra_data_get(void *video EINA_UNUSED, unsigned char **bgra_data EINA_UNUSED)
{
   return 0;
}

/* Input-event feeds (DVD-menu style navigation) are no-ops here. */
static void
em_event_feed(void *video EINA_UNUSED, int event EINA_UNUSED)
{
}

static void
em_event_mouse_button_feed(void *video EINA_UNUSED, int button EINA_UNUSED, int x EINA_UNUSED, int y EINA_UNUSED)
{
}

static void
em_event_mouse_move_feed(void *video EINA_UNUSED, int x EINA_UNUSED, int y EINA_UNUSED)
{
}
725
726/* Video channels */
727static int
728em_video_channel_count(void *video)
729{
730 Emotion_Gstreamer_Video *ev;
731
732 ev = (Emotion_Gstreamer_Video *)video;
733
734 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
735
736 return eina_list_count(ev->video_streams);
737}
738
/* Selecting a video channel is not implemented yet; the clamping code
 * below is kept disabled as a starting point. */
static void
em_video_channel_set(void *video EINA_UNUSED,
                     int channel EINA_UNUSED)
{
#if 0
   Emotion_Gstreamer_Video *ev;

   ev = (Emotion_Gstreamer_Video *)video;

   if (channel < 0) channel = 0;
#endif
   /* FIXME: a faire... */
}
752
753static int
754em_video_channel_get(void *video)
755{
756 Emotion_Gstreamer_Video *ev;
757
758 ev = (Emotion_Gstreamer_Video *)video;
759
760 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
761
762 return ev->video_stream_nbr;
763}
764
/* External subtitle files are not implemented in this backend. */
static void
em_video_subtitle_file_set(void *video EINA_UNUSED,
                           const char *filepath EINA_UNUSED)
{
   DBG("video_subtitle_file_set not implemented for gstreamer yet.");
}

static const char *
em_video_subtitle_file_get(void *video EINA_UNUSED)
{
   DBG("video_subtitle_file_get not implemented for gstreamer yet.");
   return NULL;
}

/* Video channel names are not extracted by this backend. */
static const char *
em_video_channel_name_get(void *video EINA_UNUSED,
                          int channel EINA_UNUSED)
{
   return NULL;
}
785
786static void
787em_video_channel_mute_set(void *video,
788 int mute)
789{
790 Emotion_Gstreamer_Video *ev;
791
792 ev = (Emotion_Gstreamer_Video *)video;
793
794 ev->video_mute = mute;
795}
796
797static int
798em_video_channel_mute_get(void *video)
799{
800 Emotion_Gstreamer_Video *ev;
801
802 ev = (Emotion_Gstreamer_Video *)video;
803
804 return ev->video_mute;
805}
806
807/* Audio channels */
808
809static int
810em_audio_channel_count(void *video)
811{
812 Emotion_Gstreamer_Video *ev;
813
814 ev = (Emotion_Gstreamer_Video *)video;
815
816 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
817
818 return eina_list_count(ev->audio_streams);
819}
820
/* Selecting an audio channel is not implemented yet; the clamping code
 * below is kept disabled as a starting point. */
static void
em_audio_channel_set(void *video EINA_UNUSED,
                     int channel EINA_UNUSED)
{
#if 0
   Emotion_Gstreamer_Video *ev;

   ev = (Emotion_Gstreamer_Video *)video;

   if (channel < -1) channel = -1;
#endif
   /* FIXME: a faire... */
}
834
835static int
836em_audio_channel_get(void *video)
837{
838 Emotion_Gstreamer_Video *ev;
839
840 ev = (Emotion_Gstreamer_Video *)video;
841
842 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
843
844 return ev->audio_stream_nbr;
845}
846
/* Audio channel names are not extracted by this backend. */
static const char *
em_audio_channel_name_get(void *video EINA_UNUSED,
                          int channel EINA_UNUSED)
{
   return NULL;
}

/* bit 1 of the pipeline "flags" property — presumably playbin's
 * audio-enable flag; confirm against the playbin documentation */
#define GST_PLAY_FLAG_AUDIO (1 << 1)
855
856static void
857em_audio_channel_mute_set(void *video,
858 int mute)
859{
860 /* NOTE: at first I wanted to completly shutdown the audio path on mute,
861 but that's not possible as the audio sink could be the clock source
862 for the pipeline (at least that's the case on some of the hardware
863 I have been tested emotion on.
864 */
865 Emotion_Gstreamer_Video *ev;
866
867 ev = (Emotion_Gstreamer_Video *)video;
868
869 if (!ev->pipeline) return;
870
871 ev->audio_mute = mute;
872
873 g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
874}
875
876static int
877em_audio_channel_mute_get(void *video)
878{
879 Emotion_Gstreamer_Video *ev;
880
881 ev = (Emotion_Gstreamer_Video *)video;
882
883 return ev->audio_mute;
884}
885
886static void
887em_audio_channel_volume_set(void *video,
888 double vol)
889{
890 Emotion_Gstreamer_Video *ev;
891
892 ev = (Emotion_Gstreamer_Video *)video;
893
894 if (!ev->pipeline) return;
895
896 if (vol < 0.0)
897 vol = 0.0;
898 if (vol > 1.0)
899 vol = 1.0;
900 ev->volume = vol;
901 g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
902}
903
904static double
905em_audio_channel_volume_get(void *video)
906{
907 Emotion_Gstreamer_Video *ev;
908
909 ev = (Emotion_Gstreamer_Video *)video;
910
911 return ev->volume;
912}
913
/* SPU (subpicture/subtitle), chapter, speed and eject support.
 * None of this is implemented for the GStreamer backend: getters
 * return fixed values and setters are no-ops. */

/* No SPU streams are ever reported. */
static int
em_spu_channel_count(void *video EINA_UNUSED)
{
   return 0;
}

/* No-op: SPU channel selection is unsupported. */
static void
em_spu_channel_set(void *video EINA_UNUSED, int channel EINA_UNUSED)
{
}

/* NOTE(review): returns 1 although em_spu_channel_count() reports 0
 * channels - presumably callers treat this as "no valid channel";
 * confirm against the generic Emotion front-end. */
static int
em_spu_channel_get(void *video EINA_UNUSED)
{
   return 1;
}

/* SPU channel names are not available. */
static const char *
em_spu_channel_name_get(void *video EINA_UNUSED, int channel EINA_UNUSED)
{
   return NULL;
}

/* No-op: SPU mute state is not tracked. */
static void
em_spu_channel_mute_set(void *video EINA_UNUSED, int mute EINA_UNUSED)
{
}

/* SPU is never reported muted (there is nothing to mute). */
static int
em_spu_channel_mute_get(void *video EINA_UNUSED)
{
   return 0;
}

/* Chapter navigation is not implemented: no chapters are exposed. */
static int
em_chapter_count(void *video EINA_UNUSED)
{
   return 0;
}

/* No-op: chapter seeking is unsupported. */
static void
em_chapter_set(void *video EINA_UNUSED, int chapter EINA_UNUSED)
{
}

/* Always chapter 0: chapters are unsupported. */
static int
em_chapter_get(void *video EINA_UNUSED)
{
   return 0;
}

/* Chapter names are not available. */
static const char *
em_chapter_name_get(void *video EINA_UNUSED, int chapter EINA_UNUSED)
{
   return NULL;
}

/* No-op: variable playback speed is unsupported. */
static void
em_speed_set(void *video EINA_UNUSED, double speed EINA_UNUSED)
{
}

/* Playback speed is fixed at 1.0 (normal speed). */
static double
em_speed_get(void *video EINA_UNUSED)
{
   return 1.0;
}

/* Nothing to eject for this backend; report success. */
static int
em_eject(void *video EINA_UNUSED)
{
   return 1;
}
989
990static const char *
991em_meta_get(void *video, int meta)
992{
993 Emotion_Gstreamer_Video *ev;
994 const char *str = NULL;
995
996 ev = (Emotion_Gstreamer_Video *)video;
997
998 if (!ev || !ev->metadata) return NULL;
999 switch (meta)
1000 {
1001 case META_TRACK_TITLE:
1002 str = ev->metadata->title;
1003 break;
1004 case META_TRACK_ARTIST:
1005 str = ev->metadata->artist;
1006 break;
1007 case META_TRACK_ALBUM:
1008 str = ev->metadata->album;
1009 break;
1010 case META_TRACK_YEAR:
1011 str = ev->metadata->year;
1012 break;
1013 case META_TRACK_GENRE:
1014 str = ev->metadata->genre;
1015 break;
1016 case META_TRACK_COMMENT:
1017 str = ev->metadata->comment;
1018 break;
1019 case META_TRACK_DISCID:
1020 str = ev->metadata->disc_id;
1021 break;
1022 default:
1023 break;
1024 }
1025
1026 return str;
1027}
1028
1029static void
1030em_priority_set(void *video, Eina_Bool pri)
1031{
1032 Emotion_Gstreamer_Video *ev;
1033
1034 ev = video;
1035 if (priority_overide > 3) return; /* If we failed to much to create that pipeline, let's don't wast our time anymore */
1036
1037 if (ev->priority != pri && ev->pipeline)
1038 {
1039 if (ev->threads)
1040 {
1041 Ecore_Thread *t;
1042
1043 EINA_LIST_FREE(ev->threads, t)
1044 ecore_thread_cancel(t);
1045 }
1046 em_cleanup(ev);
1047 restart_idler = ecore_idler_add(_em_restart_stream, ev);
1048 }
1049 ev->priority = pri;
1050}
1051
1052static Eina_Bool
1053em_priority_get(void *video)
1054{
1055 Emotion_Gstreamer_Video *ev;
1056
1057 ev = video;
1058 return !ev->stream;
1059}
1060
1061#ifdef HAVE_ECORE_X
1062static Eina_Bool
1063_ecore_event_x_destroy(void *data EINA_UNUSED, int type EINA_UNUSED, void *event EINA_UNUSED)
1064{
1065 Ecore_X_Event_Window_Destroy *ev = event;
1066
1067 INF("killed window: %x (%x).", ev->win, ev->event_win);
1068
1069 return EINA_TRUE;
1070}
1071
1072static void
1073gstreamer_ecore_x_check(void)
1074{
1075 Ecore_X_Window *roots;
1076 int num;
1077
1078 ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL);
1079
1080 /* Check if the window manager is able to handle our special Xv window. */
1081 roots = ecore_x_window_root_list(&num);
1082 if (roots && num > 0)
1083 {
1084 Ecore_X_Window win, twin;
1085 int nwins;
1086
1087 nwins = ecore_x_window_prop_window_get(roots[0],
1088 ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1089 &win, 1);
1090 if (nwins > 0)
1091 {
1092 nwins = ecore_x_window_prop_window_get(win,
1093 ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1094 &twin, 1);
1095 if (nwins > 0 && twin == win)
1096 {
1097 Ecore_X_Atom *supported;
1098 int supported_num;
1099 int i;
1100
1101 if (ecore_x_netwm_supported_get(roots[0], &supported, &supported_num))
1102 {
1103 Eina_Bool parent = EINA_FALSE;
1104 Eina_Bool video_position = EINA_FALSE;
1105
1106 for (i = 0; i < supported_num; ++i)
1107 {
1108 if (supported[i] == ECORE_X_ATOM_E_VIDEO_PARENT)
1109 parent = EINA_TRUE;
1110 else if (supported[i] == ECORE_X_ATOM_E_VIDEO_POSITION)
1111 video_position = EINA_TRUE;
1112 if (parent && video_position)
1113 break;
1114 }
1115
1116 if (parent && video_position)
1117 {
1118 window_manager_video = EINA_TRUE;
1119 }
1120 }
1121 free(supported);
1122 }
1123 }
1124 }
1125 free(roots);
1126}
1127#endif
1128
1129static void *
1130em_add(const Emotion_Engine *api,
1131 Evas_Object *obj,
1132 const Emotion_Module_Options *opt EINA_UNUSED)
1133{
1134 Emotion_Gstreamer_Video *ev;
1135
1136 ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
1137 EINA_SAFETY_ON_NULL_RETURN_VAL(ev, NULL);
1138
1139 ev->api = api;
1140 ev->obj = obj;
1141
1142 /* Default values */
1143 ev->ratio = 1.0;
1144 ev->vis = EMOTION_VIS_NONE;
1145 ev->volume = 0.8;
1146 ev->play_started = 0;
1147 ev->delete_me = EINA_FALSE;
1148 ev->threads = NULL;
1149
1150 return ev;
1151}
1152
/* Engine vtable: maps the generic Emotion engine API onto the em_*
 * implementations above. Entry order must match the Emotion_Engine
 * structure declaration exactly - do not reorder. */
static const Emotion_Engine em_engine =
{
   EMOTION_ENGINE_API_VERSION,
   EMOTION_ENGINE_PRIORITY_DEFAULT,
   "gstreamer",
   em_add, /* add */
   em_del, /* del */
   em_file_open, /* file_open */
   em_file_close, /* file_close */
   em_play, /* play */
   em_stop, /* stop */
   em_size_get, /* size_get */
   em_pos_set, /* pos_set */
   em_len_get, /* len_get */
   em_buffer_size_get, /* buffer_size_get */
   em_fps_num_get, /* fps_num_get */
   em_fps_den_get, /* fps_den_get */
   em_fps_get, /* fps_get */
   em_pos_get, /* pos_get */
   em_vis_set, /* vis_set */
   em_vis_get, /* vis_get */
   em_vis_supported, /* vis_supported */
   em_ratio_get, /* ratio_get */
   em_video_handled, /* video_handled */
   em_audio_handled, /* audio_handled */
   em_seekable, /* seekable */
   em_frame_done, /* frame_done */
   em_format_get, /* format_get */
   em_video_data_size_get, /* video_data_size_get */
   em_yuv_rows_get, /* yuv_rows_get */
   em_bgra_data_get, /* bgra_data_get */
   em_event_feed, /* event_feed */
   em_event_mouse_button_feed, /* event_mouse_button_feed */
   em_event_mouse_move_feed, /* event_mouse_move_feed */
   em_video_channel_count, /* video_channel_count */
   em_video_channel_set, /* video_channel_set */
   em_video_channel_get, /* video_channel_get */
   em_video_subtitle_file_set, /* video_subtitle_file_set */
   em_video_subtitle_file_get, /* video_subtitle_file_get */
   em_video_channel_name_get, /* video_channel_name_get */
   em_video_channel_mute_set, /* video_channel_mute_set */
   em_video_channel_mute_get, /* video_channel_mute_get */
   em_audio_channel_count, /* audio_channel_count */
   em_audio_channel_set, /* audio_channel_set */
   em_audio_channel_get, /* audio_channel_get */
   em_audio_channel_name_get, /* audio_channel_name_get */
   em_audio_channel_mute_set, /* audio_channel_mute_set */
   em_audio_channel_mute_get, /* audio_channel_mute_get */
   em_audio_channel_volume_set, /* audio_channel_volume_set */
   em_audio_channel_volume_get, /* audio_channel_volume_get */
   em_spu_channel_count, /* spu_channel_count */
   em_spu_channel_set, /* spu_channel_set */
   em_spu_channel_get, /* spu_channel_get */
   em_spu_channel_name_get, /* spu_channel_name_get */
   em_spu_channel_mute_set, /* spu_channel_mute_set */
   em_spu_channel_mute_get, /* spu_channel_mute_get */
   em_chapter_count, /* chapter_count */
   em_chapter_set, /* chapter_set */
   em_chapter_get, /* chapter_get */
   em_chapter_name_get, /* chapter_name_get */
   em_speed_set, /* speed_set */
   em_speed_get, /* speed_get */
   em_eject, /* eject */
   em_meta_get, /* meta_get */
   em_priority_set, /* priority_set */
   em_priority_get, /* priority_get */
   NULL /* em_meta_artwork_get */
};
1221
1222Eina_Bool
1223gstreamer_module_init(void)
1224{
1225 GError *error;
1226
1227 if (_emotion_init_count > 0)
1228 {
1229 _emotion_pending_ecore_begin();
1230 return EINA_TRUE;
1231 }
1232
1233 if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
1234
1235 eina_threads_init();
1236 eina_log_threads_enable();
1237 _emotion_gstreamer_log_domain = eina_log_domain_register
1238 ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1239 if (_emotion_gstreamer_log_domain < 0)
1240 {
1241 EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1242 return EINA_FALSE;
1243 }
1244
1245 if (!gst_init_check(0, NULL, &error))
1246 {
1247 EINA_LOG_CRIT("Could not init GStreamer");
1248 goto error_gst_init;
1249 }
1250
1251#ifdef HAVE_ECORE_X
1252 if (ecore_x_init(NULL) > 0)
1253 {
1254 _ecore_x_available = EINA_TRUE;
1255 gstreamer_ecore_x_check();
1256 }
1257#endif
1258
1259 if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1260 "emotion-sink",
1261 "video sink plugin for Emotion",
1262 gstreamer_plugin_init,
1263 VERSION,
1264 "LGPL",
1265 "Enlightenment",
1266 PACKAGE,
1267 "http://www.enlightenment.org/") == FALSE)
1268 {
1269 EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1270 goto error_gst_plugin;
1271 }
1272
1273 if (!_emotion_module_register(&em_engine))
1274 {
1275 ERR("Could not register module %p", &em_engine);
1276 goto error_register;
1277 }
1278
1279 _emotion_init_count = 1;
1280 return EINA_TRUE;
1281
1282 error_register:
1283 error_gst_plugin:
1284#ifdef HAVE_ECORE_X
1285 if (_ecore_x_available)
1286 {
1287 ecore_x_shutdown();
1288 _ecore_x_available = EINA_FALSE;
1289 window_manager_video = EINA_FALSE;
1290 }
1291#endif
1292
1293 gst_deinit();
1294
1295 error_gst_init:
1296 eina_log_domain_unregister(_emotion_gstreamer_log_domain);
1297 _emotion_gstreamer_log_domain = -1;
1298
1299 return EINA_FALSE;
1300}
1301
1302void
1303gstreamer_module_shutdown(void)
1304{
1305 if (_emotion_init_count > 1)
1306 {
1307 _emotion_init_count--;
1308 return;
1309 }
1310 else if (_emotion_init_count == 0)
1311 {
1312 EINA_LOG_ERR("too many gstreamer_module_shutdown()");
1313 return;
1314 }
1315 _emotion_init_count = 0;
1316
1317 _emotion_module_unregister(&em_engine);
1318
1319#ifdef HAVE_ECORE_X
1320 if (_ecore_x_available)
1321 {
1322 ecore_x_shutdown();
1323 _ecore_x_available = EINA_FALSE;
1324 window_manager_video = EINA_FALSE;
1325 }
1326#endif
1327
1328 eina_log_domain_unregister(_emotion_gstreamer_log_domain);
1329 _emotion_gstreamer_log_domain = -1;
1330
1331 gst_deinit();
1332}
1333
#ifndef EMOTION_STATIC_BUILD_GSTREAMER

/* When built as a loadable module, hook init/shutdown into Eina's
 * module loader; static builds call these entry points directly. */
EINA_MODULE_INIT(gstreamer_module_init);
EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);

#endif
1340
/* GstTagForeachFunc: copy the tags Emotion cares about into
 * ev->metadata, replacing any previously stored value.
 *
 * NOTE(review): every matched branch ends in 'break', which leaves
 * the surrounding for loop after the first value - only the first
 * value of a multi-valued tag is ever stored. Presumably intentional;
 * confirm before reusing this pattern. */
static void
_for_each_tag(GstTagList const* list,
              gchar const* tag,
              void *data)
{
   Emotion_Gstreamer_Video *ev;
   int i;
   int count;


   ev = (Emotion_Gstreamer_Video*)data;

   if (!ev || !ev->metadata) return;

   count = gst_tag_list_get_tag_size(list, tag);

   for (i = 0; i < count; i++)
     {
        /* Strings from gst_tag_list_get_string() are transferred to
         * us, hence the g_free() of the previous value first. */
        if (!strcmp(tag, GST_TAG_TITLE))
          {
             char *str;
             g_free(ev->metadata->title);
             if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
               ev->metadata->title = str;
             else
               ev->metadata->title = NULL;
             break;
          }
        if (!strcmp(tag, GST_TAG_ALBUM))
          {
             gchar *str;
             g_free(ev->metadata->album);
             if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
               ev->metadata->album = str;
             else
               ev->metadata->album = NULL;
             break;
          }
        if (!strcmp(tag, GST_TAG_ARTIST))
          {
             gchar *str;
             g_free(ev->metadata->artist);
             if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
               ev->metadata->artist = str;
             else
               ev->metadata->artist = NULL;
             break;
          }
        if (!strcmp(tag, GST_TAG_GENRE))
          {
             gchar *str;
             g_free(ev->metadata->genre);
             if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
               ev->metadata->genre = str;
             else
               ev->metadata->genre = NULL;
             break;
          }
        if (!strcmp(tag, GST_TAG_COMMENT))
          {
             gchar *str;
             g_free(ev->metadata->comment);
             if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
               ev->metadata->comment = str;
             else
               ev->metadata->comment = NULL;
             break;
          }
        /* Date, track number and disc id are not plain string tags:
         * render the raw GValue into a newly allocated string. */
        if (!strcmp(tag, GST_TAG_DATE))
          {
             gchar *str;
             const GValue *date;
             g_free(ev->metadata->year);
             date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
             if (date)
               str = g_strdup_value_contents(date);
             else
               str = NULL;
             ev->metadata->year = str;
             break;
          }

        if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
          {
             gchar *str;
             const GValue *track;
             g_free(ev->metadata->count);
             track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
             if (track)
               str = g_strdup_value_contents(track);
             else
               str = NULL;
             ev->metadata->count = str;
             break;
          }

#ifdef GST_TAG_CDDA_CDDB_DISCID
        if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
          {
             gchar *str;
             const GValue *discid;
             g_free(ev->metadata->disc_id);
             discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
             if (discid)
               str = g_strdup_value_contents(discid);
             else
               str = NULL;
             ev->metadata->disc_id = str;
             break;
          }
#endif
     }

}
1455
/* Release a metadata record. Every string member was allocated by
 * GLib (g_strdup()/g_strdup_value_contents() in _for_each_tag()),
 * hence g_free(); the record itself came from calloc(), hence plain
 * free(). Safe to call with NULL. */
static void
_free_metadata(Emotion_Gstreamer_Metadata *m)
{
  if (!m) return;

  g_free(m->title);
  g_free(m->album);
  g_free(m->artist);
  g_free(m->genre);
  g_free(m->comment);
  g_free(m->year);
  g_free(m->count);
  g_free(m->disc_id);

  free(m);
}
1472
1473static Eina_Bool
1474_em_restart_stream(void *data)
1475{
1476 Emotion_Gstreamer_Video *ev;
1477
1478 ev = data;
1479
1480 ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri);
1481
1482 if (ev->pipeline)
1483 {
1484 ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
1485 if (!ev->eos_bus)
1486 {
1487 ERR("could not get the bus");
1488 return EINA_FALSE;
1489 }
1490
1491 gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
1492 }
1493
1494 restart_idler = NULL;
1495
1496 return ECORE_CALLBACK_CANCEL;
1497}
1498
/* Scan the src pads of @elem for caps describing video and report the
 * negotiated frame size through @width/@height.
 * Returns EINA_TRUE when both dimensions were found.
 *
 * NOTE(review): gst_iterator_next() returns a GstIteratorResult, not
 * a boolean; this loop also enters its body on RESYNC/ERROR results,
 * where 'pad' may not be valid - confirm against the GStreamer 0.10
 * iterator contract. */
static Eina_Bool
_video_size_get(GstElement *elem, int *width, int *height)
{
   GstIterator *itr = NULL;
   GstCaps *caps;
   GstStructure *str;
   gpointer pad;
   Eina_Bool ret = EINA_FALSE;

   itr = gst_element_iterate_src_pads(elem);
   while(gst_iterator_next(itr, &pad) && !ret)
     {
        /* Only the first structure of the caps is inspected. */
        caps = gst_pad_get_caps(GST_PAD(pad));
        str = gst_caps_get_structure(caps, 0);
        if (g_strrstr(gst_structure_get_name(str), "video"))
          {
             if (gst_structure_get_int(str, "width", width) && gst_structure_get_int(str, "height", height))
               ret = EINA_TRUE;
          }
        gst_caps_unref(caps);
        gst_object_unref(pad);
     }
   gst_iterator_free(itr);

   return ret;
}
1525
1526static void
1527_main_frame_resize(void *data)
1528{
1529 Emotion_Gstreamer_Video *ev = data;
1530 double ratio;
1531
1532 ratio = (double)ev->src_width / (double)ev->src_height;
1533 _emotion_frame_resize(ev->obj, ev->src_width, ev->src_height, ratio);
1534 _emotion_pending_ecore_end();
1535}
1536
/* "no-more-pads" signal handler on decodebin: once all pads exist,
 * look for an element exposing a video size and schedule a frame
 * resize on the main loop (we may be on a streaming thread here). */
static void
_no_more_pads(GstElement *decodebin, gpointer data)
{
   GstIterator *itr = NULL;
   gpointer elem;
   Emotion_Gstreamer_Video *ev = data;

   itr = gst_bin_iterate_elements(GST_BIN(decodebin));
   while(gst_iterator_next(itr, &elem))
     {
        if(_video_size_get(GST_ELEMENT(elem), &ev->src_width, &ev->src_height))
          {
             /* Balanced by _emotion_pending_ecore_end() in
              * _main_frame_resize(). */
             _emotion_pending_ecore_begin();
             ecore_main_loop_thread_safe_call_async(_main_frame_resize, ev);
             gst_object_unref(elem);
             break;
          }
        gst_object_unref(elem);
     }
   gst_iterator_free(itr);
}
1558
/* Main-loop side of bus message handling: runs the Emotion callbacks
 * that must not be invoked from GStreamer streaming threads.
 * Consumes (frees) the Emotion_Gstreamer_Message and closes the
 * pending-ecore section opened by _eos_sync_fct(). */
static void
_eos_main_fct(void *data)
{
   Emotion_Gstreamer_Message *send;
   Emotion_Gstreamer_Video *ev;
   GstMessage *msg;

   send = data;
   ev = send->ev;
   msg = send->msg;

   /* Deliver a deferred "playback started" notification first. */
   if (ev->play_started && !ev->delete_me)
     {
        _emotion_playback_started(ev->obj);
        ev->play_started = 0;
     }

   switch (GST_MESSAGE_TYPE(msg))
     {
      case GST_MESSAGE_EOS:
         if (!ev->delete_me)
           {
              ev->play = 0;
              _emotion_decode_stop(ev->obj);
              _emotion_playback_finished(ev->obj);
           }
         break;
      case GST_MESSAGE_TAG:
         if (!ev->delete_me)
           {
              GstTagList *new_tags;
              gst_message_parse_tag(msg, &new_tags);
              if (new_tags)
                {
                   gst_tag_list_foreach(new_tags,
                                        (GstTagForeachFunc)_for_each_tag,
                                        ev);
                   gst_tag_list_free(new_tags);
                }
           }
         break;
      case GST_MESSAGE_ASYNC_DONE:
         if (!ev->delete_me) _emotion_seek_done(ev->obj);
         break;
      case GST_MESSAGE_STREAM_STATUS:
         break;
      case GST_MESSAGE_STATE_CHANGED:
         if (!ev->delete_me)
           {
              /* Connect "no-more-pads" on the message source exactly
               * once: the disconnect returns 0 when nothing was
               * connected yet, and only then do we connect. */
              if (!g_signal_handlers_disconnect_by_func(msg->src, _no_more_pads, ev))
                g_signal_connect(msg->src, "no-more-pads", G_CALLBACK(_no_more_pads), ev);
           }
         break;
      case GST_MESSAGE_ERROR:
         /* Pipeline is broken: tear it down; when on the priority
          * (Xv) path, fall back to canvas rendering and retry. */
         em_cleanup(ev);

         if (ev->priority)
           {
              ERR("Switching back to canvas rendering.");
              ev->priority = EINA_FALSE;
              priority_overide++;

              restart_idler = ecore_idler_add(_em_restart_stream, ev);
           }
         break;
      default:
         ERR("bus say: %s [%i - %s]",
             GST_MESSAGE_SRC_NAME(msg),
             GST_MESSAGE_TYPE(msg),
             GST_MESSAGE_TYPE_NAME(msg));
         break;
     }

   emotion_gstreamer_message_free(send);
   _emotion_pending_ecore_end();
}
1635
/* Bus sync handler, called on GStreamer streaming threads: filter the
 * messages Emotion cares about and forward them to the main loop via
 * _eos_main_fct(). Always unrefs the message and returns
 * GST_BUS_DROP so the async bus never sees it. */
static GstBusSyncReply
_eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
{
   Emotion_Gstreamer_Video *ev = data;
   Emotion_Gstreamer_Message *send;

   switch (GST_MESSAGE_TYPE(msg))
     {
      case GST_MESSAGE_EOS:
      case GST_MESSAGE_TAG:
      case GST_MESSAGE_ASYNC_DONE:
      case GST_MESSAGE_STREAM_STATUS:
         INF("bus say: %s [%i - %s]",
             GST_MESSAGE_SRC_NAME(msg),
             GST_MESSAGE_TYPE(msg),
             GST_MESSAGE_TYPE_NAME(msg));
         send = emotion_gstreamer_message_alloc(ev, msg);

         if (send)
           {
              /* Balanced by _emotion_pending_ecore_end() in
               * _eos_main_fct(). */
              _emotion_pending_ecore_begin();
              ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
           }

         break;

      case GST_MESSAGE_STATE_CHANGED:
        {
           GstState old_state, new_state;

           gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
           INF("Element %s changed state from %s to %s.",
               GST_OBJECT_NAME(msg->src),
               gst_element_state_get_name(old_state),
               gst_element_state_get_name(new_state));

           /* Only decodebin reaching READY is of interest (to hook
            * the "no-more-pads" signal from the main loop). */
           if (!strncmp(GST_OBJECT_NAME(msg->src), "decodebin", 9) && !strcmp(gst_element_state_get_name(new_state), "READY"))
             {
                send = emotion_gstreamer_message_alloc(ev, msg);

                if (send)
                  {
                     _emotion_pending_ecore_begin();
                     ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
                  }
             }
           break;
        }
      case GST_MESSAGE_ERROR:
        {
           GError *error;
           gchar *debug;

           gst_message_parse_error(msg, &error, &debug);
           ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
           ERR("Debugging info: %s", (debug) ? debug : "none");
           g_error_free(error);
           g_free(debug);

           /* Xv sink failures trigger the fallback-to-canvas logic
            * in _eos_main_fct(); other errors are only logged here. */
           if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
             {
                send = emotion_gstreamer_message_alloc(ev, msg);

                if (send)
                  {
                     _emotion_pending_ecore_begin();
                     ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
                  }
             }
           break;
        }
      case GST_MESSAGE_WARNING:
        {
           GError *error;
           gchar *debug;

           gst_message_parse_warning(msg, &error, &debug);
           WRN("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
           WRN("Debugging info: %s", (debug) ? debug : "none");
           g_error_free(error);
           g_free(debug);
           break;
        }
      default:
         WRN("bus say: %s [%i - %s]",
             GST_MESSAGE_SRC_NAME(msg),
             GST_MESSAGE_TYPE(msg),
             GST_MESSAGE_TYPE_NAME(msg));
         break;
     }

   gst_message_unref(msg);

   return GST_BUS_DROP;
}
1731
1732Eina_Bool
1733_emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
1734 Eina_Bool force)
1735{
1736 gboolean mute = 0;
1737 gdouble vol = 0.0;
1738 gboolean res;
1739 int i;
1740
1741 if (ev->pipeline_parsed)
1742 return EINA_TRUE;
1743
1744 if (force && ev->threads)
1745 {
1746 Ecore_Thread *t;
1747
1748 EINA_LIST_FREE(ev->threads, t)
1749 ecore_thread_cancel(t);
1750 }
1751
1752 if (ev->threads)
1753 return EINA_FALSE;
1754
1755 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1756 if (res == GST_STATE_CHANGE_NO_PREROLL)
1757 {
1758 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1759
1760 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1761 }
1762
1763 /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1764 /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1765#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
1766 if (getuid() == geteuid())
1767#endif
1768 {
1769 if (getenv("EMOTION_GSTREAMER_DOT"))
1770 GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1771 GST_DEBUG_GRAPH_SHOW_ALL,
1772 getenv("EMOTION_GSTREAMER_DOT"));
1773 }
1774
1775 if (!(res == GST_STATE_CHANGE_SUCCESS
1776 || res == GST_STATE_CHANGE_NO_PREROLL))
1777 {
1778 ERR("Unable to get GST_CLOCK_TIME_NONE.");
1779 return EINA_FALSE;
1780 }
1781
1782 g_object_get(G_OBJECT(ev->pipeline),
1783 "n-audio", &ev->audio_stream_nbr,
1784 "n-video", &ev->video_stream_nbr,
1785 NULL);
1786
1787 if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1788 {
1789 ERR("No audio nor video stream found");
1790 return EINA_FALSE;
1791 }
1792
1793 /* video stream */
1794 for (i = 0; i < ev->video_stream_nbr; i++)
1795 {
1796 Emotion_Video_Stream *vstream;
1797 GstPad *pad = NULL;
1798 GstCaps *caps;
1799 GstStructure *structure;
1800 GstQuery *query;
1801 const GValue *val;
1802 gchar *str = NULL;
1803
1804 gdouble length_time = 0.0;
1805 gint width;
1806 gint height;
1807 gint fps_num;
1808 gint fps_den;
1809 guint32 fourcc = 0;
1810
1811 g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1812 if (!pad)
1813 continue;
1814
1815 caps = gst_pad_get_negotiated_caps(pad);
1816 if (!caps)
1817 goto unref_pad_v;
1818 structure = gst_caps_get_structure(caps, 0);
1819 str = gst_caps_to_string(caps);
1820
1821 if (!gst_structure_get_int(structure, "width", &width))
1822 goto unref_caps_v;
1823 if (!gst_structure_get_int(structure, "height", &height))
1824 goto unref_caps_v;
1825 if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
1826 goto unref_caps_v;
1827
1828 if (g_str_has_prefix(str, "video/x-raw-yuv"))
1829 {
1830 val = gst_structure_get_value(structure, "format");
1831 fourcc = gst_value_get_fourcc(val);
1832 }
1833 else if (g_str_has_prefix(str, "video/x-raw-rgb"))
1834 fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1835 else
1836 goto unref_caps_v;
1837
1838 query = gst_query_new_duration(GST_FORMAT_TIME);
1839 if (gst_pad_peer_query(pad, query))
1840 {
1841 gint64 t;
1842
1843 gst_query_parse_duration(query, NULL, &t);
1844 length_time = (double)t / (double)GST_SECOND;
1845 }
1846 else
1847 goto unref_query_v;
1848
1849 vstream = emotion_video_stream_new(ev);
1850 if (!vstream) goto unref_query_v;
1851
1852 vstream->length_time = length_time;
1853 vstream->width = width;
1854 vstream->height = height;
1855 vstream->fps_num = fps_num;
1856 vstream->fps_den = fps_den;
1857 vstream->fourcc = fourcc;
1858 vstream->index = i;
1859
1860 unref_query_v:
1861 gst_query_unref(query);
1862 unref_caps_v:
1863 gst_caps_unref(caps);
1864 g_free(str);
1865 unref_pad_v:
1866 gst_object_unref(pad);
1867 }
1868
1869 /* Audio streams */
1870 for (i = 0; i < ev->audio_stream_nbr; i++)
1871 {
1872 Emotion_Audio_Stream *astream;
1873 GstPad *pad;
1874 GstCaps *caps;
1875 GstStructure *structure;
1876 GstQuery *query;
1877
1878 gdouble length_time = 0.0;
1879 gint channels;
1880 gint samplerate;
1881
1882 g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1883 if (!pad)
1884 continue;
1885
1886 caps = gst_pad_get_negotiated_caps(pad);
1887 if (!caps)
1888 goto unref_pad_a;
1889 structure = gst_caps_get_structure(caps, 0);
1890
1891 if (!gst_structure_get_int(structure, "channels", &channels))
1892 goto unref_caps_a;
1893 if (!gst_structure_get_int(structure, "rate", &samplerate))
1894 goto unref_caps_a;
1895
1896 query = gst_query_new_duration(GST_FORMAT_TIME);
1897 if (gst_pad_peer_query(pad, query))
1898 {
1899 gint64 t;
1900
1901 gst_query_parse_duration(query, NULL, &t);
1902 length_time = (double)t / (double)GST_SECOND;
1903 }
1904 else
1905 goto unref_query_a;
1906
1907 astream = calloc(1, sizeof(Emotion_Audio_Stream));
1908 if (!astream) continue;
1909 ev->audio_streams = eina_list_append(ev->audio_streams, astream);
1910
1911 astream->length_time = length_time;
1912 astream->channels = channels;
1913 astream->samplerate = samplerate;
1914
1915 unref_query_a:
1916 gst_query_unref(query);
1917 unref_caps_a:
1918 gst_caps_unref(caps);
1919 unref_pad_a:
1920 gst_object_unref(pad);
1921 }
1922
1923 /* Visualization sink */
1924 if (ev->video_stream_nbr == 0)
1925 {
1926 GstElement *vis = NULL;
1927 Emotion_Video_Stream *vstream;
1928 Emotion_Audio_Stream *astream;
1929 gint flags;
1930 const char *vis_name;
1931
1932 if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1933 {
1934 WRN("pb vis name %d", ev->vis);
1935 goto finalize;
1936 }
1937
1938 astream = eina_list_data_get(ev->audio_streams);
1939
1940 vis = gst_element_factory_make(vis_name, "vissink");
1941 vstream = emotion_video_stream_new(ev);
1942 if (!vstream)
1943 goto finalize;
1944 else
1945 DBG("could not create visualization stream");
1946
1947 vstream->length_time = astream->length_time;
1948 vstream->width = 320;
1949 vstream->height = 200;
1950 vstream->fps_num = 25;
1951 vstream->fps_den = 1;
1952 vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1953
1954 g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1955 g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
1956 flags |= 0x00000008;
1957 g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
1958 }
1959
1960 finalize:
1961
1962 ev->video_stream_nbr = eina_list_count(ev->video_streams);
1963 ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
1964
1965 if (ev->video_stream_nbr == 1)
1966 {
1967 Emotion_Video_Stream *vstream;
1968
1969 vstream = eina_list_data_get(ev->video_streams);
1970 ev->ratio = (double)vstream->width / (double)vstream->height;
1971 _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
1972 }
1973
1974 {
1975 /* on recapitule : */
1976 Emotion_Video_Stream *vstream;
1977 Emotion_Audio_Stream *astream;
1978
1979 vstream = eina_list_data_get(ev->video_streams);
1980 if (vstream)
1981 {
1982 DBG("video size=%dx%d, fps=%d/%d, "
1983 "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
1984 vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
1985 GST_FOURCC_ARGS(vstream->fourcc),
1986 GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
1987 }
1988
1989 astream = eina_list_data_get(ev->audio_streams);
1990 if (astream)
1991 {
1992 DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
1993 astream->channels, astream->samplerate,
1994 GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
1995 }
1996 }
1997
1998 if (ev->metadata)
1999 _free_metadata(ev->metadata);
2000 ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
2001
2002 ev->pipeline_parsed = EINA_TRUE;
2003
2004 g_object_get(ev->pipeline, "volume", &vol, NULL);
2005 g_object_get(ev->pipeline, "mute", &mute, NULL);
2006 ev->volume = vol;
2007 ev->audio_mute = mute;
2008
2009 if (ev->play_started)
2010 {
2011 _emotion_playback_started(ev->obj);
2012 ev->play_started = 0;
2013 }
2014
2015 _emotion_open_done(ev->obj);
2016
2017 return EINA_TRUE;
2018}
diff --git a/src/modules/emotion/gstreamer/emotion_gstreamer.h b/src/modules/emotion/gstreamer/emotion_gstreamer.h
deleted file mode 100644
index 4b15ae5777..0000000000
--- a/src/modules/emotion/gstreamer/emotion_gstreamer.h
+++ /dev/null
@@ -1,352 +0,0 @@
1#ifndef __EMOTION_GSTREAMER_H__
2#define __EMOTION_GSTREAMER_H__
3
4#include "emotion_modules.h"
5
6typedef void (*Evas_Video_Convert_Cb)(unsigned char *evas_data,
7 const unsigned char *gst_data,
8 unsigned int w,
9 unsigned int h,
10 unsigned int output_height);
11
12typedef struct _EvasVideoSinkPrivate EvasVideoSinkPrivate;
13typedef struct _EvasVideoSink EvasVideoSink;
14typedef struct _EvasVideoSinkClass EvasVideoSinkClass;
15typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video;
16typedef struct _Emotion_Audio_Stream Emotion_Audio_Stream;
17typedef struct _Emotion_Gstreamer_Metadata Emotion_Gstreamer_Metadata;
18typedef struct _Emotion_Gstreamer_Buffer Emotion_Gstreamer_Buffer;
19typedef struct _Emotion_Gstreamer_Message Emotion_Gstreamer_Message;
20typedef struct _Emotion_Video_Stream Emotion_Video_Stream;
21
22struct _Emotion_Video_Stream
23{
24 gdouble length_time;
25 gint width;
26 gint height;
27 gint fps_num;
28 gint fps_den;
29 guint32 fourcc;
30 int index;
31};
32
33struct _Emotion_Audio_Stream
34{
35 gdouble length_time;
36 gint channels;
37 gint samplerate;
38};
39
40struct _Emotion_Gstreamer_Metadata
41{
42 char *title;
43 char *album;
44 char *artist;
45 char *genre;
46 char *comment;
47 char *year;
48 char *count;
49 char *disc_id;
50};
51
52struct _Emotion_Gstreamer_Video
53{
54 const Emotion_Engine *api;
55
56 /* Gstreamer elements */
57 GstElement *pipeline;
58 GstElement *sink;
59 GstElement *esink;
60 GstElement *xvsink;
61 GstElement *tee;
62 GstElement *convert;
63
64 GstPad *eteepad;
65 GstPad *xvteepad;
66 GstPad *xvpad;
67 Eina_List *threads;
68
69 /* eos */
70 GstBus *eos_bus;
71
72 /* Strams */
73 Eina_List *video_streams;
74 Eina_List *audio_streams;
75
76 int video_stream_nbr;
77 int audio_stream_nbr;
78
79 /* We need to keep a copy of the last inserted buffer as evas doesn't copy YUV data around */
80 GstBuffer *last_buffer;
81
82 /* Evas object */
83 Evas_Object *obj;
84
85 /* Characteristics of stream */
86 double position;
87 double ratio;
88 double volume;
89
90 volatile int seek_to;
91 volatile int get_poslen;
92
93 Emotion_Gstreamer_Metadata *metadata;
94
95#ifdef HAVE_ECORE_X
96 Ecore_X_Window win;
97#endif
98
99 const char *uri;
100
101 Emotion_Gstreamer_Buffer *send;
102
103 EvasVideoSinkPrivate *sink_data;
104
105 Emotion_Vis vis;
106
107 int in;
108 int out;
109
110 int frames;
111 int flapse;
112 double rtime;
113 double rlapse;
114
115 struct
116 {
117 double width;
118 double height;
119 } fill;
120
121 Eina_Bool play : 1;
122 Eina_Bool play_started : 1;
123 Eina_Bool video_mute : 1;
124 Eina_Bool audio_mute : 1;
125 Eina_Bool pipeline_parsed : 1;
126 Eina_Bool delete_me : 1;
127 Eina_Bool samsung : 1;
128 Eina_Bool kill_buffer : 1;
129 Eina_Bool stream : 1;
130 Eina_Bool priority : 1;
131
132 int src_width;
133 int src_height;
134};
135
136struct _EvasVideoSink {
137 /*< private >*/
138 GstVideoSink parent;
139 EvasVideoSinkPrivate *priv;
140};
141
142struct _EvasVideoSinkClass {
143 /*< private >*/
144 GstVideoSinkClass parent_class;
145};
146
147struct _EvasVideoSinkPrivate {
148 EINA_REFCOUNT;
149
150 Evas_Object *o;
151
152 Emotion_Gstreamer_Video *ev;
153
154 Evas_Video_Convert_Cb func;
155
156 unsigned int width;
157 unsigned int height;
158 unsigned int source_height;
159 Evas_Colorspace eformat;
160
161 Eina_Lock m;
162 Eina_Condition c;
163
164 // If this is TRUE all processing should finish ASAP
165 // This is necessary because there could be a race between
166 // unlock() and render(), where unlock() wins, signals the
167 // GCond, then render() tries to render a frame although
168 // everything else isn't running anymore. This will lead
169 // to deadlocks because render() holds the stream lock.
170 //
171 // Protected by the buffer mutex
172 Eina_Bool unlocked : 1;
173 Eina_Bool samsung : 1; /** ST12 will only define a Samsung specific GstBuffer */
174};
175
176struct _Emotion_Gstreamer_Buffer
177{
178 Emotion_Gstreamer_Video *ev;
179 EvasVideoSinkPrivate *sink;
180
181 GstBuffer *frame;
182
183 Eina_Bool preroll : 1;
184 Eina_Bool force : 1;
185};
186
187struct _Emotion_Gstreamer_Message
188{
189 Emotion_Gstreamer_Video *ev;
190
191 GstMessage *msg;
192};
193
194extern Eina_Bool window_manager_video;
195extern Eina_Bool debug_fps;
196extern int _emotion_gstreamer_log_domain;
197extern Eina_Bool _ecore_x_available;
198
199#ifdef DBG
200#undef DBG
201#endif
202#define DBG(...) EINA_LOG_DOM_DBG(_emotion_gstreamer_log_domain, __VA_ARGS__)
203
204#ifdef INF
205#undef INF
206#endif
207#define INF(...) EINA_LOG_DOM_INFO(_emotion_gstreamer_log_domain, __VA_ARGS__)
208
209#ifdef WRN
210#undef WRN
211#endif
212#define WRN(...) EINA_LOG_DOM_WARN(_emotion_gstreamer_log_domain, __VA_ARGS__)
213
214#ifdef ERR
215#undef ERR
216#endif
217#define ERR(...) EINA_LOG_DOM_ERR(_emotion_gstreamer_log_domain, __VA_ARGS__)
218
219#ifdef CRI
220#undef CRI
221#endif
222#define CRI(...) EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__)
223
224#define EVAS_TYPE_VIDEO_SINK evas_video_sink_get_type()
225
226GType fakeeos_bin_get_type(void);
227
228#define EVAS_VIDEO_SINK(obj) \
229 (G_TYPE_CHECK_INSTANCE_CAST((obj), \
230 EVAS_TYPE_VIDEO_SINK, EvasVideoSink))
231
232#define EVAS_VIDEO_SINK_CLASS(klass) \
233 (G_TYPE_CHECK_CLASS_CAST((klass), \
234 EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
235
236#define EVAS_IS_VIDEO_SINK(obj) \
237 (G_TYPE_CHECK_INSTANCE_TYPE((obj), \
238 EVAS_TYPE_VIDEO_SINK))
239
240#define EVAS_IS_VIDEO_SINK_CLASS(klass) \
241 (G_TYPE_CHECK_CLASS_TYPE((klass), \
242 EVAS_TYPE_VIDEO_SINK))
243
244#define EVAS_VIDEO_SINK_GET_CLASS(obj) \
245 (G_TYPE_INSTANCE_GET_CLASS((obj), \
246 EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
247
248#define GST_TYPE_FAKEEOS_BIN fakeeos_bin_get_type()
249
250GstElement *gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
251 Evas_Object *obj,
252 const char *uri);
253
254gboolean gstreamer_plugin_init(GstPlugin *plugin);
255
256Emotion_Gstreamer_Buffer *emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
257 GstBuffer *buffer,
258 Eina_Bool preroll);
259void emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send);
260
261Emotion_Gstreamer_Message *emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
262 GstMessage *msg);
263void emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send);
264Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
265 Eina_Bool force);
266
267typedef struct _ColorSpace_FourCC_Convertion ColorSpace_FourCC_Convertion;
268typedef struct _ColorSpace_Format_Convertion ColorSpace_Format_Convertion;
269
270struct _ColorSpace_FourCC_Convertion
271{
272 const char *name;
273 guint32 fourcc;
274 Evas_Colorspace eformat;
275 Evas_Video_Convert_Cb func;
276 Eina_Bool force_height;
277};
278
279struct _ColorSpace_Format_Convertion
280{
281 const char *name;
282 GstVideoFormat format;
283 Evas_Colorspace eformat;
284 Evas_Video_Convert_Cb func;
285};
286
287extern const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[];
288extern const ColorSpace_Format_Convertion colorspace_format_convertion[];
289
290/** Samsung specific infrastructure - do not touch, do not modify */
291#define MPLANE_IMGB_MAX_COUNT 4
292#define SCMN_IMGB_MAX_PLANE 4
293
294typedef struct _GstMultiPlaneImageBuffer GstMultiPlaneImageBuffer;
295typedef struct _SCMN_IMGB SCMN_IMGB;
296
297struct _GstMultiPlaneImageBuffer
298{
299 GstBuffer buffer;
300
301 /* width of each image plane */
302 gint width[MPLANE_IMGB_MAX_COUNT];
303 /* height of each image plane */
304 gint height[MPLANE_IMGB_MAX_COUNT];
305 /* stride of each image plane */
306 gint stride[MPLANE_IMGB_MAX_COUNT];
307 /* elevation of each image plane */
308 gint elevation[MPLANE_IMGB_MAX_COUNT];
309 /* user space address of each image plane */
310 guchar *uaddr[MPLANE_IMGB_MAX_COUNT];
311 /* Index of real address of each image plane, if needs */
312 guchar *index[MPLANE_IMGB_MAX_COUNT];
313 /* left postion, if needs */
314 gint x;
315 /* top position, if needs */
316 gint y;
317 /* to align memory */
318 gint __dummy2;
319 /* arbitrary data */
320 gint data[16];
321};
322
323struct _SCMN_IMGB
324{
325 /* width of each image plane */
326 int width[SCMN_IMGB_MAX_PLANE];
327 /* height of each image plane */
328 int height[SCMN_IMGB_MAX_PLANE];
329 /* stride of each image plane */
330 int stride[SCMN_IMGB_MAX_PLANE];
331 /* elevation of each image plane */
332 int elevation[SCMN_IMGB_MAX_PLANE];
333 /* user space address of each image plane */
334 guchar *uaddr[SCMN_IMGB_MAX_PLANE];
335 /* physical address of each image plane, if needs */
336 guchar *p[SCMN_IMGB_MAX_PLANE];
337 /* color space type of image */
338 int cs;
339 /* left postion, if needs */
340 int x;
341 /* top position, if needs */
342 int y;
343 /* to align memory */
344 int __dummy2;
345 /* arbitrary data */
346 int data[16];
347};
348
349void _evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED);
350void _evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w EINA_UNUSED, unsigned int h, unsigned int output_height EINA_UNUSED);
351
352#endif /* __EMOTION_GSTREAMER_H__ */
diff --git a/src/modules/emotion/gstreamer/emotion_sink.c b/src/modules/emotion/gstreamer/emotion_sink.c
deleted file mode 100644
index 63fcbeeea7..0000000000
--- a/src/modules/emotion/gstreamer/emotion_sink.c
+++ /dev/null
@@ -1,1461 +0,0 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include <Eina.h>
6#include <Evas.h>
7#include <Ecore.h>
8
9#define HTTP_STREAM 0
10#define RTSP_STREAM 1
11#include <glib.h>
12#include <gst/gst.h>
13#include <glib-object.h>
14#include <gst/video/gstvideosink.h>
15#include <gst/video/video.h>
16
17// forcibly disable x overlay window.. broken badly.
18#undef HAVE_ECORE_X
19
20#ifdef HAVE_ECORE_X
21# include <Ecore_X.h>
22# include <Ecore_Evas.h>
23# ifdef HAVE_XOVERLAY_H
24# include <gst/interfaces/xoverlay.h>
25# endif
26#endif
27
28#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
29# include <unistd.h>
30# include <sys/types.h>
31#endif
32
33#include "emotion_modules.h"
34#include "emotion_gstreamer.h"
35
36static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
37 GST_PAD_SINK, GST_PAD_ALWAYS,
38 GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
39 GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
40
41GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
42#define GST_CAT_DEFAULT evas_video_sink_debug
43
44enum {
45 REPAINT_REQUESTED,
46 LAST_SIGNAL
47};
48
49enum {
50 PROP_0,
51 PROP_EVAS_OBJECT,
52 PROP_WIDTH,
53 PROP_HEIGHT,
54 PROP_EV,
55 PROP_LAST
56};
57
58static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
59
60#define _do_init(bla) \
61 GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \
62 "emotion-sink", \
63 0, \
64 "emotion video sink")
65
66GST_BOILERPLATE_FULL(EvasVideoSink,
67 evas_video_sink,
68 GstVideoSink,
69 GST_TYPE_VIDEO_SINK,
70 _do_init);
71
72
73static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
74static void evas_video_sink_main_render(void *data);
75static void evas_video_sink_samsung_main_render(void *data);
76
77static void
78evas_video_sink_base_init(gpointer g_class)
79{
80 GstElementClass* element_class;
81
82 element_class = GST_ELEMENT_CLASS(g_class);
83 gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
84 gst_element_class_set_details_simple(element_class, "Evas video sink",
85 "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
86 "Vincent Torri <vtorri@univ-evry.fr>");
87}
88
89static void
90evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass EINA_UNUSED)
91{
92 EvasVideoSinkPrivate* priv;
93
94 INF("sink init");
95 sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
96 priv->o = NULL;
97 priv->width = 0;
98 priv->height = 0;
99 priv->func = NULL;
100 priv->eformat = EVAS_COLORSPACE_ARGB8888;
101 priv->samsung = EINA_FALSE;
102 eina_lock_new(&priv->m);
103 eina_condition_new(&priv->c, &priv->m);
104 priv->unlocked = EINA_FALSE;
105}
106
107/**** Object methods ****/
108static void
109_cleanup_priv(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED)
110{
111 EvasVideoSinkPrivate* priv;
112
113 priv = data;
114
115 eina_lock_take(&priv->m);
116 if (priv->o == obj)
117 priv->o = NULL;
118 eina_lock_release(&priv->m);
119}
120
121static void
122evas_video_sink_set_property(GObject * object, guint prop_id,
123 const GValue * value, GParamSpec * pspec)
124{
125 EvasVideoSink* sink;
126 EvasVideoSinkPrivate* priv;
127
128 sink = EVAS_VIDEO_SINK (object);
129 priv = sink->priv;
130
131 switch (prop_id) {
132 case PROP_EVAS_OBJECT:
133 eina_lock_take(&priv->m);
134 if (priv->o)
135 evas_object_event_callback_del(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv);
136 priv->o = g_value_get_pointer (value);
137 INF("sink set Evas_Object %p.", priv->o);
138 if (priv->o)
139 evas_object_event_callback_add(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv, priv);
140 eina_lock_release(&priv->m);
141 break;
142 case PROP_EV:
143 INF("sink set ev.");
144 eina_lock_take(&priv->m);
145 priv->ev = g_value_get_pointer (value);
146 if (priv->ev)
147 priv->ev->samsung = EINA_TRUE;
148 eina_lock_release(&priv->m);
149 break;
150 default:
151 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
152 ERR("invalid property");
153 break;
154 }
155}
156
157static void
158evas_video_sink_get_property(GObject * object, guint prop_id,
159 GValue * value, GParamSpec * pspec)
160{
161 EvasVideoSink* sink;
162 EvasVideoSinkPrivate* priv;
163
164 sink = EVAS_VIDEO_SINK (object);
165 priv = sink->priv;
166
167 switch (prop_id) {
168 case PROP_EVAS_OBJECT:
169 INF("sink get property.");
170 eina_lock_take(&priv->m);
171 g_value_set_pointer(value, priv->o);
172 eina_lock_release(&priv->m);
173 break;
174 case PROP_WIDTH:
175 INF("sink get width.");
176 eina_lock_take(&priv->m);
177 g_value_set_int(value, priv->width);
178 eina_lock_release(&priv->m);
179 break;
180 case PROP_HEIGHT:
181 INF("sink get height.");
182 eina_lock_take(&priv->m);
183 g_value_set_int (value, priv->height);
184 eina_lock_release(&priv->m);
185 break;
186 case PROP_EV:
187 INF("sink get ev.");
188 eina_lock_take(&priv->m);
189 g_value_set_pointer (value, priv->ev);
190 eina_lock_release(&priv->m);
191 break;
192 default:
193 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
194 ERR("invalid property");
195 break;
196 }
197}
198
199static void
200evas_video_sink_dispose(GObject* object)
201{
202 EvasVideoSink* sink;
203 EvasVideoSinkPrivate* priv;
204
205 INF("dispose.");
206
207 sink = EVAS_VIDEO_SINK(object);
208 priv = sink->priv;
209
210 eina_lock_free(&priv->m);
211 eina_condition_free(&priv->c);
212
213 G_OBJECT_CLASS(parent_class)->dispose(object);
214}
215
216
217/**** BaseSink methods ****/
218
219gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
220{
221 EvasVideoSink* sink;
222 EvasVideoSinkPrivate* priv;
223 GstStructure *structure;
224 GstVideoFormat format;
225 guint32 fourcc;
226 unsigned int i;
227
228 sink = EVAS_VIDEO_SINK(bsink);
229 priv = sink->priv;
230
231 structure = gst_caps_get_structure(caps, 0);
232
233 if (gst_structure_get_int(structure, "width", (int*) &priv->width)
234 && gst_structure_get_int(structure, "height", (int*) &priv->height)
235 && gst_structure_get_fourcc(structure, "format", &fourcc))
236 {
237 priv->source_height = priv->height;
238
239 for (i = 0; colorspace_fourcc_convertion[i].name != NULL; ++i)
240 if (fourcc == colorspace_fourcc_convertion[i].fourcc)
241 {
242 DBG("Found '%s'", colorspace_fourcc_convertion[i].name);
243 priv->eformat = colorspace_fourcc_convertion[i].eformat;
244 priv->func = colorspace_fourcc_convertion[i].func;
245 if (colorspace_fourcc_convertion[i].force_height)
246 {
247 priv->height = (priv->height >> 1) << 1;
248 }
249 if (priv->ev)
250 priv->ev->kill_buffer = EINA_TRUE;
251 return TRUE;
252 }
253
254 if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
255 {
256 DBG("Found '%s'", "ST12");
257 priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
258 priv->samsung = EINA_TRUE;
259 priv->func = NULL;
260 if (priv->ev)
261 {
262 priv->ev->samsung = EINA_TRUE;
263 priv->ev->kill_buffer = EINA_TRUE;
264 }
265 return TRUE;
266 }
267 }
268
269 INF("fallback code !");
270 if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
271 {
272 ERR("Unable to parse caps.");
273 return FALSE;
274 }
275
276 priv->source_height = priv->height;
277
278 for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
279 if (format == colorspace_format_convertion[i].format)
280 {
281 DBG("Found '%s'", colorspace_format_convertion[i].name);
282 priv->eformat = colorspace_format_convertion[i].eformat;
283 priv->func = colorspace_format_convertion[i].func;
284 if (priv->ev)
285 priv->ev->kill_buffer = EINA_FALSE;
286 return TRUE;
287 }
288
289 ERR("unsupported : %d\n", format);
290 return FALSE;
291}
292
293static gboolean
294evas_video_sink_start(GstBaseSink* base_sink)
295{
296 EvasVideoSinkPrivate* priv;
297 gboolean res = TRUE;
298
299 INF("sink start");
300
301 priv = EVAS_VIDEO_SINK(base_sink)->priv;
302 eina_lock_take(&priv->m);
303 if (!priv->o)
304 res = FALSE;
305 else
306 priv->unlocked = EINA_FALSE;
307 eina_lock_release(&priv->m);
308 return res;
309}
310
311static gboolean
312evas_video_sink_stop(GstBaseSink* base_sink)
313{
314 EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
315
316 INF("sink stop");
317
318 unlock_buffer_mutex(priv);
319 return TRUE;
320}
321
322static gboolean
323evas_video_sink_unlock(GstBaseSink* object)
324{
325 EvasVideoSink* sink;
326
327 INF("sink unlock");
328
329 sink = EVAS_VIDEO_SINK(object);
330
331 unlock_buffer_mutex(sink->priv);
332
333 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
334 (object), TRUE);
335}
336
337static gboolean
338evas_video_sink_unlock_stop(GstBaseSink* object)
339{
340 EvasVideoSink* sink;
341 EvasVideoSinkPrivate* priv;
342
343 sink = EVAS_VIDEO_SINK(object);
344 priv = sink->priv;
345
346 INF("sink unlock stop");
347
348 eina_lock_take(&priv->m);
349 priv->unlocked = FALSE;
350 eina_lock_release(&priv->m);
351
352 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
353 (object), TRUE);
354}
355
356static GstFlowReturn
357evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
358{
359 Emotion_Gstreamer_Buffer *send;
360 EvasVideoSinkPrivate *priv;
361 EvasVideoSink *sink;
362
363 INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
364
365 sink = EVAS_VIDEO_SINK(bsink);
366 priv = sink->priv;
367
368 if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
369 {
370 WRN("empty buffer");
371 return GST_FLOW_OK;
372 }
373
374 send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
375
376 if (send)
377 {
378 if (priv->samsung)
379 {
380 if (!priv->func)
381 {
382 GstStructure *structure;
383 GstCaps *caps;
384 gboolean is_multiplane = FALSE;
385
386 caps = GST_BUFFER_CAPS(buffer);
387 structure = gst_caps_get_structure (caps, 0);
388 gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
389 gst_caps_unref(caps);
390
391 if (is_multiplane)
392 priv->func = _evas_video_st12_multiplane;
393 else
394 priv->func = _evas_video_st12;
395 }
396 _emotion_pending_ecore_begin();
397 ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
398 }
399 else
400 {
401 _emotion_pending_ecore_begin();
402 ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
403 }
404 }
405
406 return GST_FLOW_OK;
407}
408
409static GstFlowReturn
410evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
411{
412 Emotion_Gstreamer_Buffer *send;
413 EvasVideoSinkPrivate *priv;
414 EvasVideoSink *sink;
415
416 INF("sink render %p", buffer);
417
418 sink = EVAS_VIDEO_SINK(bsink);
419 priv = sink->priv;
420
421 eina_lock_take(&priv->m);
422
423 if (priv->unlocked) {
424 ERR("LOCKED");
425 eina_lock_release(&priv->m);
426 return GST_FLOW_OK;
427 }
428
429 send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
430 if (!send) {
431 eina_lock_release(&priv->m);
432 return GST_FLOW_ERROR;
433 }
434
435 if (priv->samsung)
436 {
437 if (!priv->func)
438 {
439 GstStructure *structure;
440 GstCaps *caps;
441 gboolean is_multiplane = FALSE;
442
443 caps = GST_BUFFER_CAPS(buffer);
444 structure = gst_caps_get_structure (caps, 0);
445 gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
446 gst_caps_unref(caps);
447
448 if (is_multiplane)
449 priv->func = _evas_video_st12_multiplane;
450 else
451 priv->func = _evas_video_st12;
452 }
453 _emotion_pending_ecore_begin();
454 ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
455 }
456 else
457 {
458 _emotion_pending_ecore_begin();
459 ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
460 }
461
462 eina_condition_wait(&priv->c);
463 eina_lock_release(&priv->m);
464
465 return GST_FLOW_OK;
466}
467
468static void
469_update_emotion_fps(Emotion_Gstreamer_Video *ev)
470{
471 double tim;
472
473 if (!debug_fps) return;
474
475 tim = ecore_time_get();
476 ev->frames++;
477
478 if (ev->rlapse == 0.0)
479 {
480 ev->rlapse = tim;
481 ev->flapse = ev->frames;
482 }
483 else if ((tim - ev->rlapse) >= 0.5)
484 {
485 printf("FRAME: %i, FPS: %3.1f\n",
486 ev->frames,
487 (ev->frames - ev->flapse) / (tim - ev->rlapse));
488 ev->rlapse = tim;
489 ev->flapse = ev->frames;
490 }
491}
492
493static void
494evas_video_sink_samsung_main_render(void *data)
495{
496 Emotion_Gstreamer_Buffer *send;
497 Emotion_Video_Stream *vstream;
498 EvasVideoSinkPrivate *priv = NULL;
499 GstBuffer* buffer;
500 unsigned char *evas_data;
501 const guint8 *gst_data;
502 GstFormat fmt = GST_FORMAT_TIME;
503 gint64 pos;
504 Eina_Bool preroll = EINA_FALSE;
505 int stride, elevation;
506 Evas_Coord w, h;
507
508 send = data;
509
510 if (!send) goto exit_point;
511
512 priv = send->sink;
513 buffer = send->frame;
514 preroll = send->preroll;
515
516 /* frame after cleanup */
517 if (!preroll && !send->ev->last_buffer)
518 {
519 priv = NULL;
520 goto exit_point;
521 }
522
523 if (!priv || !priv->o || priv->unlocked)
524 goto exit_point;
525
526 if (send->ev->send)
527 {
528 emotion_gstreamer_buffer_free(send->ev->send);
529 send->ev->send = NULL;
530 }
531
532 if (!send->ev->stream && !send->force)
533 {
534 send->ev->send = send;
535 _emotion_frame_new(send->ev->obj);
536 goto exit_stream;
537 }
538
539 _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
540
541 /* Getting stride to compute the right size and then fill the object properly */
542 /* Y => [0] and UV in [1] */
543 if (priv->func == _evas_video_st12_multiplane)
544 {
545 const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
546
547 stride = mp_buf->stride[0];
548 elevation = mp_buf->elevation[0];
549 priv->width = mp_buf->width[0];
550 priv->height = mp_buf->height[0];
551
552 gst_data = (const guint8 *) mp_buf;
553 }
554 else
555 {
556 const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
557
558 stride = imgb->stride[0];
559 elevation = imgb->elevation[0];
560 priv->width = imgb->width[0];
561 priv->height = imgb->height[0];
562
563 gst_data = (const guint8 *) imgb;
564 }
565
566 evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
567
568 send->ev->fill.width = (double) stride / priv->width;
569 send->ev->fill.height = (double) elevation / priv->height;
570
571 evas_object_image_alpha_set(priv->o, 0);
572 evas_object_image_colorspace_set(priv->o, priv->eformat);
573 evas_object_image_size_set(priv->o, stride, elevation);
574
575 _update_emotion_fps(send->ev);
576
577 evas_data = evas_object_image_data_get(priv->o, 1);
578
579 if (priv->func)
580 priv->func(evas_data, gst_data, stride, elevation, elevation);
581 else
582 WRN("No way to decode colorspace '%x'!", priv->eformat);
583
584 evas_object_image_data_set(priv->o, evas_data);
585 evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
586 evas_object_image_pixels_dirty_set(priv->o, 0);
587
588 if (!preroll && send->ev->play_started)
589 {
590 _emotion_playback_started(send->ev->obj);
591 send->ev->play_started = 0;
592 }
593
594 if (!send->force)
595 {
596 _emotion_frame_new(send->ev->obj);
597 }
598
599 vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
600
601 gst_element_query_position(send->ev->pipeline, &fmt, &pos);
602 send->ev->position = (double)pos / (double)GST_SECOND;
603
604 if (vstream)
605 {
606 vstream->width = priv->width;
607 vstream->height = priv->height;
608
609 _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
610 }
611
612 send->ev->ratio = (double) priv->width / (double) priv->height;
613 _emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
614 _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
615
616 buffer = gst_buffer_ref(buffer);
617 if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
618 send->ev->last_buffer = buffer;
619
620 exit_point:
621 if (send) emotion_gstreamer_buffer_free(send);
622
623 exit_stream:
624 if (priv)
625 {
626 if (preroll || !priv->o)
627 {
628 _emotion_pending_ecore_end();
629 return;
630 }
631
632 if (!priv->unlocked)
633 eina_condition_signal(&priv->c);
634 }
635 _emotion_pending_ecore_end();
636}
637
638static void
639evas_video_sink_main_render(void *data)
640{
641 Emotion_Gstreamer_Buffer *send;
642 Emotion_Gstreamer_Video *ev = NULL;
643 Emotion_Video_Stream *vstream;
644 EvasVideoSinkPrivate *priv = NULL;
645 GstBuffer *buffer;
646 unsigned char *evas_data;
647 GstFormat fmt = GST_FORMAT_TIME;
648 gint64 pos;
649 Eina_Bool preroll = EINA_FALSE;
650
651 send = data;
652
653 if (!send) goto exit_point;
654
655 priv = send->sink;
656 buffer = send->frame;
657 preroll = send->preroll;
658 ev = send->ev;
659
660 /* frame after cleanup */
661 if (!preroll && !ev->last_buffer)
662 {
663 priv = NULL;
664 goto exit_point;
665 }
666
667 if (!priv || !priv->o || priv->unlocked)
668 goto exit_point;
669
670 if (ev->send && send != ev->send)
671 {
672 emotion_gstreamer_buffer_free(ev->send);
673 ev->send = NULL;
674 }
675
676 if (!ev->stream && !send->force)
677 {
678 ev->send = send;
679 _emotion_frame_new(ev->obj);
680 evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
681 goto exit_stream;
682 }
683
684 _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
685
686 INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
687
688 evas_object_image_alpha_set(priv->o, 0);
689 evas_object_image_colorspace_set(priv->o, priv->eformat);
690 evas_object_image_size_set(priv->o, priv->width, priv->height);
691
692 evas_data = evas_object_image_data_get(priv->o, 1);
693
694 if (priv->func)
695 priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
696 else
697 WRN("No way to decode colorspace '%x'!", priv->eformat);
698
699 evas_object_image_data_set(priv->o, evas_data);
700 evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
701 evas_object_image_pixels_dirty_set(priv->o, 0);
702
703 _update_emotion_fps(ev);
704
705 if (!preroll && ev->play_started)
706 {
707 _emotion_playback_started(ev->obj);
708 ev->play_started = 0;
709 }
710
711 if (!send->force)
712 {
713 _emotion_frame_new(ev->obj);
714 }
715
716 gst_element_query_position(ev->pipeline, &fmt, &pos);
717 ev->position = (double)pos / (double)GST_SECOND;
718
719 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
720
721 if (vstream)
722 {
723 vstream->width = priv->width;
724 vstream->height = priv->height;
725 _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
726 }
727
728 ev->ratio = (double) priv->width / (double) priv->height;
729
730 _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
731
732 buffer = gst_buffer_ref(buffer);
733 if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
734 ev->last_buffer = buffer;
735
736 exit_point:
737 if (send) emotion_gstreamer_buffer_free(send);
738
739 exit_stream:
740 if (priv)
741 {
742 if (preroll || !priv->o)
743 {
744 _emotion_pending_ecore_end();
745 return;
746 }
747
748 if (!priv->unlocked)
749 eina_condition_signal(&priv->c);
750 }
751 _emotion_pending_ecore_end();
752}
753
754static void
755unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
756{
757 priv->unlocked = EINA_TRUE;
758
759 eina_condition_signal(&priv->c);
760}
761
762static void
763marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value EINA_UNUSED,
764 guint n_param_values, const GValue * param_values,
765 gpointer invocation_hint EINA_UNUSED, gpointer marshal_data)
766{
767 typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
768 marshalfunc_VOID__MINIOBJECT callback;
769 GCClosure *cc;
770 gpointer data1, data2;
771
772 cc = (GCClosure *) closure;
773
774 g_return_if_fail(n_param_values == 2);
775
776 if (G_CCLOSURE_SWAP_DATA(closure)) {
777 data1 = closure->data;
778 data2 = g_value_peek_pointer(param_values + 0);
779 } else {
780 data1 = g_value_peek_pointer(param_values + 0);
781 data2 = closure->data;
782 }
783 callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
784
785 callback(data1, gst_value_get_mini_object(param_values + 1), data2);
786}
787
788static void
789evas_video_sink_class_init(EvasVideoSinkClass* klass)
790{
791 GObjectClass* gobject_class;
792 GstBaseSinkClass* gstbase_sink_class;
793
794 gobject_class = G_OBJECT_CLASS(klass);
795 gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
796
797 g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
798
799 gobject_class->set_property = evas_video_sink_set_property;
800 gobject_class->get_property = evas_video_sink_get_property;
801
802 g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
803 g_param_spec_pointer ("evas-object", "Evas Object",
804 "The Evas object where the display of the video will be done",
805 G_PARAM_READWRITE));
806
807 g_object_class_install_property (gobject_class, PROP_WIDTH,
808 g_param_spec_int ("width", "Width",
809 "The width of the video",
810 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
811
812 g_object_class_install_property (gobject_class, PROP_HEIGHT,
813 g_param_spec_int ("height", "Height",
814 "The height of the video",
815 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
816 g_object_class_install_property (gobject_class, PROP_EV,
817 g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
818 "THe internal data of the emotion object",
819 G_PARAM_READWRITE));
820
821 gobject_class->dispose = evas_video_sink_dispose;
822
823 gstbase_sink_class->set_caps = evas_video_sink_set_caps;
824 gstbase_sink_class->stop = evas_video_sink_stop;
825 gstbase_sink_class->start = evas_video_sink_start;
826 gstbase_sink_class->unlock = evas_video_sink_unlock;
827 gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
828 gstbase_sink_class->render = evas_video_sink_render;
829 gstbase_sink_class->preroll = evas_video_sink_preroll;
830
831 evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
832 G_TYPE_FROM_CLASS(klass),
833 (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
834 0,
835 0,
836 0,
837 marshal_VOID__MINIOBJECT,
838 G_TYPE_NONE, 1, GST_TYPE_BUFFER);
839}
840
841gboolean
842gstreamer_plugin_init (GstPlugin * plugin)
843{
844 return gst_element_register (plugin,
845 "emotion-sink",
846 GST_RANK_NONE,
847 EVAS_TYPE_VIDEO_SINK);
848}
849
850static void
851_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
852{
853 Emotion_Gstreamer_Video *ev = data;
854 gboolean res;
855
856 if (ecore_thread_check(thread) || !ev->pipeline) return;
857
858 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
859 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
860 if (res == GST_STATE_CHANGE_NO_PREROLL)
861 {
862 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
863 gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
864 }
865}
866
867static void
868_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
869{
870 Emotion_Gstreamer_Video *ev = data;
871
872 ev->threads = eina_list_remove(ev->threads, thread);
873
874#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
875 if (getuid() == geteuid())
876#endif
877 {
878 if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
879 }
880
881 if (ev->in == ev->out && ev->delete_me)
882 ev->api->del(ev);
883}
884
885static void
886_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
887{
888 Emotion_Gstreamer_Video *ev = data;
889
890 ev->threads = eina_list_remove(ev->threads, thread);
891
892 if (ev->play)
893 {
894 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
895 ev->play_started = 1;
896 }
897
898#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
899 if (getuid() == geteuid())
900#endif
901 {
902 if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
903 }
904
905 if (ev->in == ev->out && ev->delete_me)
906 ev->api->del(ev);
907 else
908 _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
909}
910
911static void
912_video_resize(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED,
913 Evas_Coord w, Evas_Coord h)
914{
915#ifdef HAVE_ECORE_X
916 Emotion_Gstreamer_Video *ev = data;
917
918 ecore_x_window_resize(ev->win, w, h);
919 DBG("resize: %i, %i", w, h);
920#else
921 if (data)
922 {
923 DBG("resize: %i, %i (fake)", w, h);
924 }
925#endif
926}
927
928static void
929_video_move(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED,
930 Evas_Coord x, Evas_Coord y)
931{
932#ifdef HAVE_ECORE_X
933 Emotion_Gstreamer_Video *ev = data;
934 unsigned int pos[2];
935
936 DBG("move: %i, %i", x, y);
937 pos[0] = x; pos[1] = y;
938 ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
939#else
940 if (data)
941 {
942 DBG("move: %i, %i (fake)", x, y);
943 }
944#endif
945}
946
947#if 0
948/* Much better idea to always feed the XvImageSink and let him handle optimizing the rendering as we do */
/* NOTE(review): dead code — compiled out by the surrounding #if 0.
 * Kept only as a reference for how the Xv branch used to be detached.
 *
 * Pad-block callback: once the tee source pad feeding the Xv branch is
 * blocked, unlink that branch, push EOS into it so the sink shuts down
 * cleanly, then asynchronously release the block again. */
static void
_block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
{
   if (blocked)
     {
        Emotion_Gstreamer_Video *ev = user_data;
        GstEvent *gev;

        gst_pad_unlink(ev->xvteepad, ev->xvpad);
        /* EOS makes the downstream Xv sink flush and stop. */
        gev = gst_event_new_eos();
        gst_pad_send_event(ev->xvpad, gev);
        /* Re-arm: unblock the pad; no further callback state needed. */
        gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
     }
}
963
964static void
965_block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
966{
967 if (blocked)
968 {
969 Emotion_Gstreamer_Video *ev = user_data;
970
971 gst_pad_link(ev->xvteepad, ev->xvpad);
972 if (ev->play)
973 gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
974 else
975 gst_element_set_state(ev->xvsink, GST_STATE_PAUS