author    Sebastian Dröge <sebastian@centricular.com>    2013-12-23 10:50:53 +0900
committer Cedric BAIL <cedric.bail@samsung.com>          2013-12-23 10:59:42 +0900
commit    9c752106f75c9a6415971cd965eedaf3d15fedc6 (patch)
tree      52c05f04936d7832c689f354624eca9468afaf7f /src/modules/emotion/gstreamer1/emotion_sink.c
parent    574c5c1ba78d6a02f62b26e4070ef266b0d4bc8e (diff)
emotion: initial port of emotion to GStreamer 1.0
Some commits to port emotion to GStreamer 1.0, implement some missing features, clean up the code a bit and fix some bugs along the way. This now works as well as the 0.10 code for me with the emotion examples; only the Samsung hardware-specific code is commented out. That code should be ported by someone who has such hardware, and in a clean way now that GStreamer 1.0 has the features to handle all of this properly. There is still a lot of potential to clean things up and fix bugs, and also to implement zero-copy rendering, but that is for later if there is actual interest in this at all.

Commits:
- Update configure checks and the ecore example to GStreamer 1.0
- Initial port of emotion to GStreamer 1.0; the Samsung-specific code is commented out and should be ported by someone with the hardware
- Return GST_FLOW_FLUSHING when the sink is unlocked
- Remove unused GSignal from the sink
- Use GstVideoInfo to store the format details inside the sink
- Add support for pixel-aspect-ratio
- Store video format information in GstVideoInfo for the different video streams
- Use GstAudioInfo to store the audio format information
- Remove some unused defines
- Header cleanup
- Implement initial support for the GstNavigation interface
- Implement setting of the audio/video channel

Reviewers: cedric

CC: cedric

Differential Revision: https://phab.enlightenment.org/D387

Signed-off-by: Cedric BAIL <cedric.bail@samsung.com>
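A minimal sketch of the GstVideoInfo/GstAudioInfo approach described above — illustrative code only, not part of this commit, and the function name is made up for the example. In GStreamer 1.0 a single caps-parsing call fills in all the negotiated format details (size, pixel format, pixel-aspect-ratio, sample rate, channels), which is what lets the sink below simply keep a GstVideoInfo in its private struct:

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/audio/audio.h>

/* Illustrative only: parse video and audio caps into the 1.0 info
 * structures instead of poking at individual caps fields by hand. */
static gboolean
example_store_format(GstCaps *vcaps, GstCaps *acaps,
                     GstVideoInfo *vinfo, GstAudioInfo *ainfo)
{
   if (!gst_video_info_from_caps(vinfo, vcaps))
     return FALSE;          /* fills width/height/format/par_n/par_d */
   if (!gst_audio_info_from_caps(ainfo, acaps))
     return FALSE;          /* fills rate/channels/sample format */

   g_print("video %dx%d par %d/%d, audio %d Hz, %d ch\n",
           GST_VIDEO_INFO_WIDTH(vinfo), GST_VIDEO_INFO_HEIGHT(vinfo),
           GST_VIDEO_INFO_PAR_N(vinfo), GST_VIDEO_INFO_PAR_D(vinfo),
           GST_AUDIO_INFO_RATE(ainfo), GST_AUDIO_INFO_CHANNELS(ainfo));
   return TRUE;
}

In emotion_sink.c below, evas_video_sink_set_caps() does exactly this for the video side: it keeps the parsed GstVideoInfo in priv->info and derives the Evas colorspace from info.finfo->format.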
Diffstat
-rw-r--r--  src/modules/emotion/gstreamer1/emotion_sink.c | 1006
1 file changed, 1006 insertions, 0 deletions
diff --git a/src/modules/emotion/gstreamer1/emotion_sink.c b/src/modules/emotion/gstreamer1/emotion_sink.c
new file mode 100644
index 0000000000..3a061b6dd8
--- /dev/null
+++ b/src/modules/emotion/gstreamer1/emotion_sink.c
@@ -0,0 +1,1006 @@
1#ifdef HAVE_CONFIG_H
2# include "config.h"
3#endif
4
5#include "emotion_gstreamer.h"
6
7static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
8 GST_PAD_SINK, GST_PAD_ALWAYS,
9 GST_STATIC_CAPS(GST_VIDEO_CAPS_MAKE("{ I420, YV12, YUY2, NV12, BGRx, BGR, BGRA }")));
10
11GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
12#define GST_CAT_DEFAULT evas_video_sink_debug
13
14enum {
15 LAST_SIGNAL
16};
17
18enum {
19 PROP_0,
20 PROP_EVAS_OBJECT,
21 PROP_WIDTH,
22 PROP_HEIGHT,
23 PROP_EV,
24 PROP_LAST
25};
26
27#define _do_init \
28 GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \
29 "emotion-sink", \
30 0, \
31 "emotion video sink")
32
33#define parent_class evas_video_sink_parent_class
34G_DEFINE_TYPE_WITH_CODE(EvasVideoSink,
35 evas_video_sink,
36 GST_TYPE_VIDEO_SINK,
37 _do_init);
38
39
40static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
41static void evas_video_sink_main_render(void *data);
42
43static void
44evas_video_sink_init(EvasVideoSink* sink)
45{
46 EvasVideoSinkPrivate* priv;
47
48 INF("sink init");
49 sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
50 priv->o = NULL;
51 priv->info.width = 0;
52 priv->info.height = 0;
53 priv->eheight = 0;
54 priv->func = NULL;
55 priv->eformat = EVAS_COLORSPACE_ARGB8888;
56 eina_lock_new(&priv->m);
57 eina_condition_new(&priv->c, &priv->m);
58 priv->unlocked = EINA_FALSE;
59}
60
61/**** Object methods ****/
62static void
63_cleanup_priv(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED)
64{
65 EvasVideoSinkPrivate* priv;
66
67 priv = data;
68
69 eina_lock_take(&priv->m);
70 if (priv->o == obj)
71 priv->o = NULL;
72 eina_lock_release(&priv->m);
73}
74
75static void
76evas_video_sink_set_property(GObject * object, guint prop_id,
77 const GValue * value, GParamSpec * pspec)
78{
79 EvasVideoSink* sink;
80 EvasVideoSinkPrivate* priv;
81
82 sink = EVAS_VIDEO_SINK (object);
83 priv = sink->priv;
84
85 switch (prop_id) {
86 case PROP_EVAS_OBJECT:
87 eina_lock_take(&priv->m);
88 if (priv->o)
89 evas_object_event_callback_del(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv);
90 priv->o = g_value_get_pointer (value);
91 INF("sink set Evas_Object %p.", priv->o);
92 if (priv->o)
93 evas_object_event_callback_add(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv, priv);
94 eina_lock_release(&priv->m);
95 break;
96 case PROP_EV:
97 INF("sink set ev.");
98 eina_lock_take(&priv->m);
99 priv->ev = g_value_get_pointer (value);
100 eina_lock_release(&priv->m);
101 break;
102 default:
103 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
104 ERR("invalid property");
105 break;
106 }
107}
108
109static void
110evas_video_sink_get_property(GObject * object, guint prop_id,
111 GValue * value, GParamSpec * pspec)
112{
113 EvasVideoSink* sink;
114 EvasVideoSinkPrivate* priv;
115
116 sink = EVAS_VIDEO_SINK (object);
117 priv = sink->priv;
118
119 switch (prop_id) {
120 case PROP_EVAS_OBJECT:
121 INF("sink get property.");
122 eina_lock_take(&priv->m);
123 g_value_set_pointer(value, priv->o);
124 eina_lock_release(&priv->m);
125 break;
126 case PROP_WIDTH:
127 INF("sink get width.");
128 eina_lock_take(&priv->m);
129 g_value_set_int(value, priv->info.width);
130 eina_lock_release(&priv->m);
131 break;
132 case PROP_HEIGHT:
133 INF("sink get height.");
134 eina_lock_take(&priv->m);
135 g_value_set_int (value, priv->eheight);
136 eina_lock_release(&priv->m);
137 break;
138 case PROP_EV:
139 INF("sink get ev.");
140 eina_lock_take(&priv->m);
141 g_value_set_pointer (value, priv->ev);
142 eina_lock_release(&priv->m);
143 break;
144 default:
145 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
146 ERR("invalid property");
147 break;
148 }
149}
150
151static void
152evas_video_sink_dispose(GObject* object)
153{
154 EvasVideoSink* sink;
155 EvasVideoSinkPrivate* priv;
156
157 INF("dispose.");
158
159 sink = EVAS_VIDEO_SINK(object);
160 priv = sink->priv;
161
162 eina_lock_free(&priv->m);
163 eina_condition_free(&priv->c);
164
165 G_OBJECT_CLASS(parent_class)->dispose(object);
166}
167
168
169/**** BaseSink methods ****/
170
171gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
172{
173 EvasVideoSink* sink;
174 EvasVideoSinkPrivate* priv;
175 GstVideoInfo info;
176 unsigned int i;
177
178 sink = EVAS_VIDEO_SINK(bsink);
179 priv = sink->priv;
180
181 if (!gst_video_info_from_caps(&info, caps))
182 {
183 ERR("Unable to parse caps.");
184 return FALSE;
185 }
186
187 priv->info = info;
188 priv->eheight = info.height;
189
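    /* Map the negotiated GstVideoFormat onto an Evas colorspace and a
     * conversion callback via the colorspace_format_convertion table;
     * if the format is not in the table the caps are rejected below. */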
190 for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
191 if (info.finfo->format == colorspace_format_convertion[i].format)
192 {
193 DBG("Found '%s'", colorspace_format_convertion[i].name);
194 priv->eformat = colorspace_format_convertion[i].eformat;
195 priv->func = colorspace_format_convertion[i].func;
196 if (colorspace_format_convertion[i].force_height)
197 {
198 priv->eheight = (priv->eheight >> 1) << 1;
199 }
200 if (priv->ev)
201 priv->ev->kill_buffer = EINA_TRUE;
202 return TRUE;
203 }
204
205 ERR("unsupported format: %s", gst_video_format_to_string(info.finfo->format));
206 return FALSE;
207}
208
209static gboolean
210evas_video_sink_start(GstBaseSink* base_sink)
211{
212 EvasVideoSinkPrivate* priv;
213 gboolean res = TRUE;
214
215 INF("sink start");
216
217 priv = EVAS_VIDEO_SINK(base_sink)->priv;
218 eina_lock_take(&priv->m);
219 if (!priv->o)
220 res = FALSE;
221 else
222 priv->unlocked = EINA_FALSE;
223 eina_lock_release(&priv->m);
224 return res;
225}
226
227static gboolean
228evas_video_sink_stop(GstBaseSink* base_sink)
229{
230 EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
231
232 INF("sink stop");
233
234 unlock_buffer_mutex(priv);
235 return TRUE;
236}
237
238static gboolean
239evas_video_sink_unlock(GstBaseSink* object)
240{
241 EvasVideoSink* sink;
242
243 INF("sink unlock");
244
245 sink = EVAS_VIDEO_SINK(object);
246
247 unlock_buffer_mutex(sink->priv);
248
249 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
250 (object), TRUE);
251}
252
253static gboolean
254evas_video_sink_unlock_stop(GstBaseSink* object)
255{
256 EvasVideoSink* sink;
257 EvasVideoSinkPrivate* priv;
258
259 sink = EVAS_VIDEO_SINK(object);
260 priv = sink->priv;
261
262 INF("sink unlock stop");
263
264 eina_lock_take(&priv->m);
265 priv->unlocked = EINA_FALSE;
266 eina_lock_release(&priv->m);
267
268 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
269 (object), TRUE);
270}
271
272static GstFlowReturn
273evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
274{
275 Emotion_Gstreamer_Buffer *send;
276 EvasVideoSinkPrivate *priv;
277 EvasVideoSink *sink;
278
279 INF("sink preroll %p [%" G_GSIZE_FORMAT "]", buffer, gst_buffer_get_size(buffer));
280
281 sink = EVAS_VIDEO_SINK(bsink);
282 priv = sink->priv;
283
284 if (gst_buffer_get_size(buffer) <= 0)
285 {
286 WRN("empty buffer");
287 return GST_FLOW_OK;
288 }
289
290 send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
291
292 if (send)
293 {
294 _emotion_pending_ecore_begin();
295 ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
296 }
297
298 return GST_FLOW_OK;
299}
300
301static GstFlowReturn
302evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
303{
304 Emotion_Gstreamer_Buffer *send;
305 EvasVideoSinkPrivate *priv;
306 EvasVideoSink *sink;
307
308 INF("sink render %p", buffer);
309
310 sink = EVAS_VIDEO_SINK(bsink);
311 priv = sink->priv;
312
313 eina_lock_take(&priv->m);
314
315 if (priv->unlocked) {
316 ERR("sink unlocked, flushing");
317 eina_lock_release(&priv->m);
318 return GST_FLOW_FLUSHING;
319 }
320
321 send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
322 if (!send) {
323 eina_lock_release(&priv->m);
324 return GST_FLOW_ERROR;
325 }
326
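    /* Hand the frame over to the Evas main loop and block this streaming
     * thread on priv->c until evas_video_sink_main_render() (or
     * unlock_buffer_mutex() on flush) signals that it is done with it. */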
327 _emotion_pending_ecore_begin();
328 ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
329
330 eina_condition_wait(&priv->c);
331 eina_lock_release(&priv->m);
332
333 return GST_FLOW_OK;
334}
335
336static void
337_update_emotion_fps(Emotion_Gstreamer_Video *ev)
338{
339 double tim;
340
341 if (!debug_fps) return;
342
343 tim = ecore_time_get();
344 ev->frames++;
345
346 if (ev->rlapse == 0.0)
347 {
348 ev->rlapse = tim;
349 ev->flapse = ev->frames;
350 }
351 else if ((tim - ev->rlapse) >= 0.5)
352 {
353 printf("FRAME: %i, FPS: %3.1f\n",
354 ev->frames,
355 (ev->frames - ev->flapse) / (tim - ev->rlapse));
356 ev->rlapse = tim;
357 ev->flapse = ev->frames;
358 }
359}
360
361static void
362evas_video_sink_main_render(void *data)
363{
364 Emotion_Gstreamer_Buffer *send;
365 Emotion_Gstreamer_Video *ev = NULL;
366 Emotion_Video_Stream *vstream;
367 EvasVideoSinkPrivate *priv = NULL;
368 GstBuffer *buffer;
369 GstMapInfo map;
370 unsigned char *evas_data;
371 gint64 pos;
372 Eina_Bool preroll = EINA_FALSE;
373
374 send = data;
375
376 if (!send) goto exit_point;
377
378 priv = send->sink;
379 buffer = send->frame;
380 preroll = send->preroll;
381 ev = send->ev;
382
383 /* frame after cleanup */
384 if (!preroll && !ev->last_buffer)
385 {
386 priv = NULL;
387 goto exit_point;
388 }
389
390 if (!priv || !priv->o || priv->unlocked)
391 goto exit_point;
392
393 if (ev->send && send != ev->send)
394 {
395 emotion_gstreamer_buffer_free(ev->send);
396 ev->send = NULL;
397 }
398
399 if (!ev->stream && !send->force)
400 {
401 ev->send = send;
402 _emotion_frame_new(ev->obj);
403 evas_object_image_data_update_add(priv->o, 0, 0, priv->info.width, priv->eheight);
404 goto exit_stream;
405 }
406
407 if (!gst_buffer_map(buffer, &map, GST_MAP_READ))
408 goto exit_stream;
409
410 _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
411
412 INF("sink main render [%i, %i] (source height: %i)", priv->info.width, priv->eheight, priv->info.height);
413
414 evas_object_image_alpha_set(priv->o, 0);
415 evas_object_image_colorspace_set(priv->o, priv->eformat);
416 evas_object_image_size_set(priv->o, priv->info.width, priv->eheight);
417
418 evas_data = evas_object_image_data_get(priv->o, 1);
419
420 if (priv->func)
421 priv->func(evas_data, map.data, priv->info.width, priv->info.height, priv->eheight);
422 else
423 WRN("No way to decode colorspace %x!", priv->eformat);
424
425 gst_buffer_unmap(buffer, &map);
426
427 evas_object_image_data_set(priv->o, evas_data);
428 evas_object_image_data_update_add(priv->o, 0, 0, priv->info.width, priv->eheight);
429 evas_object_image_pixels_dirty_set(priv->o, 0);
430
431 _update_emotion_fps(ev);
432
433 if (!preroll && ev->play_started)
434 {
435 _emotion_playback_started(ev->obj);
436 ev->play_started = 0;
437 }
438
439 if (!send->force)
440 {
441 _emotion_frame_new(ev->obj);
442 }
443
444 gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &pos);
445 ev->position = (double)pos / (double)GST_SECOND;
446
447 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
448
449 if (vstream)
450 {
451 vstream->info.width = priv->info.width;
452 vstream->info.height = priv->eheight;
453 _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
454 }
455
456 ev->ratio = (double) priv->info.width / (double) priv->eheight;
457 ev->ratio *= (double) priv->info.par_n / (double) priv->info.par_d;
458
459 _emotion_frame_resize(ev->obj, priv->info.width, priv->eheight, ev->ratio);
460
461 buffer = gst_buffer_ref(buffer);
462 if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
463 ev->last_buffer = buffer;
464
465 exit_point:
466 if (send) emotion_gstreamer_buffer_free(send);
467
468 exit_stream:
469 if (priv)
470 {
471 if (preroll || !priv->o)
472 {
473 _emotion_pending_ecore_end();
474 return;
475 }
476
477 if (!priv->unlocked)
478 eina_condition_signal(&priv->c);
479 }
480 _emotion_pending_ecore_end();
481}
482
483static void
484unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
485{
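    /* Mark the sink as unlocked and wake a streaming thread that may be
     * blocked in evas_video_sink_render() waiting on priv->c. */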
486 priv->unlocked = EINA_TRUE;
487
488 eina_condition_signal(&priv->c);
489}
490
491static void
492evas_video_sink_class_init(EvasVideoSinkClass* klass)
493{
494 GObjectClass* gobject_class;
495 GstElementClass* gstelement_class;
496 GstBaseSinkClass* gstbase_sink_class;
497
498 gobject_class = G_OBJECT_CLASS(klass);
499 gstelement_class = GST_ELEMENT_CLASS(klass);
500 gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
501
502 g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
503
504 gobject_class->set_property = evas_video_sink_set_property;
505 gobject_class->get_property = evas_video_sink_get_property;
506
507 g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
508 g_param_spec_pointer ("evas-object", "Evas Object",
509 "The Evas object where the display of the video will be done",
510 G_PARAM_READWRITE));
511
512 g_object_class_install_property (gobject_class, PROP_WIDTH,
513 g_param_spec_int ("width", "Width",
514 "The width of the video",
515 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
516
517 g_object_class_install_property (gobject_class, PROP_HEIGHT,
518 g_param_spec_int ("height", "Height",
519 "The height of the video",
520 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
521 g_object_class_install_property (gobject_class, PROP_EV,
522 g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
523 "The internal data of the emotion object",
524 G_PARAM_READWRITE));
525
526 gobject_class->dispose = evas_video_sink_dispose;
527
528 gst_element_class_add_pad_template(gstelement_class, gst_static_pad_template_get(&sinktemplate));
529 gst_element_class_set_static_metadata(gstelement_class, "Evas video sink",
530 "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
531 "Vincent Torri <vtorri@univ-evry.fr>");
532
533 gstbase_sink_class->set_caps = evas_video_sink_set_caps;
534 gstbase_sink_class->stop = evas_video_sink_stop;
535 gstbase_sink_class->start = evas_video_sink_start;
536 gstbase_sink_class->unlock = evas_video_sink_unlock;
537 gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
538 gstbase_sink_class->render = evas_video_sink_render;
539 gstbase_sink_class->preroll = evas_video_sink_preroll;
540}
541
542gboolean
543gstreamer_plugin_init (GstPlugin * plugin)
544{
545 return gst_element_register (plugin,
546 "emotion-sink",
547 GST_RANK_NONE,
548 EVAS_TYPE_VIDEO_SINK);
549}
550
551static void
552_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
553{
554 Emotion_Gstreamer_Video *ev = data;
555 gboolean res;
556
557 if (ecore_thread_check(thread) || !ev->pipeline) return;
558
559 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
560 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
561 if (res == GST_STATE_CHANGE_NO_PREROLL)
562 {
563 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
564 gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
565 }
566}
567
568static void
569_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
570{
571 Emotion_Gstreamer_Video *ev = data;
572
573 ev->threads = eina_list_remove(ev->threads, thread);
574
575 if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
576
577 if (ev->in == ev->out && ev->delete_me)
578 ev->api->del(ev);
579}
580
581static void
582_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
583{
584 Emotion_Gstreamer_Video *ev = data;
585
586 ev->threads = eina_list_remove(ev->threads, thread);
587
588 if (ev->play)
589 {
590 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
591 ev->play_started = 1;
592 }
593
594 if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
595
596 if (ev->in == ev->out && ev->delete_me)
597 ev->api->del(ev);
598 else
599 _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
600}
601
602static void
603_video_resize(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED,
604 Evas_Coord w, Evas_Coord h)
605{
606#ifdef HAVE_ECORE_X
607 Emotion_Gstreamer_Video *ev = data;
608
609 ecore_x_window_resize(ev->win, w, h);
610 DBG("resize: %i, %i", w, h);
611#else
612 if (data)
613 {
614 DBG("resize: %i, %i (fake)", w, h);
615 }
616#endif
617}
618
619static void
620_video_move(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED,
621 Evas_Coord x, Evas_Coord y)
622{
623#ifdef HAVE_ECORE_X
624 Emotion_Gstreamer_Video *ev = data;
625 unsigned int pos[2];
626
627 DBG("move: %i, %i", x, y);
628 pos[0] = x; pos[1] = y;
629 ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
630#else
631 if (data)
632 {
633 DBG("move: %i, %i (fake)", x, y);
634 }
635#endif
636}
637
638#if 0
639/* Much better idea to always feed the XvImageSink and let it handle optimizing the rendering, as we do */
640static void
641_block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
642{
643 if (blocked)
644 {
645 Emotion_Gstreamer_Video *ev = user_data;
646 GstEvent *gev;
647
648 gst_pad_unlink(ev->xvteepad, ev->xvpad);
649 gev = gst_event_new_eos();
650 gst_pad_send_event(ev->xvpad, gev);
651 gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
652 }
653}
654
655static void
656_block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
657{
658 if (blocked)
659 {
660 Emotion_Gstreamer_Video *ev = user_data;
661
662 gst_pad_link(ev->xvteepad, ev->xvpad);
663 if (ev->play)
664 gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
665 else
666 gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
667 gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
668 }
669}
670#endif
671
672static void
673_video_show(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
674{
675#ifdef HAVE_ECORE_X
676 Emotion_Gstreamer_Video *ev = data;
677
678 DBG("show xv");
679 ecore_x_window_show(ev->win);
680#else
681 if (data)
682 {
683 DBG("show xv (fake)");
684 }
685#endif
686 /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_link_cb, ev); */
687}
688
689static void
690_video_hide(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
691{
692#ifdef HAVE_ECORE_X
693 Emotion_Gstreamer_Video *ev = data;
694
695 DBG("hide xv");
696 ecore_x_window_hide(ev->win);
697#else
698 if (data)
699 {
700 DBG("hide xv (fake)");
701 }
702#endif
703 /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_unlink_cb, ev); */
704}
705
706static void
707_video_update_pixels(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
708{
709 Emotion_Gstreamer_Video *ev = data;
710 Emotion_Gstreamer_Buffer *send;
711
712 if (!ev->send) return;
713
714 send = ev->send;
715 send->force = EINA_TRUE;
716 ev->send = NULL;
717
718 _emotion_pending_ecore_begin();
719 evas_video_sink_main_render(send);
720}
721
722
723static void
724_image_resize(void *data EINA_UNUSED, Evas *e EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event_info EINA_UNUSED)
725{
726}
727
728GstElement *
729gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
730 Evas_Object *o,
731 const char *uri)
732{
733 GstElement *playbin;
734 GstElement *bin = NULL;
735 GstElement *esink = NULL;
736 GstElement *xvsink = NULL;
737 GstElement *tee = NULL;
738 GstElement *queue = NULL;
739 Evas_Object *obj;
740 GstPad *pad;
741 GstPad *teepad;
742 int flags;
743 const char *launch;
744#if defined HAVE_ECORE_X
745 const char *engine = NULL;
746 Eina_List *engines;
747#endif
748
749 obj = emotion_object_image_get(o);
750 if (!obj)
751 {
752// ERR("No Evas_Object specified");
753 return NULL;
754 }
755
756 if (!uri)
757 return NULL;
758
759 launch = emotion_webcam_custom_get(uri);
760 if (launch)
761 {
762 GError *error = NULL;
763
764 playbin = gst_parse_bin_from_description(launch, 1, &error);
765 if (!playbin)
766 {
767 ERR("Unable to set up command '%s': got error '%s'.", launch, error->message);
768 g_error_free(error);
769 return NULL;
770 }
771 if (error)
772 {
773 WRN("got recoverable error '%s' for command '%s'.", error->message, launch);
774 g_error_free(error);
775 }
776 }
777 else
778 {
779 playbin = gst_element_factory_make("playbin", "playbin");
780 if (!playbin)
781 {
782 ERR("Unable to create 'playbin' GstElement.");
783 return NULL;
784 }
785 }
786
787 bin = gst_bin_new(NULL);
788 if (!bin)
789 {
790 ERR("Unable to create GstBin !");
791 goto unref_pipeline;
792 }
793
794 tee = gst_element_factory_make("tee", NULL);
795 if (!tee)
796 {
797 ERR("Unable to create 'tee' GstElement.");
798 goto unref_pipeline;
799 }
800
801#if defined HAVE_ECORE_X
802 if (window_manager_video)
803 {
804 Eina_List *l;
805 const char *ename;
806
807 engines = evas_render_method_list();
808
809 EINA_LIST_FOREACH(engines, l, ename)
810 {
811 if (evas_render_method_lookup(ename) ==
812 evas_output_method_get(evas_object_evas_get(obj)))
813 {
814 engine = ename;
815 break;
816 }
817 }
818
819 if (ev->priority && engine && strstr(engine, "_x11") != NULL)
820 {
821 Ecore_Evas *ee;
822 Evas_Coord x, y, w, h;
823 Ecore_X_Window win;
824 Ecore_X_Window parent;
825
826 evas_object_geometry_get(obj, &x, &y, &w, &h);
827
828 ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
829
830 if (w < 4) w = 4;
831 if (h < 2) h = 2;
832
833 /* Here we really need the help of the window manager; this code will change when we update E17. */
834 parent = (Ecore_X_Window) ecore_evas_window_get(ee);
835 DBG("parent: %x", parent);
836
837 win = ecore_x_window_new(0, x, y, w, h);
838 DBG("creating window: %x [%i, %i, %i, %i]", win, x, y, w, h);
839 if (win)
840 {
841 Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER };
842
843 ecore_x_netwm_window_state_set(win, state, 2);
844 ecore_x_window_hide(win);
845 xvsink = gst_element_factory_make("xvimagesink", NULL);
846 if (xvsink)
847 {
848 unsigned int pos[2];
849
850 gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(xvsink), win);
851 ev->win = win;
852
853 ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
854
855 pos[0] = x; pos[1] = y;
856 ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
857 }
858 else
859 {
860 DBG("destroying win: %x", win);
861 ecore_x_window_free(win);
862 }
863 }
864 }
865 evas_render_method_list_free(engines);
866 }
867#else
868//# warning "missing: ecore_x"
869#endif
870
871 esink = gst_element_factory_make("emotion-sink", "sink");
872 if (!esink)
873 {
874 ERR("Unable to create 'emotion-sink' GstElement.");
875 goto unref_pipeline;
876 }
877
878 g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
879 g_object_set(G_OBJECT(esink), "ev", ev, NULL);
880
881 evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
882 evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _image_resize, ev);
883
884 /* We need a queue to force each video sink into its own thread */
885 queue = gst_element_factory_make("queue", "equeue");
886 if (!queue)
887 {
888 ERR("Unable to create 'queue' GstElement.");
889 goto unref_pipeline;
890 }
891
892 gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL);
893 gst_element_link_many(queue, esink, NULL);
894
895 /* link both sinks to the GstTee */
896 pad = gst_element_get_static_pad(queue, "sink");
897 teepad = gst_element_get_request_pad(tee, "src_%u");
898 gst_pad_link(teepad, pad);
899 gst_object_unref(pad);
900
901 ev->eteepad = teepad;
902
903 /* FIXME: Why a bin that drops the EOS message?! */
904 if (xvsink)
905 {
906 GstElement *fakeeos;
907
908 queue = gst_element_factory_make("queue", "xvqueue");
909 fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
910 if (queue && fakeeos)
911 {
912 GstPad *queue_pad;
913
914 gst_bin_add_many(GST_BIN(bin), fakeeos, NULL);
915
916 gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
917 gst_element_link_many(queue, xvsink, NULL);
918 queue_pad = gst_element_get_static_pad(queue, "sink");
919 gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
920
921 pad = gst_element_get_static_pad(fakeeos, "sink");
922 teepad = gst_element_get_request_pad(tee, "src_%u");
923 gst_pad_link(teepad, pad);
924
925 xvsink = fakeeos;
926
927 ev->xvteepad = teepad;
928 ev->xvpad = pad;
929 }
930 else
931 {
932 if (fakeeos) gst_object_unref(fakeeos);
933 if (queue) gst_object_unref(queue);
934 gst_object_unref(xvsink);
935 xvsink = NULL;
936 }
937 }
938
939 teepad = gst_element_get_static_pad(tee, "sink");
940 gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
941 gst_object_unref(teepad);
942
943#define GST_PLAY_FLAG_NATIVE_VIDEO (1 << 6)
944#define GST_PLAY_FLAG_DOWNLOAD (1 << 7)
945#define GST_PLAY_FLAG_AUDIO (1 << 1)
946#define GST_PLAY_FLAG_NATIVE_AUDIO (1 << 5)
947
948 if (launch)
949 {
950 g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
951 }
952 else
953 {
954 g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
955 g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
956 g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
957 g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
958 }
959
960 evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
961
962 ev->stream = EINA_TRUE;
963
964 if (xvsink)
965 {
966 Evas_Video_Surface video;
967
968 video.version = EVAS_VIDEO_SURFACE_VERSION;
969 video.data = ev;
970 video.parent = NULL;
971 video.move = _video_move;
972 video.resize = _video_resize;
973 video.show = _video_show;
974 video.hide = _video_hide;
975 video.update_pixels = _video_update_pixels;
976
977 evas_object_image_video_surface_set(obj, &video);
978 ev->stream = EINA_FALSE;
979 }
980
981 eina_stringshare_replace(&ev->uri, uri);
982 ev->pipeline = playbin;
983 ev->sink = bin;
984 ev->esink = esink;
985 ev->xvsink = xvsink;
986 ev->tee = tee;
987 ev->threads = eina_list_append(ev->threads,
988 ecore_thread_run(_emotion_gstreamer_pause,
989 _emotion_gstreamer_end,
990 _emotion_gstreamer_cancel,
991 ev));
992
993 /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
994 /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
995 if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
996
997 return playbin;
998
999 unref_pipeline:
1000 if (xvsink) gst_object_unref(xvsink);
1001 if (esink) gst_object_unref(esink);
1002 if (tee) gst_object_unref(tee);
1003 if (bin) gst_object_unref(bin);
1004 gst_object_unref(playbin);
1005 return NULL;
1006}