author    Cedric BAIL <cedric.bail@free.fr>    2011-06-20 17:36:13 +0000
committer Cedric BAIL <cedric.bail@free.fr>    2011-06-20 17:36:13 +0000
commit    c1f20d3e6b8fccf66b133049f806fca0c512664e
tree      c00cf5f8b97d58e488c6d76ba55ba752ca474bc2 /legacy/emotion
parent    2a93705baf38f2a9885225d28c08d2e97e8ef02b
emotion: add a gstreamer sink.
This work was first started by Vincent Torri and Nicolas Aguirre. I took the necessary time to make it work and included it in Emotion with Samsung sponsorship. The code is not yet completely clean and fast, but it works and makes it possible to add further speed improvements later. This was needed infrastructure work.

SVN revision: 60532
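For context, the new sink is selected through the existing Emotion module mechanism (registered below as "gstreamer"), so applications keep using the public Emotion API unchanged. A minimal sketch of driving this backend is shown here; the window setup and the media path are placeholders, not part of this commit:

    /* Sketch only: plays a file through the "gstreamer" Emotion backend.
     * The Ecore_Evas window and the file path below are placeholders. */
    #include <Ecore.h>
    #include <Ecore_Evas.h>
    #include <Emotion.h>

    int
    main(void)
    {
       Ecore_Evas *ee;
       Evas_Object *em;

       ecore_evas_init();
       ee = ecore_evas_new(NULL, 0, 0, 640, 480, NULL);
       ecore_evas_show(ee);

       em = emotion_object_add(ecore_evas_get(ee));     /* Emotion smart object */
       emotion_object_init(em, "gstreamer");            /* pick this backend */
       emotion_object_file_set(em, "/tmp/example.ogv"); /* placeholder media */
       evas_object_resize(em, 640, 480);
       evas_object_show(em);
       emotion_object_play_set(em, EINA_TRUE);

       ecore_main_loop_begin();

       ecore_evas_free(ee);
       ecore_evas_shutdown();
       return 0;
    }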
Diffstat (limited to '')
-rw-r--r--  legacy/emotion/m4/emotion_check.m4                          2
-rw-r--r--  legacy/emotion/src/lib/emotion_private.h                    1
-rw-r--r--  legacy/emotion/src/lib/emotion_smart.c                     10
-rw-r--r--  legacy/emotion/src/modules/gstreamer/Makefile.am            3
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c  470
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h   49
6 files changed, 420 insertions, 115 deletions
diff --git a/legacy/emotion/m4/emotion_check.m4 b/legacy/emotion/m4/emotion_check.m4
index d763b04752..ead676a602 100644
--- a/legacy/emotion/m4/emotion_check.m4
+++ b/legacy/emotion/m4/emotion_check.m4
@@ -33,7 +33,7 @@ GST_MAJORMINOR=0.10
 
 requirement=""
 PKG_CHECK_MODULES([GSTREAMER],
-   [gstreamer-$GST_MAJORMINOR >= $GST_REQS gstreamer-plugins-base-$GST_MAJORMINOR >= $GSTPLUG_REQS evas >= 0.9.9],
+   [gstreamer-$GST_MAJORMINOR >= $GST_REQS gstreamer-plugins-base-$GST_MAJORMINOR >= $GSTPLUG_REQS gstreamer-video-$GST_MAJORMINOR >= $GSTPLUG_REQS evas >= 0.9.9],
    [
       have_dep="yes"
       requirement="gstreamer-$GST_MAJORMINOR gstreamer-plugins-base-$GST_MAJORMINOR"
diff --git a/legacy/emotion/src/lib/emotion_private.h b/legacy/emotion/src/lib/emotion_private.h
index 65ea4c7ed4..3fae9684b2 100644
--- a/legacy/emotion/src/lib/emotion_private.h
+++ b/legacy/emotion/src/lib/emotion_private.h
@@ -113,6 +113,7 @@ struct _Emotion_Video_Module
    Eina_Emotion_Plugins *plugin;
 };
 
+EAPI Evas_Object *_emotion_image_get(const Evas_Object *obj);
 EAPI void *_emotion_video_get(const Evas_Object *obj);
 EAPI void  _emotion_frame_new(Evas_Object *obj);
 EAPI void  _emotion_video_pos_update(Evas_Object *obj, double pos, double len);
diff --git a/legacy/emotion/src/lib/emotion_smart.c b/legacy/emotion/src/lib/emotion_smart.c
index 395649c7e2..5fdbc33319 100644
--- a/legacy/emotion/src/lib/emotion_smart.c
+++ b/legacy/emotion/src/lib/emotion_smart.c
@@ -1547,3 +1547,13 @@ _smart_clip_unset(Evas_Object * obj)
    if (!sd) return;
    evas_object_clip_unset(sd->obj);
 }
+
+EAPI Evas_Object *
+_emotion_image_get(const Evas_Object *obj)
+{
+   Smart_Data *sd;
+
+   sd = evas_object_smart_data_get(obj);
+   if (!sd) return NULL;
+   return sd->obj;
+}
diff --git a/legacy/emotion/src/modules/gstreamer/Makefile.am b/legacy/emotion/src/modules/gstreamer/Makefile.am
index a25daf35ba..ca95f13f0b 100644
--- a/legacy/emotion/src/modules/gstreamer/Makefile.am
+++ b/legacy/emotion/src/modules/gstreamer/Makefile.am
@@ -19,8 +19,7 @@ pkgdir = $(libdir)/emotion
 pkg_LTLIBRARIES = gstreamer.la
 gstreamer_la_SOURCES = \
 emotion_gstreamer.c \
-emotion_gstreamer_pipeline.c \
-emotion_gstreamer_pipeline.h
+emotion_sink.c
 gstreamer_la_LIBADD = @GSTREAMER_LIBS@ $(top_builddir)/src/lib/libemotion.la
 gstreamer_la_LDFLAGS = -no-undefined @lt_enable_auto_import@ -module -avoid-version
 gstreamer_la_LIBTOOLFLAGS = --tag=disable-static
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c
index 041a191b85..0b59718b2b 100644
--- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c
@@ -5,14 +5,12 @@
 
 #include "emotion_private.h"
 #include "emotion_gstreamer.h"
-#include "emotion_gstreamer_pipeline.h"
 #include "Emotion.h"
 
 int _emotion_gstreamer_log_domain = -1;
 
 /* Callbacks to get the eos */
 static Eina_Bool _eos_timer_fct   (void *data);
-static void      _em_buffer_read  (void *data, void *buffer, unsigned int nbyte);
 static void      _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
 static void      _free_metadata   (Emotion_Gstreamer_Metadata *m);
 
@@ -232,6 +230,82 @@ static Emotion_Video_Module em_module =
    NULL /* handle */
 };
 
+static Emotion_Video_Stream *
+emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
+{
+   Emotion_Video_Stream *vstream;
+
+   if (!ev) return NULL;
+
+   vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
+   if (!vstream) return NULL;
+
+   ev->video_streams = eina_list_append(ev->video_streams, vstream);
+   if (eina_error_get())
+     {
+        free(vstream);
+        return NULL;
+     }
+   return vstream;
+}
+
+static void
+emotion_video_stream_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Stream *vstream)
+{
+   if (!ev || !vstream) return;
+
+   ev->video_streams = eina_list_remove(ev->video_streams, vstream);
+   free(vstream);
+}
+
+static const char *
+emotion_visualization_element_name_get(Emotion_Vis visualisation)
+{
+   switch (visualisation)
+     {
+      case EMOTION_VIS_NONE:
+         return NULL;
+      case EMOTION_VIS_GOOM:
+         return "goom";
+      case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
+         return "libvisual_bumpscope";
+      case EMOTION_VIS_LIBVISUAL_CORONA:
+         return "libvisual_corona";
+      case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
+         return "libvisual_dancingparticles";
+      case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
+         return "libvisual_gdkpixbuf";
+      case EMOTION_VIS_LIBVISUAL_G_FORCE:
+         return "libvisual_G-Force";
+      case EMOTION_VIS_LIBVISUAL_GOOM:
+         return "libvisual_goom";
+      case EMOTION_VIS_LIBVISUAL_INFINITE:
+         return "libvisual_infinite";
+      case EMOTION_VIS_LIBVISUAL_JAKDAW:
+         return "libvisual_jakdaw";
+      case EMOTION_VIS_LIBVISUAL_JESS:
+         return "libvisual_jess";
+      case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
+         return "libvisual_lv_analyzer";
+      case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
+         return "libvisual_lv_flower";
+      case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
+         return "libvisual_lv_gltest";
+      case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
+         return "libvisual_lv_scope";
+      case EMOTION_VIS_LIBVISUAL_MADSPIN:
+         return "libvisual_madspin";
+      case EMOTION_VIS_LIBVISUAL_NEBULUS:
+         return "libvisual_nebulus";
+      case EMOTION_VIS_LIBVISUAL_OINKSIE:
+         return "libvisual_oinksie";
+      case EMOTION_VIS_LIBVISUAL_PLASMA:
+         return "libvisual_plazma";
+      default:
+         return "goom";
+     }
+}
+
 static unsigned char
 em_init(Evas_Object *obj,
         void **emotion_video,
@@ -247,7 +321,6 @@ em_init(Evas_Object *obj,
    if (!ev) return 0;
 
    ev->obj = obj;
-   ev->obj_data = NULL;
 
    /* Initialization of gstreamer */
    if (!gst_init_check(NULL, NULL, &error))
@@ -258,11 +331,6 @@ em_init(Evas_Object *obj,
    ev->vis = EMOTION_VIS_NONE;
    ev->volume = 0.8;
 
-   /* Create the file descriptors */
-   ev->pipe = ecore_pipe_add (_em_buffer_read, ev);
-   if (!ev->pipe)
-     goto failure;
-
    *emotion_video = ev;
 
    return 1;
@@ -284,11 +352,6 @@ em_shutdown(void *video)
    if (!ev)
      return 0;
 
-   ecore_pipe_del(ev->pipe);
-
-   /* FIXME: and the evas object ? */
-   if (ev->obj_data) free(ev->obj_data);
-
    EINA_LIST_FREE(ev->audio_streams, astream)
      free(astream);
    EINA_LIST_FREE(ev->video_streams, vstream)
@@ -308,23 +371,258 @@ em_file_open(const char *file,
              void *video)
 {
    Emotion_Gstreamer_Video *ev;
+   Eina_Strbuf *sbuf = NULL;
+   const char *uri;
+   int i;
 
    ev = (Emotion_Gstreamer_Video *)video;
 
-   if (!_emotion_pipeline_build(ev, file))
+   if (!file) return EINA_FALSE;
+   if (strstr(file, "://") == NULL)
+     {
+        sbuf = eina_strbuf_new();
+        eina_strbuf_append(sbuf, "file://");
+        if (strncmp(file, "./", 2) == 0)
+          file += 2;
+        if (*file != '/')
+          {
+             char tmp[PATH_MAX];
+
+             if (getcwd(tmp, PATH_MAX))
+               {
+                  eina_strbuf_append(sbuf, tmp);
+                  eina_strbuf_append(sbuf, "/");
+               }
+          }
+        eina_strbuf_append(sbuf, file);
+     }
+
+   uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
+   DBG("setting file to '%s'", uri);
+   ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
+   if (sbuf) eina_strbuf_free(sbuf);
+
+   if (!ev->pipeline)
      return EINA_FALSE;
 
+   ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
+   if (!ev->eos_bus)
+     {
+        ERR("could not get the bus");
+        return EINA_FALSE;
+     }
+
    /* Evas Object */
    ev->obj = obj;
 
    ev->position = 0.0;
 
+   g_object_get(G_OBJECT(ev->pipeline),
+                "n-audio", &ev->audio_stream_nbr,
+                "n-video", &ev->video_stream_nbr,
+                NULL);
+
+   if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
+     {
+        ERR("No audio nor video stream found");
+        gst_object_unref(ev->pipeline);
+        ev->pipeline = NULL;
+        return EINA_FALSE;
+     }
+
+   /* video stream */
+
+   for (i = 0; i < ev->video_stream_nbr; i++)
+     {
+        Emotion_Video_Stream *vstream;
+        GstPad *pad;
+        GstCaps *caps;
+        GstStructure *structure;
+        GstQuery *query;
+        const GValue *val;
+        gchar *str;
+
+        gdouble length_time = 0.0;
+        gint width;
+        gint height;
+        gint fps_num;
+        gint fps_den;
+        guint32 fourcc = 0;
+
+        g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
+        if (!pad)
+          continue;
+
+        caps = gst_pad_get_negotiated_caps(pad);
+        if (!caps)
+          goto unref_pad_v;
+        structure = gst_caps_get_structure(caps, 0);
+        str = gst_caps_to_string(caps);
+
+        if (!gst_structure_get_int(structure, "width", &width))
+          goto unref_caps_v;
+        if (!gst_structure_get_int(structure, "height", &height))
+          goto unref_caps_v;
+        if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
+          goto unref_caps_v;
+
+        if (g_str_has_prefix(str, "video/x-raw-yuv"))
+          {
+             val = gst_structure_get_value(structure, "format");
+             fourcc = gst_value_get_fourcc(val);
+          }
+        else if (g_str_has_prefix(str, "video/x-raw-rgb"))
+          fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
+        else
+          goto unref_caps_v;
+
+        query = gst_query_new_duration(GST_FORMAT_TIME);
+        if (gst_pad_peer_query(pad, query))
+          {
+             gint64 t;
+
+             gst_query_parse_duration(query, NULL, &t);
+             length_time = (double)t / (double)GST_SECOND;
+          }
+        else
+          goto unref_query_v;
+
+        vstream = emotion_video_stream_new(ev);
+        if (!vstream) goto unref_query_v;
+
+        vstream->length_time = length_time;
+        vstream->width = width;
+        vstream->height = height;
+        vstream->fps_num = fps_num;
+        vstream->fps_den = fps_den;
+        vstream->fourcc = fourcc;
+        vstream->index = i;
+
+     unref_query_v:
+        gst_query_unref(query);
+     unref_caps_v:
+        gst_caps_unref(caps);
+     unref_pad_v:
+        gst_object_unref(pad);
+     }
+
+   /* Audio streams */
+
+   for (i = 0; i < ev->audio_stream_nbr; i++)
+     {
+        Emotion_Audio_Stream *astream;
+        GstPad *pad;
+        GstCaps *caps;
+        GstStructure *structure;
+        GstQuery *query;
+
+        gdouble length_time = 0.0;
+        gint channels;
+        gint samplerate;
+
+        g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
+        if (!pad)
+          continue;
+
+        caps = gst_pad_get_negotiated_caps(pad);
+        if (!caps)
+          goto unref_pad_a;
+        structure = gst_caps_get_structure(caps, 0);
+
+        if (!gst_structure_get_int(structure, "channels", &channels))
+          goto unref_caps_a;
+        if (!gst_structure_get_int(structure, "rate", &samplerate))
+          goto unref_caps_a;
+
+        query = gst_query_new_duration(GST_FORMAT_TIME);
+        if (gst_pad_peer_query(pad, query))
+          {
+             gint64 t;
+
+             gst_query_parse_duration(query, NULL, &t);
+             length_time = (double)t / (double)GST_SECOND;
+          }
+        else
+          goto unref_query_a;
+
+        astream = calloc(1, sizeof(Emotion_Audio_Stream));
+        if (!astream) continue;
+        ev->audio_streams = eina_list_append(ev->audio_streams, astream);
+        if (eina_error_get())
+          {
+             free(astream);
+             continue;
+          }
+
+        astream->length_time = length_time;
+        astream->channels = channels;
+        astream->samplerate = samplerate;
+
+     unref_query_a:
+        gst_query_unref(query);
+     unref_caps_a:
+        gst_caps_unref(caps);
+     unref_pad_a:
+        gst_object_unref(pad);
+     }
+
+   /* Visualization sink */
+
+   if (ev->video_stream_nbr == 0)
+     {
+        GstElement *vis = NULL;
+        Emotion_Video_Stream *vstream;
+        Emotion_Audio_Stream *astream;
+        gint flags;
+        const char *vis_name;
+
+        if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
+          {
+             ERR("pb vis name %d\n", ev->vis);
+             goto finalize;
+          }
+
+        astream = eina_list_data_get(ev->audio_streams);
+
+        vis = gst_element_factory_make(vis_name, "vissink");
+        vstream = emotion_video_stream_new(ev);
+        if (!vstream)
+          goto finalize;
+        else
+          DBG("could not create visualization stream");
+
+        vstream->length_time = astream->length_time;
+        vstream->width = 320;
+        vstream->height = 200;
+        vstream->fps_num = 25;
+        vstream->fps_den = 1;
+        vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
+
+        g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
+        g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
+        flags |= 0x00000008;
+        g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
+     }
+
+ finalize:
+
+   ev->video_stream_nbr = eina_list_count(ev->video_streams);
+   ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
+
+   if (ev->video_stream_nbr == 1)
+     {
+        Emotion_Video_Stream *vstream;
+
+        vstream = eina_list_data_get(ev->video_streams);
+        ev->ratio = (double)vstream->width / (double)vstream->height;
+     }
+
    {
       /* on recapitule : */
       Emotion_Video_Stream *vstream;
       Emotion_Audio_Stream *astream;
 
-      vstream = (Emotion_Video_Stream *)eina_list_data_get(ev->video_streams);
+      vstream = eina_list_data_get(ev->video_streams);
       if (vstream)
         {
            DBG("video size=%dx%d, fps=%d/%d, "
@@ -334,7 +632,7 @@ em_file_open(const char *file,
               GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
         }
 
-      astream = (Emotion_Audio_Stream *)eina_list_data_get(ev->audio_streams);
+      astream = eina_list_data_get(ev->audio_streams);
       if (astream)
         {
            DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
@@ -460,34 +758,16 @@ em_pos_set(void *video,
            double pos)
 {
    Emotion_Gstreamer_Video *ev;
-   GstElement *vsink;
-   GstElement *asink;
+   gboolean res;
 
    ev = (Emotion_Gstreamer_Video *)video;
 
-   g_object_get (G_OBJECT (ev->pipeline),
-                 "video-sink", &vsink,
-                 "audio-sink", &asink,
-                 NULL);
-
-   if (vsink)
-     {
-        gst_element_seek(vsink, 1.0,
-                         GST_FORMAT_TIME,
-                         GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
-                         GST_SEEK_TYPE_SET,
-                         (gint64)(pos * (double)GST_SECOND),
-                         GST_SEEK_TYPE_NONE, -1);
-     }
-   if (asink)
-     {
-        gst_element_seek(asink, 1.0,
-                         GST_FORMAT_TIME,
-                         GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
-                         GST_SEEK_TYPE_SET,
-                         (gint64)(pos * (double)GST_SECOND),
-                         GST_SEEK_TYPE_NONE, -1);
-     }
+   res = gst_element_seek(ev->pipeline, 1.0,
+                          GST_FORMAT_TIME,
+                          GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
+                          GST_SEEK_TYPE_SET,
+                          (gint64)(pos * (double)GST_SECOND),
+                          GST_SEEK_TYPE_NONE, -1);
 }
 
 static double
@@ -741,63 +1021,19 @@ em_video_data_size_get(void *video, int *w, int *h)
 }
 
 static int
-em_yuv_rows_get(void *video,
-                int w,
-                int h,
-                unsigned char **yrows,
-                unsigned char **urows,
-                unsigned char **vrows)
+em_yuv_rows_get(void *video __UNUSED__,
+                int w __UNUSED__,
+                int h __UNUSED__,
+                unsigned char **yrows __UNUSED__,
+                unsigned char **urows __UNUSED__,
+                unsigned char **vrows __UNUSED__)
 {
-   Emotion_Gstreamer_Video *ev;
-   int i;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (ev->obj_data)
-     {
-        if (em_format_get(video) == EMOTION_FORMAT_I420)
-          {
-             for (i = 0; i < h; i++)
-               yrows[i] = &ev->obj_data[i * w];
-
-             for (i = 0; i < (h / 2); i++)
-               urows[i] = &ev->obj_data[h * w + i * (w / 2)];
-
-             for (i = 0; i < (h / 2); i++)
-               vrows[i] = &ev->obj_data[h * w + h * (w / 4) + i * (w / 2)];
-          }
-        else if (em_format_get(video) == EMOTION_FORMAT_YV12)
-          {
-             for (i = 0; i < h; i++)
-               yrows[i] = &ev->obj_data[i * w];
-
-             for (i = 0; i < (h / 2); i++)
-               vrows[i] = &ev->obj_data[h * w + i * (w / 2)];
-
-             for (i = 0; i < (h / 2); i++)
-               urows[i] = &ev->obj_data[h * w + h * (w / 4) + i * (w / 2)];
-          }
-        else
-          return 0;
-
-        return 1;
-     }
-
    return 0;
 }
 
 static int
-em_bgra_data_get(void *video, unsigned char **bgra_data)
+em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
 {
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (ev->obj_data && em_format_get(video) == EMOTION_FORMAT_BGRA)
-     {
-        *bgra_data = ev->obj_data;
-        return 1;
-     }
    return 0;
 }
 
@@ -1117,6 +1353,8 @@ module_open(Evas_Object *obj,
    if (!em_module.init(obj, video, opt))
      return EINA_FALSE;
 
+   eina_threads_init();
+
    *module = &em_module;
    return EINA_TRUE;
 }
@@ -1126,11 +1364,35 @@ module_close(Emotion_Video_Module *module __UNUSED__,
              void *video)
 {
    em_module.shutdown(video);
+
+   eina_threads_shutdown();
 }
 
 Eina_Bool
 gstreamer_module_init(void)
 {
+   GError *error;
+
+   if (!gst_init_check(0, NULL, &error))
+     {
+        EINA_LOG_CRIT("Could not init GStreamer");
+        return EINA_FALSE;
+     }
+
+   if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
+                                  "emotion-sink",
+                                  "video sink plugin for Emotion",
+                                  gstreamer_plugin_init,
+                                  VERSION,
+                                  "LGPL",
+                                  "Enlightenment",
+                                  PACKAGE,
+                                  "http://www.enlightenment.org/") == FALSE)
+     {
+        EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
+        return EINA_FALSE;
+     }
+
    return _emotion_module_register("gstreamer", module_open, module_close);
 }
 
@@ -1279,22 +1541,6 @@ _free_metadata(Emotion_Gstreamer_Metadata *m)
    free(m);
 }
 
-static void
-_em_buffer_read(void *data, void *buf __UNUSED__, unsigned int nbyte __UNUSED__)
-{
-   Emotion_Gstreamer_Video *ev;
-   Emotion_Video_Stream *vstream;
-
-   ev = (Emotion_Gstreamer_Video *)data;
-   _emotion_frame_new(ev->obj);
-   vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
-   if (vstream)
-     {
-        _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
-        _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
-     }
-}
-
 static Eina_Bool
 _eos_timer_fct(void *data)
 {
@@ -1307,7 +1553,7 @@ _eos_timer_fct(void *data)
         _emotion_playback_started(ev->obj);
         ev->play_started = 0;
      }
-   while ((msg = gst_bus_poll(ev->eos_bus, GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_TAG, 0)))
+   while ((msg = gst_bus_poll(ev->eos_bus, GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_TAG | GST_MESSAGE_ASYNC_DONE, 0)))
      {
         switch (GST_MESSAGE_TYPE(msg))
           {
@@ -1345,7 +1591,13 @@ _eos_timer_fct(void *data)
              }
            break;
         }
+      case GST_MESSAGE_ASYNC_DONE:
+         _emotion_seek_done(ev->obj);
+         break;
       default:
+         ERR("bus say: %s [%i]\n",
+             GST_MESSAGE_SRC_NAME(msg),
+             GST_MESSAGE_TYPE(msg));
          break;
      }
    gst_message_unref(msg);
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h
index 79a2b9f715..5281b220f4 100644
--- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h
@@ -8,6 +8,8 @@
 #define HTTP_STREAM 0
 #define RTSP_STREAM 1
 #include <gst/gst.h>
+#include <glib-object.h>
+#include <gst/video/gstvideosink.h>
 
 #include "emotion_private.h"
 
@@ -22,6 +24,7 @@ struct _Emotion_Video_Stream
    gint fps_num;
    gint fps_den;
    guint32 fourcc;
+   int index;
 };
 
 typedef struct _Emotion_Audio_Stream Emotion_Audio_Stream;
@@ -68,7 +71,6 @@ struct _Emotion_Gstreamer_Video
 
    /* Evas object */
    Evas_Object *obj;
-   unsigned char *obj_data;
 
    /* Characteristics of stream */
    double position;
@@ -78,8 +80,6 @@ struct _Emotion_Gstreamer_Video
    volatile int seek_to;
    volatile int get_poslen;
 
-   Ecore_Pipe *pipe;
-
    Emotion_Gstreamer_Metadata *metadata;
 
    Emotion_Vis vis;
@@ -97,4 +97,47 @@ extern int _emotion_gstreamer_log_domain;
 #define ERR(...) EINA_LOG_DOM_ERR(_emotion_gstreamer_log_domain, __VA_ARGS__)
 #define CRITICAL(...) EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__)
 
+#define EVAS_TYPE_VIDEO_SINK evas_video_sink_get_type()
+
+#define EVAS_VIDEO_SINK(obj) \
+   (G_TYPE_CHECK_INSTANCE_CAST((obj), \
+                               EVAS_TYPE_VIDEO_SINK, EvasVideoSink))
+
+#define EVAS_VIDEO_SINK_CLASS(klass) \
+   (G_TYPE_CHECK_CLASS_CAST((klass), \
+                            EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
+
+#define EVAS_IS_VIDEO_SINK(obj) \
+   (G_TYPE_CHECK_INSTANCE_TYPE((obj), \
+                               EVAS_TYPE_VIDEO_SINK))
+
+#define EVAS_IS_VIDEO_SINK_CLASS(klass) \
+   (G_TYPE_CHECK_CLASS_TYPE((klass), \
+                            EVAS_TYPE_VIDEO_SINK))
+
+#define EVAS_VIDEO_SINK_GET_CLASS(obj) \
+   (G_TYPE_INSTANCE_GET_CLASS((obj), \
+                              EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
+
+typedef struct _EvasVideoSink        EvasVideoSink;
+typedef struct _EvasVideoSinkClass   EvasVideoSinkClass;
+typedef struct _EvasVideoSinkPrivate EvasVideoSinkPrivate;
+
+struct _EvasVideoSink {
+    /*< private >*/
+    GstVideoSink parent;
+    EvasVideoSinkPrivate *priv;
+};
+
+struct _EvasVideoSinkClass {
+    /*< private >*/
+    GstVideoSinkClass parent_class;
+};
+
+GstElement *gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
+                                     Evas_Object *obj,
+                                     const char *uri);
+
+gboolean gstreamer_plugin_init(GstPlugin *plugin);
+
 #endif /* __EMOTION_GSTREAMER_H__ */