summaryrefslogtreecommitdiff
path: root/legacy/emotion/src/modules/gstreamer
diff options
context:
space:
mode:
authordoursse <doursse>2008-05-11 18:52:29 +0000
committerdoursse <doursse@7cbeb6ba-43b4-40fd-8cce-4c39aea84d33>2008-05-11 18:52:29 +0000
commitc014ed5267c18bdcd1449970d592972e89f7886f (patch)
tree16cb16d84a27be9c77594969c83a1ddab9e7fb49 /legacy/emotion/src/modules/gstreamer
parentd72b07f4aa8aac8b4466e5b05e3ddd158cd514b0 (diff)
put xine and gstreamer modules in their own subdir
SVN revision: 34539
Diffstat (limited to 'legacy/emotion/src/modules/gstreamer')
-rw-r--r--legacy/emotion/src/modules/gstreamer/Makefile.am33
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c1346
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h87
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c541
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h38
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c123
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c227
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c61
-rw-r--r--legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c63
9 files changed, 2519 insertions, 0 deletions
diff --git a/legacy/emotion/src/modules/gstreamer/Makefile.am b/legacy/emotion/src/modules/gstreamer/Makefile.am
new file mode 100644
index 0000000000..f3519afcaa
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/Makefile.am
@@ -0,0 +1,33 @@
# Automake fragment for the emotion gstreamer backend module.

MAINTAINERCLEANFILES = Makefile.in

# Preprocessor flags: emotion headers plus the Evas/Ecore/GStreamer
# flags detected at configure time.
AM_CPPFLAGS = \
-I$(top_srcdir) \
-I$(top_srcdir)/src/lib \
-DPACKAGE_BIN_DIR=\"$(bindir)\" \
-DPACKAGE_LIB_DIR=\"$(libdir)\" \
-DPACKAGE_DATA_DIR=\"$(datadir)/$(PACKAGE)\" \
@EVAS_CFLAGS@ \
@ECORE_CFLAGS@ \
@GST_CFLAGS@

if BUILD_GSTREAMER_MODULE

# The module is installed as a loadable plugin under $(libdir)/emotion.
pkgdir = $(libdir)/emotion

pkg_LTLIBRARIES = gstreamer.la
gstreamer_la_SOURCES = \
emotion_gstreamer.c \
emotion_gstreamer.h \
emotion_gstreamer_pipeline.c \
emotion_gstreamer_pipeline.h \
emotion_gstreamer_pipeline_cdda.c \
emotion_gstreamer_pipeline_dvd.c \
emotion_gstreamer_pipeline_file.c \
emotion_gstreamer_pipeline_uri.c
gstreamer_la_LIBADD = @EVAS_LIBS@ @ECORE_LIBS@ @GST_LIBS@ $(top_builddir)/src/lib/libemotion.la
# -module -avoid-version: build a dlopen()-able plugin, no .so versioning.
gstreamer_la_LDFLAGS = -module -avoid-version
gstreamer_la_LIBTOOLFLAGS = --tag=disable-static
gstreamer_la_DEPENDENCIES = $(top_builddir)/config.h

endif
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c
new file mode 100644
index 0000000000..dd8a5a9563
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c
@@ -0,0 +1,1346 @@
1#include <unistd.h>
2#include <fcntl.h>
3
4#include "Emotion.h"
5#include "emotion_private.h"
6#include "emotion_gstreamer.h"
7#include "emotion_gstreamer_pipeline.h"
8
9
10/* Callbacks to get the eos */
11static int _eos_timer_fct (void *data);
12
13static int _em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh);
14
15
16/* Interface */
17
18static unsigned char em_init (Evas_Object *obj,
19 void **emotion_video,
20 Emotion_Module_Options *opt);
21
22static int em_shutdown (void *video);
23
24static unsigned char em_file_open (const char *file,
25 Evas_Object *obj,
26 void *video);
27
28static void em_file_close (void *video);
29
30static void em_play (void *video,
31 double pos);
32
33static void em_stop (void *video);
34
35static void em_size_get (void *video,
36 int *width,
37 int *height);
38
39static void em_pos_set (void *video,
40 double pos);
41
42static void em_vis_set (void *video,
43 Emotion_Vis vis);
44
45static double em_len_get (void *video);
46
47static int em_fps_num_get (void *video);
48
49static int em_fps_den_get (void *video);
50
51static double em_fps_get (void *video);
52
53static double em_pos_get (void *video);
54
55static Emotion_Vis em_vis_get (void *video);
56
57static double em_ratio_get (void *video);
58
59static int em_video_handled (void *video);
60
61static int em_audio_handled (void *video);
62
63static int em_seekable (void *video);
64
65static void em_frame_done (void *video);
66
67static Emotion_Format em_format_get (void *video);
68
69static void em_video_data_size_get (void *video,
70 int *w,
71 int *h);
72
73static int em_yuv_rows_get (void *video,
74 int w,
75 int h,
76 unsigned char **yrows,
77 unsigned char **urows,
78 unsigned char **vrows);
79
80static int em_bgra_data_get (void *video,
81 unsigned char **bgra_data);
82
83static void em_event_feed (void *video,
84 int event);
85
86static void em_event_mouse_button_feed (void *video,
87 int button,
88 int x,
89 int y);
90
91static void em_event_mouse_move_feed (void *video,
92 int x,
93 int y);
94
95static int em_video_channel_count (void *video);
96
97static void em_video_channel_set (void *video,
98 int channel);
99
100static int em_video_channel_get (void *video);
101
102static const char *em_video_channel_name_get (void *video,
103 int channel);
104
105static void em_video_channel_mute_set (void *video,
106 int mute);
107
108static int em_video_channel_mute_get (void *video);
109
110static int em_audio_channel_count (void *video);
111
112static void em_audio_channel_set (void *video,
113 int channel);
114
115static int em_audio_channel_get (void *video);
116
117static const char *em_audio_channel_name_get (void *video,
118 int channel);
119
120static void em_audio_channel_mute_set (void *video,
121 int mute);
122
123static int em_audio_channel_mute_get (void *video);
124
125static void em_audio_channel_volume_set (void *video,
126 double vol);
127
128static double em_audio_channel_volume_get (void *video);
129
130static int em_spu_channel_count (void *video);
131
132static void em_spu_channel_set (void *video,
133 int channel);
134
135static int em_spu_channel_get (void *video);
136
137static const char *em_spu_channel_name_get (void *video,
138 int channel);
139
140static void em_spu_channel_mute_set (void *video,
141 int mute);
142
143static int em_spu_channel_mute_get (void *video);
144
145static int em_chapter_count (void *video);
146
147static void em_chapter_set (void *video,
148 int chapter);
149
150static int em_chapter_get (void *video);
151
152static const char *em_chapter_name_get (void *video,
153 int chapter);
154
155static void em_speed_set (void *video,
156 double speed);
157
158static double em_speed_get (void *video);
159
160static int em_eject (void *video);
161
162static const char *em_meta_get (void *video,
163 int meta);
164
165/* Module interface */
166
/* The vtable handed back to emotion core via module_open(); one entry
 * per backend operation, in the order emotion_private.h declares them. */
static Emotion_Video_Module em_module =
{
   em_init, /* init */
   em_shutdown, /* shutdown */
   em_file_open, /* file_open */
   em_file_close, /* file_close */
   em_play, /* play */
   em_stop, /* stop */
   em_size_get, /* size_get */
   em_pos_set, /* pos_set */
   em_vis_set, /* vis_set */
   em_len_get, /* len_get */
   em_fps_num_get, /* fps_num_get */
   em_fps_den_get, /* fps_den_get */
   em_fps_get, /* fps_get */
   em_pos_get, /* pos_get */
   em_vis_get, /* vis_get */
   em_ratio_get, /* ratio_get */
   em_video_handled, /* video_handled */
   em_audio_handled, /* audio_handled */
   em_seekable, /* seekable */
   em_frame_done, /* frame_done */
   em_format_get, /* format_get */
   em_video_data_size_get, /* video_data_size_get */
   em_yuv_rows_get, /* yuv_rows_get */
   em_bgra_data_get, /* bgra_data_get */
   em_event_feed, /* event_feed */
   em_event_mouse_button_feed, /* event_mouse_button_feed */
   em_event_mouse_move_feed, /* event_mouse_move_feed */
   em_video_channel_count, /* video_channel_count */
   em_video_channel_set, /* video_channel_set */
   em_video_channel_get, /* video_channel_get */
   em_video_channel_name_get, /* video_channel_name_get */
   em_video_channel_mute_set, /* video_channel_mute_set */
   em_video_channel_mute_get, /* video_channel_mute_get */
   em_audio_channel_count, /* audio_channel_count */
   em_audio_channel_set, /* audio_channel_set */
   em_audio_channel_get, /* audio_channel_get */
   em_audio_channel_name_get, /* audio_channel_name_get */
   em_audio_channel_mute_set, /* audio_channel_mute_set */
   em_audio_channel_mute_get, /* audio_channel_mute_get */
   em_audio_channel_volume_set, /* audio_channel_volume_set */
   em_audio_channel_volume_get, /* audio_channel_volume_get */
   em_spu_channel_count, /* spu_channel_count */
   em_spu_channel_set, /* spu_channel_set */
   em_spu_channel_get, /* spu_channel_get */
   em_spu_channel_name_get, /* spu_channel_name_get */
   em_spu_channel_mute_set, /* spu_channel_mute_set */
   em_spu_channel_mute_get, /* spu_channel_mute_get */
   em_chapter_count, /* chapter_count */
   em_chapter_set, /* chapter_set */
   em_chapter_get, /* chapter_get */
   em_chapter_name_get, /* chapter_name_get */
   em_speed_set, /* speed_set */
   em_speed_get, /* speed_get */
   em_eject, /* eject */
   em_meta_get, /* meta_get */

   NULL /* handle */
};
227
228static unsigned char
229em_init(Evas_Object *obj,
230 void **emotion_video,
231 Emotion_Module_Options *opt)
232{
233 Emotion_Gstreamer_Video *ev;
234 GError *error;
235 int fds[2];
236
237 if (!emotion_video)
238 return 0;
239
240 printf ("Init gstreamer...\n");
241
242 ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
243 if (!ev) return 0;
244
245 ev->obj = obj;
246 ev->obj_data = NULL;
247
248 /* Initialization of gstreamer */
249 if (!gst_init_check (NULL, NULL, &error))
250 goto failure_gstreamer;
251
252 ev->pipeline = gst_pipeline_new ("pipeline");
253 if (!ev->pipeline)
254 goto failure_pipeline;
255
256 ev->eos_bus = gst_pipeline_get_bus (GST_PIPELINE (ev->pipeline));
257 if (!ev->eos_bus)
258 goto failure_bus;
259
260 /* We allocate the sinks lists */
261 ev->video_sinks = ecore_list_new ();
262 if (!ev->video_sinks)
263 goto failure_video_sinks;
264 ecore_list_free_cb_set(ev->video_sinks, ECORE_FREE_CB(free));
265 ev->audio_sinks = ecore_list_new ();
266 if (!ev->audio_sinks)
267 goto failure_audio_sinks;
268 ecore_list_free_cb_set(ev->audio_sinks, ECORE_FREE_CB(free));
269
270 *emotion_video = ev;
271
272 /* Default values */
273 ev->ratio = 1.0;
274 ev->video_sink_nbr = 0;
275 ev->audio_sink_nbr = 0;
276 ev->vis = EMOTION_VIS_GOOM;
277
278 /* Create the file descriptors */
279 if (pipe(fds) == 0) {
280 ev->fd_ev_read = fds[0];
281 ev->fd_ev_write = fds[1];
282 fcntl(ev->fd_ev_read, F_SETFL, O_NONBLOCK);
283 ev->fd_ev_handler = ecore_main_fd_handler_add(ev->fd_ev_read,
284 ECORE_FD_READ,
285 _em_fd_ev_active,
286 ev,
287 NULL, NULL);
288 ecore_main_fd_handler_active_set(ev->fd_ev_handler, ECORE_FD_READ);
289 }
290 else
291 goto failure_pipe;
292
293 return 1;
294
295 failure_pipe:
296 ecore_list_destroy (ev->audio_sinks);
297 failure_audio_sinks:
298 ecore_list_destroy (ev->video_sinks);
299 failure_video_sinks:
300 gst_object_unref (GST_OBJECT (ev->eos_bus));
301 failure_bus:
302 /* this call is not really necessary */
303 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
304 gst_object_unref (GST_OBJECT (ev->pipeline));
305 failure_pipeline:
306 gst_deinit ();
307 failure_gstreamer:
308 free (ev);
309
310 return 0;
311}
312
313static int
314em_shutdown(void *video)
315{
316 Emotion_Gstreamer_Video *ev;
317
318 ev = (Emotion_Gstreamer_Video *)video;
319 if (!ev)
320 return 0;
321
322 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
323 gst_object_unref (GST_OBJECT (ev->pipeline));
324 gst_object_unref (GST_OBJECT (ev->eos_bus));
325 gst_deinit ();
326
327 ecore_list_destroy (ev->video_sinks);
328 ecore_list_destroy (ev->audio_sinks);
329
330 /* FIXME: and the evas object ? */
331 if (ev->obj_data) free(ev->obj_data);
332
333 ecore_main_fd_handler_del(ev->fd_ev_handler);
334 close(ev->fd_ev_write);
335 close(ev->fd_ev_read);
336
337 free(ev);
338
339 return 1;
340}
341
342static unsigned char
343em_file_open(const char *file,
344 Evas_Object *obj,
345 void *video)
346{
347 Emotion_Gstreamer_Video *ev;
348
349 ev = (Emotion_Gstreamer_Video *)video;
350
351 /* Evas Object */
352 ev->obj = obj;
353
354 /* CD Audio */
355 if (strstr (file,"cdda://")) {
356 const char *device = NULL;
357 unsigned int track = 1;
358
359 device = file + strlen ("cdda://");
360 if (device[0] == '/') {
361 char *tmp;
362
363 if ((tmp = strchr (device, '?')) || (tmp = strchr (device, '#'))) {
364 sscanf (tmp + 1,"%d", &track);
365 tmp[0] = '\0';
366 }
367 }
368 else {
369 device = NULL;
370 sscanf (file,"cdda://%d", &track);
371 }
372 fprintf (stderr, "[Emotion] [gst] build CD Audio pipeline\n");
373 if (!(emotion_pipeline_cdda_build (ev, device, track))) {
374 fprintf (stderr, "[Emotion] [gst] error while building CD Audio pipeline\n");
375 return 0;
376 }
377 }
378 /* Dvd */
379 else if (strstr (file, "dvd://")) {
380
381 fprintf (stderr, "[Emotion] [gst] build DVD pipeline \n");
382 if (!(emotion_pipeline_dvd_build (ev, NULL))) {
383 fprintf (stderr, "[Emotion] [gst] error while building DVD pipeline\n");
384 return 0;
385 }
386 }
387 /* http */
388 else if (strstr (file, "http://")) {
389 fprintf (stderr, "[Emotion] [gst] build URI pipeline \n");
390 if (!(emotion_pipeline_uri_build (ev, file))) {
391 fprintf (stderr, "[Emotion] [gst] error while building URI pipeline\n");
392 return 0;
393 }
394 }
395 /* Normal media file */
396 else {
397 const char *filename;
398
399 filename = strstr (file, "file://")
400 ? file + strlen ("file://")
401 : file;
402
403 fprintf (stderr, "[Emotion] [gst] build file pipeline \n");
404 if (!(emotion_pipeline_file_build (ev, filename))) {
405 fprintf (stderr, "[Emotion] [gst] error while building File pipeline\n");
406 return 0;
407 }
408 }
409
410 ev->position = 0.0;
411
412 {
413 /* on recapitule : */
414 Emotion_Video_Sink *vsink;
415 Emotion_Audio_Sink *asink;
416
417 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
418 if (vsink) {
419 g_print ("video : \n");
420 g_print (" size : %dx%d\n", vsink->width, vsink->height);
421 g_print (" fps : %d/%d\n", vsink->fps_num, vsink->fps_den);
422 g_print (" fourcc : %" GST_FOURCC_FORMAT "\n", GST_FOURCC_ARGS (vsink->fourcc));
423 g_print (" length : %" GST_TIME_FORMAT "\n\n",
424 GST_TIME_ARGS ((guint64)(vsink->length_time * GST_SECOND)));
425 }
426
427 asink = (Emotion_Audio_Sink *)ecore_list_first_goto (ev->audio_sinks);
428 if (asink) {
429 g_print ("audio : \n");
430 g_print (" chan : %d\n", asink->channels);
431 g_print (" rate : %d\n", asink->samplerate);
432 g_print (" length : %" GST_TIME_FORMAT "\n\n",
433 GST_TIME_ARGS ((guint64)(asink->length_time * GST_SECOND)));
434 }
435 }
436
437 return 1;
438}
439
440static void
441em_file_close(void *video)
442{
443 Emotion_Gstreamer_Video *ev;
444 GstIterator *iter;
445 gpointer data;
446 gboolean done;
447
448 ev = (Emotion_Gstreamer_Video *)video;
449 if (!ev)
450 return;
451
452 printf("EX pause end...\n");
453 if (!emotion_object_play_get(ev->obj))
454 {
455 printf(" ... unpause\n");
456 emotion_pipeline_pause (ev->pipeline);
457 }
458
459 printf("EX stop\n");
460 gst_element_set_state (ev->pipeline, GST_STATE_READY);
461
462 /* we remove all the elements in the pipeline */
463 iter = gst_bin_iterate_elements (GST_BIN (ev->pipeline));
464 done = FALSE;
465 while (!done) {
466 switch (gst_iterator_next (iter, &data)) {
467 case GST_ITERATOR_OK: {
468 GstElement *element;
469
470 element = GST_ELEMENT (data);
471 if (element) {
472 gst_bin_remove (GST_BIN (ev->pipeline), element);
473 }
474 break;
475 }
476 case GST_ITERATOR_RESYNC: {
477 GstElement *element;
478
479 element = GST_ELEMENT (data);
480 if (element) {
481 gst_bin_remove (GST_BIN (ev->pipeline), element);
482 }
483 gst_iterator_resync (iter);
484 break;
485 }
486 case GST_ITERATOR_ERROR:
487 printf("error iter\n");
488 done = TRUE;
489 break;
490 case GST_ITERATOR_DONE:
491 done = TRUE;
492 break;
493 }
494 }
495 gst_iterator_free (iter);
496
497 /* we clear the sink lists */
498 ecore_list_clear (ev->video_sinks);
499 ecore_list_clear (ev->audio_sinks);
500
501 /* shutdown eos */
502 if (ev->eos_timer) {
503 ecore_timer_del (ev->eos_timer);
504 ev->eos_timer = NULL;
505 }
506}
507
508static void
509em_play(void *video,
510 double pos)
511{
512 Emotion_Gstreamer_Video *ev;
513
514 ev = (Emotion_Gstreamer_Video *)video;
515 gst_element_set_state (ev->pipeline, GST_STATE_PLAYING);
516 ev->play = 1;
517
518 /* eos */
519 ev->eos_timer = ecore_timer_add (0.1, _eos_timer_fct, ev);
520}
521
522static void
523em_stop(void *video)
524{
525 Emotion_Gstreamer_Video *ev;
526
527 ev = (Emotion_Gstreamer_Video *)video;
528
529 gst_element_set_state (ev->pipeline, GST_STATE_PAUSED);
530 ev->play = 0;
531
532 /* shutdown eos */
533 if (ev->eos_timer) {
534 ecore_timer_del (ev->eos_timer);
535 ev->eos_timer = NULL;
536 }
537}
538
539static void
540em_size_get(void *video,
541 int *width,
542 int *height)
543{
544 Emotion_Gstreamer_Video *ev;
545 Emotion_Video_Sink *vsink;
546
547 ev = (Emotion_Gstreamer_Video *)video;
548
549 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
550 if (vsink) {
551 if (width) *width = vsink->width;
552 if (height) *height = vsink->height;
553 }
554 else {
555 if (width) *width = 0;
556 if (height) *height = 0;
557 }
558}
559
560static void
561em_pos_set(void *video,
562 double pos)
563{
564 Emotion_Gstreamer_Video *ev;
565 Emotion_Video_Sink *vsink;
566 Emotion_Audio_Sink *asink;
567
568 ev = (Emotion_Gstreamer_Video *)video;
569
570 if (ev->seek_to_pos == pos) return;
571
572 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
573 asink = (Emotion_Audio_Sink *)ecore_list_index_goto (ev->video_sinks, ev->audio_sink_nbr);
574
575 if (vsink) {
576 gst_element_seek(vsink->sink, 1.0,
577 GST_FORMAT_TIME,
578 GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
579 GST_SEEK_TYPE_SET,
580 (gint64)(pos * (double)GST_SECOND),
581 GST_SEEK_TYPE_NONE,
582 -1);
583 }
584 if (asink) {
585 gst_element_seek(asink->sink, 1.0,
586 GST_FORMAT_TIME,
587 GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
588 GST_SEEK_TYPE_SET,
589 (gint64)(pos * (double)GST_SECOND),
590 GST_SEEK_TYPE_NONE,
591 -1);
592 }
593 ev->seek_to_pos = pos;
594}
595
596static void
597em_vis_set(void *video,
598 Emotion_Vis vis)
599{
600 Emotion_Gstreamer_Video *ev;
601
602 ev = (Emotion_Gstreamer_Video *)video;
603
604 if (ev->vis == vis) return;
605 ev->vis = vis;
606}
607
608static double
609em_len_get(void *video)
610{
611 Emotion_Gstreamer_Video *ev;
612 Emotion_Video_Sink *vsink;
613
614 ev = (Emotion_Gstreamer_Video *)video;
615
616 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
617 if (vsink)
618 return (double)vsink->length_time;
619
620 return 0.0;
621}
622
623static int
624em_fps_num_get(void *video)
625{
626 Emotion_Gstreamer_Video *ev;
627 Emotion_Video_Sink *vsink;
628
629 ev = (Emotion_Gstreamer_Video *)video;
630
631 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
632 if (vsink)
633 return vsink->fps_num;
634
635 return 0;
636}
637
638static int
639em_fps_den_get(void *video)
640{
641 Emotion_Gstreamer_Video *ev;
642 Emotion_Video_Sink *vsink;
643
644 ev = (Emotion_Gstreamer_Video *)video;
645
646 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
647 if (vsink)
648 return vsink->fps_den;
649
650 return 1;
651}
652
653static double
654em_fps_get(void *video)
655{
656 Emotion_Gstreamer_Video *ev;
657 Emotion_Video_Sink *vsink;
658
659 ev = (Emotion_Gstreamer_Video *)video;
660
661 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
662 if (vsink)
663 return (double)vsink->fps_num / (double)vsink->fps_den;
664
665 return 0.0;
666}
667
668static double
669em_pos_get(void *video)
670{
671 Emotion_Gstreamer_Video *ev;
672
673 ev = (Emotion_Gstreamer_Video *)video;
674
675 return ev->position;
676}
677
678static Emotion_Vis
679em_vis_get(void *video)
680{
681 Emotion_Gstreamer_Video *ev;
682
683 ev = (Emotion_Gstreamer_Video *)video;
684
685 return ev->vis;
686}
687
688static double
689em_ratio_get(void *video)
690{
691 Emotion_Gstreamer_Video *ev;
692
693 ev = (Emotion_Gstreamer_Video *)video;
694
695 return ev->ratio;
696}
697
698static int
699em_video_handled(void *video)
700{
701 Emotion_Gstreamer_Video *ev;
702
703 ev = (Emotion_Gstreamer_Video *)video;
704
705 if (ecore_list_empty_is (ev->video_sinks))
706 return 0;
707
708 return 1;
709}
710
711static int
712em_audio_handled(void *video)
713{
714 Emotion_Gstreamer_Video *ev;
715
716 ev = (Emotion_Gstreamer_Video *)video;
717
718 if (ecore_list_empty_is (ev->audio_sinks))
719 return 0;
720
721 return 1;
722}
723
724static int
725em_seekable(void *video)
726{
727 Emotion_Gstreamer_Video *ev;
728
729 ev = (Emotion_Gstreamer_Video *)video;
730
731 return 1;
732}
733
734static void
735em_frame_done(void *video)
736{
737 Emotion_Gstreamer_Video *ev;
738
739 ev = (Emotion_Gstreamer_Video *)video;
740}
741
742static Emotion_Format
743em_format_get (void *video)
744{
745 Emotion_Gstreamer_Video *ev;
746 Emotion_Video_Sink *vsink;
747
748 ev = (Emotion_Gstreamer_Video *)video;
749
750 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
751 if (vsink) {
752 switch (vsink->fourcc) {
753 case GST_MAKE_FOURCC ('I','4','2','0'):
754 return EMOTION_FORMAT_I420;
755 case GST_MAKE_FOURCC ('Y','V','1','2'):
756 return EMOTION_FORMAT_YV12;
757 case GST_MAKE_FOURCC ('Y','U','Y','2'):
758 return EMOTION_FORMAT_YUY2;
759 case GST_MAKE_FOURCC ('A','R','G','B'):
760 return EMOTION_FORMAT_BGRA;
761 default:
762 return EMOTION_FORMAT_NONE;
763 }
764 }
765 return EMOTION_FORMAT_NONE;
766}
767
768static void
769em_video_data_size_get(void *video, int *w, int *h)
770{
771 Emotion_Gstreamer_Video *ev;
772 Emotion_Video_Sink *vsink;
773
774 ev = (Emotion_Gstreamer_Video *)video;
775
776 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
777 if (vsink) {
778 *w = vsink->width;
779 *h = vsink->height;
780 }
781 else {
782 *w = 0;
783 *h = 0;
784 }
785}
786
787static int
788em_yuv_rows_get(void *video,
789 int w,
790 int h,
791 unsigned char **yrows,
792 unsigned char **urows,
793 unsigned char **vrows)
794{
795 Emotion_Gstreamer_Video *ev;
796 int i;
797
798 ev = (Emotion_Gstreamer_Video *)video;
799
800 if (ev->obj_data)
801 {
802 if (em_format_get(video) == EMOTION_FORMAT_I420) {
803 for (i = 0; i < h; i++)
804 yrows[i] = &ev->obj_data[i * w];
805
806 for (i = 0; i < (h / 2); i++)
807 urows[i] = &ev->obj_data[h * w + i * (w / 2) ];
808
809 for (i = 0; i < (h / 2); i++)
810 vrows[i] = &ev->obj_data[h * w + h * (w /4) + i * (w / 2)];
811 }
812 else if (em_format_get(video) == EMOTION_FORMAT_YV12) {
813 for (i = 0; i < h; i++)
814 yrows[i] = &ev->obj_data[i * w];
815
816 for (i = 0; i < (h / 2); i++)
817 vrows[i] = &ev->obj_data[h * w + i * (w / 2) ];
818
819 for (i = 0; i < (h / 2); i++)
820 urows[i] = &ev->obj_data[h * w + h * (w /4) + i * (w / 2)];
821 }
822 else
823 return 0;
824
825 return 1;
826 }
827
828 return 0;
829}
830
831static int
832em_bgra_data_get(void *video, unsigned char **bgra_data)
833{
834 Emotion_Gstreamer_Video *ev;
835
836 ev = (Emotion_Gstreamer_Video *)video;
837
838 if (ev->obj_data && em_format_get(video) == EMOTION_FORMAT_BGRA) {
839 *bgra_data = ev->obj_data;
840 return 1;
841 }
842 return 0;
843}
844
/* Input-event injection (keys, mouse) — not supported by this backend;
 * these are required vtable entries and intentionally do nothing. */
static void
em_event_feed(void *video, int event)
{
   (void)video;
   (void)event;
}

static void
em_event_mouse_button_feed(void *video, int button, int x, int y)
{
   (void)video;
   (void)button;
   (void)x;
   (void)y;
}

static void
em_event_mouse_move_feed(void *video, int x, int y)
{
   (void)video;
   (void)x;
   (void)y;
}
868
869/* Video channels */
870static int
871em_video_channel_count(void *video)
872{
873 Emotion_Gstreamer_Video *ev;
874
875 ev = (Emotion_Gstreamer_Video *)video;
876
877 return ecore_list_count(ev->video_sinks);
878}
879
880static void
881em_video_channel_set(void *video,
882 int channel)
883{
884 Emotion_Gstreamer_Video *ev;
885
886 ev = (Emotion_Gstreamer_Video *)video;
887
888 if (channel < 0) channel = 0;
889 /* FIXME: a faire... */
890}
891
892static int
893em_video_channel_get(void *video)
894{
895 Emotion_Gstreamer_Video *ev;
896
897 ev = (Emotion_Gstreamer_Video *)video;
898
899 return ev->video_sink_nbr;
900}
901
902static const char *
903em_video_channel_name_get(void *video,
904 int channel)
905{
906 Emotion_Gstreamer_Video *ev;
907
908 ev = (Emotion_Gstreamer_Video *)video;
909
910 return NULL;
911}
912
913static void
914em_video_channel_mute_set(void *video,
915 int mute)
916{
917 Emotion_Gstreamer_Video *ev;
918
919 ev = (Emotion_Gstreamer_Video *)video;
920
921 ev->video_mute = mute;
922}
923
924static int
925em_video_channel_mute_get(void *video)
926{
927 Emotion_Gstreamer_Video *ev;
928
929 ev = (Emotion_Gstreamer_Video *)video;
930
931 return ev->video_mute;
932}
933
934/* Audio channels */
935
936static int
937em_audio_channel_count(void *video)
938{
939 Emotion_Gstreamer_Video *ev;
940
941 ev = (Emotion_Gstreamer_Video *)video;
942
943 return ecore_list_count(ev->audio_sinks);
944}
945
946static void
947em_audio_channel_set(void *video,
948 int channel)
949{
950 Emotion_Gstreamer_Video *ev;
951
952 ev = (Emotion_Gstreamer_Video *)video;
953
954 if (channel < -1) channel = -1;
955 /* FIXME: a faire... */
956}
957
958static int
959em_audio_channel_get(void *video)
960{
961 Emotion_Gstreamer_Video *ev;
962
963 ev = (Emotion_Gstreamer_Video *)video;
964
965 return ev->audio_sink_nbr;
966}
967
968static const char *
969em_audio_channel_name_get(void *video,
970 int channel)
971{
972 Emotion_Gstreamer_Video *ev;
973
974 ev = (Emotion_Gstreamer_Video *)video;
975
976 return NULL;
977}
978
979static void
980em_audio_channel_mute_set(void *video,
981 int mute)
982{
983 Emotion_Gstreamer_Video *ev;
984 GstElement *volume;
985
986 ev = (Emotion_Gstreamer_Video *)video;
987
988 if (ev->audio_mute == mute)
989 return;
990
991 ev->audio_mute = mute;
992 volume = gst_bin_get_by_name (GST_BIN (ev->pipeline), "volume");
993 if (!volume) return;
994
995 if (mute)
996 g_object_set (G_OBJECT (volume), "volume", 0.0, NULL);
997 else
998 g_object_set (G_OBJECT (volume), "volume", ev->volume * 10.0, NULL);
999
1000 gst_object_unref (volume);
1001}
1002
1003static int
1004em_audio_channel_mute_get(void *video)
1005{
1006 Emotion_Gstreamer_Video *ev;
1007
1008 ev = (Emotion_Gstreamer_Video *)video;
1009
1010 return ev->audio_mute;
1011}
1012
1013static void
1014em_audio_channel_volume_set(void *video,
1015 double vol)
1016{
1017 Emotion_Gstreamer_Video *ev;
1018 GstElement *volume;
1019
1020 ev = (Emotion_Gstreamer_Video *)video;
1021
1022 if (vol < 0.0)
1023 vol = 0.0;
1024 if (vol > 1.0)
1025 vol = 1.0;
1026 ev->volume = vol;
1027 volume = gst_bin_get_by_name (GST_BIN (ev->pipeline), "volume");
1028 if (!volume) return;
1029 g_object_set (G_OBJECT (volume), "volume",
1030 vol * 10.0, NULL);
1031 gst_object_unref (volume);
1032}
1033
1034static double
1035em_audio_channel_volume_get(void *video)
1036{
1037 Emotion_Gstreamer_Video *ev;
1038
1039 ev = (Emotion_Gstreamer_Video *)video;
1040
1041 return ev->volume;
1042}
1043
1044/* spu stuff */
1045
1046static int
1047em_spu_channel_count(void *video)
1048{
1049 Emotion_Gstreamer_Video *ev;
1050
1051 ev = (Emotion_Gstreamer_Video *)video;
1052
1053 return 0;
1054}
1055
1056static void
1057em_spu_channel_set(void *video, int channel)
1058{
1059 Emotion_Gstreamer_Video *ev;
1060
1061 ev = (Emotion_Gstreamer_Video *)video;
1062}
1063
1064static int
1065em_spu_channel_get(void *video)
1066{
1067 Emotion_Gstreamer_Video *ev;
1068
1069 ev = (Emotion_Gstreamer_Video *)video;
1070
1071 return 1;
1072}
1073
1074static const char *
1075em_spu_channel_name_get(void *video, int channel)
1076{
1077 Emotion_Gstreamer_Video *ev;
1078
1079 ev = (Emotion_Gstreamer_Video *)video;
1080 return NULL;
1081}
1082
1083static void
1084em_spu_channel_mute_set(void *video, int mute)
1085{
1086 Emotion_Gstreamer_Video *ev;
1087
1088 ev = (Emotion_Gstreamer_Video *)video;
1089}
1090
1091static int
1092em_spu_channel_mute_get(void *video)
1093{
1094 Emotion_Gstreamer_Video *ev;
1095
1096 ev = (Emotion_Gstreamer_Video *)video;
1097
1098 return 0;
1099}
1100
1101static int
1102em_chapter_count(void *video)
1103{
1104 Emotion_Gstreamer_Video *ev;
1105
1106 ev = (Emotion_Gstreamer_Video *)video;
1107 return 0;
1108}
1109
1110static void
1111em_chapter_set(void *video, int chapter)
1112{
1113 Emotion_Gstreamer_Video *ev;
1114
1115 ev = (Emotion_Gstreamer_Video *)video;
1116}
1117
1118static int
1119em_chapter_get(void *video)
1120{
1121 Emotion_Gstreamer_Video *ev;
1122
1123 ev = (Emotion_Gstreamer_Video *)video;
1124
1125 return 0;
1126}
1127
1128static const char *
1129em_chapter_name_get(void *video, int chapter)
1130{
1131 Emotion_Gstreamer_Video *ev;
1132
1133 ev = (Emotion_Gstreamer_Video *)video;
1134
1135 return NULL;
1136}
1137
1138static void
1139em_speed_set(void *video, double speed)
1140{
1141 Emotion_Gstreamer_Video *ev;
1142
1143 ev = (Emotion_Gstreamer_Video *)video;
1144}
1145
1146static double
1147em_speed_get(void *video)
1148{
1149 Emotion_Gstreamer_Video *ev;
1150
1151 ev = (Emotion_Gstreamer_Video *)video;
1152
1153 return 1.0;
1154}
1155
1156static int
1157em_eject(void *video)
1158{
1159 Emotion_Gstreamer_Video *ev;
1160
1161 ev = (Emotion_Gstreamer_Video *)video;
1162
1163 return 1;
1164}
1165
1166static const char *
1167em_meta_get(void *video, int meta)
1168{
1169 Emotion_Gstreamer_Video *ev;
1170 GstBus *bus;
1171 gchar *str = NULL;
1172 gboolean done;
1173
1174 ev = (Emotion_Gstreamer_Video *)video;
1175 if (!ev) return NULL;
1176
1177 done = FALSE;
1178 bus = gst_element_get_bus (ev->pipeline);
1179 if (!bus) return NULL;
1180
1181 while (!done) {
1182 GstMessage *message;
1183
1184 message = gst_bus_pop (bus);
1185 if (message == NULL)
1186 /* All messages read, we're done */
1187 break;
1188
1189 switch (GST_MESSAGE_TYPE (message)) {
1190 case GST_MESSAGE_TAG: {
1191 GstTagList *new_tags;
1192
1193 gst_message_parse_tag (message, &new_tags);
1194
1195 switch (meta) {
1196 case META_TRACK_TITLE:
1197 gst_tag_list_get_string (new_tags, GST_TAG_TITLE, &str);
1198 if (str) done = TRUE;
1199 break;
1200 case META_TRACK_ARTIST:
1201 gst_tag_list_get_string (new_tags, GST_TAG_ARTIST, &str);
1202 if (str) done = TRUE;
1203 break;
1204 case META_TRACK_GENRE:
1205 gst_tag_list_get_string (new_tags, GST_TAG_GENRE, &str);
1206 if (str) done = TRUE;
1207 break;
1208 case META_TRACK_COMMENT:
1209 gst_tag_list_get_string (new_tags, GST_TAG_COMMENT, &str);
1210 if (str) done = TRUE;
1211 break;
1212 case META_TRACK_ALBUM:
1213 gst_tag_list_get_string (new_tags, GST_TAG_ALBUM, &str);
1214 if (str) done = TRUE;
1215 break;
1216 case META_TRACK_YEAR: {
1217 const GValue *date;
1218
1219 date = gst_tag_list_get_value_index (new_tags, GST_TAG_DATE, 0);
1220 if (date)
1221 str = g_strdup_value_contents (date);
1222 if (str) done = TRUE;
1223 break;
1224 }
1225 case META_TRACK_DISCID:
1226#ifdef GST_TAG_CDDA_CDDB_DISCID
1227 gst_tag_list_get_string (new_tags, GST_TAG_CDDA_CDDB_DISCID, &str);
1228#endif
1229 if (str) done = TRUE;
1230 break;
1231 case META_TRACK_COUNT: {
1232 int track_count;
1233
1234 track_count = emotion_pipeline_cdda_track_count_get (video);
1235 if (track_count > 0) {
1236 char buf[64];
1237
1238 g_snprintf (buf, 64, "%d", track_count);
1239 str = g_strdup (buf);
1240 done = TRUE;
1241 }
1242 break;
1243 }
1244 }
1245 break;
1246 }
1247 default:
1248 break;
1249 }
1250 gst_message_unref (message);
1251 }
1252
1253 gst_object_unref (GST_OBJECT (bus));
1254
1255 return str;
1256}
1257
1258unsigned char
1259module_open(Evas_Object *obj,
1260 Emotion_Video_Module **module,
1261 void **video,
1262 Emotion_Module_Options *opt)
1263{
1264 if (!module)
1265 return 0;
1266
1267 if (!em_module.init(obj, video, opt))
1268 return 0;
1269
1270 *module = &em_module;
1271 return 1;
1272}
1273
1274void
1275module_close(Emotion_Video_Module *module,
1276 void *video)
1277{
1278 em_module.shutdown(video);
1279}
1280
1281static int
1282_em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh)
1283{
1284 int fd;
1285 int len;
1286 void *buf[2];
1287 unsigned char *frame_data;
1288 Emotion_Gstreamer_Video *ev;
1289 GstBuffer *buffer;
1290
1291 ev = data;
1292 fd = ecore_main_fd_handler_fd_get(fdh);
1293
1294 while ((len = read(fd, buf, sizeof(buf))) > 0)
1295 {
1296 if (len == sizeof(buf))
1297 {
1298 Emotion_Video_Sink *vsink;
1299
1300 frame_data = buf[0];
1301 buffer = buf[1];
1302 _emotion_frame_new(ev->obj);
1303 vsink = (Emotion_Video_Sink *)ecore_list_index_goto (ev->video_sinks, ev->video_sink_nbr);
1304 _emotion_video_pos_update(ev->obj, ev->position, vsink->length_time);
1305 }
1306 }
1307 return 1;
1308}
1309
1310int _eos_timer_fct (void *data)
1311{
1312 Emotion_Gstreamer_Video *ev;
1313 GstMessage *msg;
1314
1315 ev = (Emotion_Gstreamer_Video *)data;
1316 while ((msg = gst_bus_poll (ev->eos_bus, GST_MESSAGE_ERROR | GST_MESSAGE_EOS, 0))) {
1317 switch (GST_MESSAGE_TYPE(msg)) {
1318 case GST_MESSAGE_ERROR: {
1319 gchar *debug;
1320 GError *err;
1321
1322 gst_message_parse_error (msg, &err, &debug);
1323 g_free (debug);
1324
1325 g_print ("Error: %s\n", err->message);
1326 g_error_free (err);
1327
1328 break;
1329 }
1330 case GST_MESSAGE_EOS:
1331 if (ev->eos_timer)
1332 {
1333 ecore_timer_del(ev->eos_timer);
1334 ev->eos_timer = NULL;
1335 }
1336 ev->play = 0;
1337 _emotion_decode_stop(ev->obj);
1338 _emotion_playback_finished(ev->obj);
1339 break;
1340 default:
1341 break;
1342 }
1343 gst_message_unref (msg);
1344 }
1345 return 1;
1346}
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h
new file mode 100644
index 0000000000..09f0fe61f1
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h
@@ -0,0 +1,87 @@
/* Private header of the emotion GStreamer backend: per-stream sink
 * descriptors and the module's main state structure.
 * NOTE(review): "__EMOTION_GSTREAMER_H__" uses a reserved identifier
 * (leading double underscore) -- left as-is to match the project. */
#ifndef __EMOTION_GSTREAMER_H__
#define __EMOTION_GSTREAMER_H__


#include <Evas.h>
#include <Ecore.h>
#include <Ecore_Data.h>

#include <gst/gst.h>

#include "emotion_private.h"


typedef struct _Emotion_Video_Sink Emotion_Video_Sink;

/* One video (or visualisation) stream: the fakesink receiving decoded
 * frames plus the characteristics read from its caps. */
struct _Emotion_Video_Sink
{
   GstElement *sink;        /* fakesink whose handoff feeds Evas */
   gdouble length_time;     /* stream duration in seconds */
   gint width;              /* frame width in pixels */
   gint height;             /* frame height in pixels */
   gint fps_num;            /* framerate numerator */
   gint fps_den;            /* framerate denominator */
   guint32 fourcc;          /* pixel format fourcc (e.g. ARGB) */
};

typedef struct _Emotion_Audio_Sink Emotion_Audio_Sink;

/* One audio stream and its characteristics. */
struct _Emotion_Audio_Sink
{
   GstElement *sink;        /* sink bin built by emotion_audio_sink_create() */
   gdouble length_time;     /* stream duration in seconds */
   gint channels;
   gint samplerate;
};

typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video;

/* Backend state: one instance per emotion object. */
struct _Emotion_Gstreamer_Video
{
   /* Gstreamer elements */
   GstElement       *pipeline;

   /* eos */
   GstBus           *eos_bus;    /* bus polled by the EOS timer */
   Ecore_Timer      *eos_timer;

   /* Sinks */
   Ecore_List       *video_sinks;   /* list of Emotion_Video_Sink * */
   Ecore_List       *audio_sinks;   /* list of Emotion_Audio_Sink * */

   int               video_sink_nbr;  /* index of the active video sink */
   int               audio_sink_nbr;  /* index of the active audio sink */

   /* Evas object */
   Evas_Object      *obj;
   unsigned char    *obj_data;   /* copy of the last decoded frame */

   /* Characteristics of stream */
   double            position;   /* current position in seconds */
   double            ratio;      /* display aspect ratio (w/h) */
   double            volume;

   /* written from the streaming thread, read from the main loop */
   volatile int      seek_to;
   volatile int      get_poslen;
   volatile double   seek_to_pos;

   /* pipe used to hand frames from the streaming thread to ecore */
   int               fd_ev_read;
   int               fd_ev_write;
   Ecore_Fd_Handler *fd_ev_handler;

   Emotion_Vis       vis;        /* selected audio visualisation */

   unsigned char     play : 1;
   unsigned char     video_mute : 1;
   unsigned char     audio_mute : 1;
};

/* Module entry/exit points looked up by the emotion core. */
unsigned char module_open(Evas_Object *obj,
                          Emotion_Video_Module **module,
                          void **video, Emotion_Module_Options *opt);

void          module_close(Emotion_Video_Module *module,
                           void *video);


#endif /* __EMOTION_GSTREAMER_H__ */
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c
new file mode 100644
index 0000000000..92431ec64d
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c
@@ -0,0 +1,541 @@
1#include <unistd.h>
2#include <fcntl.h>
3
4#include "emotion_private.h"
5#include "emotion_gstreamer.h"
6#include "emotion_gstreamer_pipeline.h"
7
8
9gboolean
10emotion_pipeline_pause (GstElement *pipeline)
11{
12 GstStateChangeReturn res;
13
14 res = gst_element_set_state ((pipeline), GST_STATE_PAUSED);
15 if (res == GST_STATE_CHANGE_FAILURE) {
16 g_print ("Emotion-Gstreamer ERROR: could not pause\n");
17 return 0;
18 }
19
20 res = gst_element_get_state ((pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
21 if (res != GST_STATE_CHANGE_SUCCESS) {
22 g_print ("Emotion-Gstreamer ERROR: could not complete pause\n");
23 return 0;
24 }
25
26 return 1;
27}
28
29/* Send the video frame to the evas object */
30void
31cb_handoff (GstElement *fakesrc,
32 GstBuffer *buffer,
33 GstPad *pad,
34 gpointer user_data)
35{
36 GstQuery *query;
37 void *buf[2];
38
39 Emotion_Gstreamer_Video *ev = ( Emotion_Gstreamer_Video *) user_data;
40 if (!ev)
41 return;
42
43 if (!ev->video_mute) {
44 if (!ev->obj_data)
45 ev->obj_data = (void*) malloc (GST_BUFFER_SIZE(buffer) * sizeof(void));
46
47 memcpy ( ev->obj_data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
48 buf[0] = GST_BUFFER_DATA(buffer);
49 buf[1] = buffer;
50 write(ev->fd_ev_write, buf, sizeof(buf));
51 }
52 else {
53 Emotion_Audio_Sink *asink;
54 asink = (Emotion_Audio_Sink *)ecore_list_index_goto (ev->audio_sinks, ev->audio_sink_nbr);
55 _emotion_video_pos_update(ev->obj, ev->position, asink->length_time);
56 }
57
58 query = gst_query_new_position (GST_FORMAT_TIME);
59 if (gst_pad_query (gst_pad_get_peer (pad), query)) {
60 gint64 position;
61
62 gst_query_parse_position (query, NULL, &position);
63 ev->position = (double)position / (double)GST_SECOND;
64 }
65 gst_query_unref (query);
66}
67
68void
69file_new_decoded_pad_cb (GstElement *decodebin,
70 GstPad *new_pad,
71 gboolean last,
72 gpointer user_data)
73{
74 Emotion_Gstreamer_Video *ev;
75 GstCaps *caps;
76 gchar *str;
77
78 ev = (Emotion_Gstreamer_Video *)user_data;
79 caps = gst_pad_get_caps (new_pad);
80 str = gst_caps_to_string (caps);
81 /* video stream */
82 if (g_str_has_prefix (str, "video/")) {
83 Emotion_Video_Sink *vsink;
84 GstElement *queue;
85 GstPad *videopad;
86
87 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
88 if (!vsink) return;
89 if (!ecore_list_append (ev->video_sinks, vsink)) {
90 free(vsink);
91 return;
92 }
93
94 queue = gst_element_factory_make ("queue", NULL);
95 vsink->sink = gst_element_factory_make ("fakesink", "videosink");
96 gst_bin_add_many (GST_BIN (ev->pipeline), queue, vsink->sink, NULL);
97 gst_element_link (queue, vsink->sink);
98 videopad = gst_element_get_pad (queue, "sink");
99 gst_pad_link (new_pad, videopad);
100 gst_object_unref (videopad);
101 if (ecore_list_count(ev->video_sinks) == 1) {
102 ev->ratio = (double)vsink->width / (double)vsink->height;
103 }
104 gst_element_set_state (queue, GST_STATE_PAUSED);
105 gst_element_set_state (vsink->sink, GST_STATE_PAUSED);
106 }
107 /* audio stream */
108 else if (g_str_has_prefix (str, "audio/")) {
109 Emotion_Audio_Sink *asink;
110 GstPad *audiopad;
111
112 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
113 if (!asink) return;
114 if (!ecore_list_append (ev->audio_sinks, asink)) {
115 free(asink);
116 return;
117 }
118
119 asink->sink = emotion_audio_sink_create (ev, ecore_list_index (ev->audio_sinks));
120 gst_bin_add (GST_BIN (ev->pipeline), asink->sink);
121 audiopad = gst_element_get_pad (asink->sink, "sink");
122 gst_pad_link(new_pad, audiopad);
123 gst_element_set_state (asink->sink, GST_STATE_PAUSED);
124 }
125}
126
127Emotion_Video_Sink *
128emotion_video_sink_new (Emotion_Gstreamer_Video *ev)
129{
130 Emotion_Video_Sink *vsink;
131
132 if (!ev) return NULL;
133
134 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
135 if (!vsink) return NULL;
136
137 if (!ecore_list_append (ev->video_sinks, vsink)) {
138 free (vsink);
139 return NULL;
140 }
141 return vsink;
142}
143
144void
145emotion_video_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink)
146{
147 if (!ev || !vsink) return;
148
149 if (ecore_list_goto (ev->video_sinks, vsink)) {
150 ecore_list_remove (ev->video_sinks);
151 free (vsink);
152 }
153}
154
155Emotion_Video_Sink *
156emotion_visualization_sink_create (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
157{
158 Emotion_Video_Sink *vsink;
159
160 if (!ev) return NULL;
161
162 vsink = emotion_video_sink_new (ev);
163 if (!vsink) return NULL;
164
165 vsink->sink = gst_bin_get_by_name (GST_BIN (asink->sink), "vissink1");
166 if (!vsink->sink) {
167 emotion_video_sink_free (ev, vsink);
168 return NULL;
169 }
170 vsink->width = 320;
171 vsink->height = 200;
172 ev->ratio = (double)vsink->width / (double)vsink->height;
173 vsink->fps_num = 25;
174 vsink->fps_den = 1;
175 vsink->fourcc = GST_MAKE_FOURCC ('A','R','G','B');
176 vsink->length_time = asink->length_time;
177
178 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
179 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
180 g_signal_connect (G_OBJECT (vsink->sink),
181 "handoff",
182 G_CALLBACK (cb_handoff), ev);
183 return vsink;
184}
185
186int
187emotion_pipeline_cdda_track_count_get(void *video)
188{
189 Emotion_Gstreamer_Video *ev;
190 GstBus *bus;
191 guint tracks_count = 0;
192 gboolean done;
193
194 ev = (Emotion_Gstreamer_Video *)video;
195 if (!ev) return tracks_count;
196
197 done = FALSE;
198 bus = gst_element_get_bus (ev->pipeline);
199 if (!bus) return tracks_count;
200
201 while (!done) {
202 GstMessage *message;
203
204 message = gst_bus_pop (bus);
205 if (message == NULL)
206 /* All messages read, we're done */
207 break;
208
209 switch (GST_MESSAGE_TYPE (message)) {
210 case GST_MESSAGE_TAG: {
211 GstTagList *tags;
212
213 gst_message_parse_tag (message, &tags);
214
215 gst_tag_list_get_uint (tags, GST_TAG_TRACK_COUNT, &tracks_count);
216 if (tracks_count) done = TRUE;
217 break;
218 }
219 case GST_MESSAGE_ERROR:
220 default:
221 break;
222 }
223 gst_message_unref (message);
224 }
225
226 gst_object_unref (GST_OBJECT (bus));
227
228 return tracks_count;
229}
230
231GstElement *
232emotion_audio_sink_create (Emotion_Gstreamer_Video *ev, int index)
233{
234 gchar buf[128];
235 GstElement *bin;
236 GstElement *audiobin;
237 GstElement *visbin = NULL;
238 GstElement *tee;
239 GstPad *teepad;
240 GstPad *binpad;
241
242 /* audio sink */
243 bin = gst_bin_new (NULL);
244 if (!bin) return NULL;
245
246 g_snprintf (buf, 128, "tee%d", index);
247 tee = gst_element_factory_make ("tee", buf);
248
249 /* audio part */
250 {
251 GstElement *queue;
252 GstElement *conv;
253 GstElement *resample;
254 GstElement *volume;
255 GstElement *sink;
256 GstPad *audiopad;
257 double vol;
258
259 audiobin = gst_bin_new (NULL);
260
261 queue = gst_element_factory_make ("queue", NULL);
262 conv = gst_element_factory_make ("audioconvert", NULL);
263 resample = gst_element_factory_make ("audioresample", NULL);
264 volume = gst_element_factory_make ("volume", "volume");
265 g_object_get (G_OBJECT (volume), "volume", &vol, NULL);
266 ev->volume = vol / 10.0;
267
268 if (index == 1)
269 sink = gst_element_factory_make ("autoaudiosink", NULL);
270 else
271 sink = gst_element_factory_make ("fakesink", NULL);
272
273 gst_bin_add_many (GST_BIN (audiobin),
274 queue, conv, resample, volume, sink, NULL);
275 gst_element_link_many (queue, conv, resample, volume, sink, NULL);
276
277 audiopad = gst_element_get_pad (queue, "sink");
278 gst_element_add_pad (audiobin, gst_ghost_pad_new ("sink", audiopad));
279 gst_object_unref (audiopad);
280 }
281
282 /* visualisation part */
283 {
284 GstElement *vis = NULL;
285 char *vis_name;
286
287 switch (ev->vis) {
288 case EMOTION_VIS_GOOM:
289 vis_name = "goom";
290 break;
291 case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
292 vis_name = "libvisual_bumpscope";
293 break;
294 case EMOTION_VIS_LIBVISUAL_CORONA:
295 vis_name = "libvisual_corona";
296 break;
297 case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
298 vis_name = "libvisual_dancingparticles";
299 break;
300 case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
301 vis_name = "libvisual_gdkpixbuf";
302 break;
303 case EMOTION_VIS_LIBVISUAL_G_FORCE:
304 vis_name = "libvisual_G-Force";
305 break;
306 case EMOTION_VIS_LIBVISUAL_GOOM:
307 vis_name = "libvisual_goom";
308 break;
309 case EMOTION_VIS_LIBVISUAL_INFINITE:
310 vis_name = "libvisual_infinite";
311 break;
312 case EMOTION_VIS_LIBVISUAL_JAKDAW:
313 vis_name = "libvisual_jakdaw";
314 break;
315 case EMOTION_VIS_LIBVISUAL_JESS:
316 vis_name = "libvisual_jess";
317 break;
318 case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
319 vis_name = "libvisual_lv_analyzer";
320 break;
321 case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
322 vis_name = "libvisual_lv_flower";
323 break;
324 case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
325 vis_name = "libvisual_lv_gltest";
326 break;
327 case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
328 vis_name = "libvisual_lv_scope";
329 break;
330 case EMOTION_VIS_LIBVISUAL_MADSPIN:
331 vis_name = "libvisual_madspin";
332 break;
333 case EMOTION_VIS_LIBVISUAL_NEBULUS:
334 vis_name = "libvisual_nebulus";
335 break;
336 case EMOTION_VIS_LIBVISUAL_OINKSIE:
337 vis_name = "libvisual_oinksie";
338 break;
339 case EMOTION_VIS_LIBVISUAL_PLASMA:
340 vis_name = "libvisual_plazma";
341 break;
342 default:
343 vis_name = "goom";
344 break;
345 }
346
347 g_snprintf (buf, 128, "vis%d", index);
348 if ((vis = gst_element_factory_make (vis_name, buf))) {
349 GstElement *queue;
350 GstElement *conv;
351 GstElement *cspace;
352 GstElement *sink;
353 GstPad *vispad;
354 GstCaps *caps;
355
356 g_snprintf (buf, 128, "visbin%d", index);
357 visbin = gst_bin_new (buf);
358
359 queue = gst_element_factory_make ("queue", NULL);
360 conv = gst_element_factory_make ("audioconvert", NULL);
361 cspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
362 g_snprintf (buf, 128, "vissink%d", index);
363 sink = gst_element_factory_make ("fakesink", buf);
364
365 gst_bin_add_many (GST_BIN (visbin),
366 queue, conv, vis, cspace, sink, NULL);
367 gst_element_link_many (queue, conv, vis, cspace, NULL);
368 caps = gst_caps_new_simple ("video/x-raw-rgb",
369 "bpp", G_TYPE_INT, 32,
370 "width", G_TYPE_INT, 320,
371 "height", G_TYPE_INT, 200,
372 NULL);
373 gst_element_link_filtered (cspace, sink, caps);
374
375 vispad = gst_element_get_pad (queue, "sink");
376 gst_element_add_pad (visbin, gst_ghost_pad_new ("sink", vispad));
377 gst_object_unref (vispad);
378 }
379 }
380
381 gst_bin_add_many (GST_BIN (bin), tee, audiobin, NULL);
382 if (visbin)
383 gst_bin_add (GST_BIN (bin), visbin);
384
385 binpad = gst_element_get_pad (audiobin, "sink");
386 teepad = gst_element_get_request_pad (tee, "src%d");
387 gst_pad_link (teepad, binpad);
388 gst_object_unref (teepad);
389 gst_object_unref (binpad);
390
391 if (visbin) {
392 binpad = gst_element_get_pad (visbin, "sink");
393 teepad = gst_element_get_request_pad (tee, "src%d");
394 gst_pad_link (teepad, binpad);
395 gst_object_unref (teepad);
396 gst_object_unref (binpad);
397 }
398
399 teepad = gst_element_get_pad (tee, "sink");
400 gst_element_add_pad (bin, gst_ghost_pad_new ("sink", teepad));
401 gst_object_unref (teepad);
402
403 return bin;
404}
405
/*
 * After the pipeline has been paused, walk @decoder's src pads and fill
 * the previously-created Emotion_Video_Sink / Emotion_Audio_Sink entries
 * (in creation order) with the negotiated stream characteristics.
 * For audio-only media the first audio stream gets a visualisation sink;
 * when video is present, the pre-built "visbin%d" branch is unlinked.
 */
void
emotion_streams_sinks_get (Emotion_Gstreamer_Video *ev, GstElement *decoder)
{
   GstIterator *it;
   gpointer data;

   /* Rewind both lists: pads are matched to sinks by order of creation. */
   ecore_list_first_goto (ev->video_sinks);
   ecore_list_first_goto (ev->audio_sinks);

   it = gst_element_iterate_src_pads (decoder);
   while (gst_iterator_next (it, &data) == GST_ITERATOR_OK) {
      GstPad *pad;
      GstCaps *caps;
      gchar *str;

      pad = GST_PAD (data);

      caps = gst_pad_get_caps (pad);
      str = gst_caps_to_string (caps);
      g_print ("caps !! %s\n", str);

      /* video stream */
      if (g_str_has_prefix (str, "video/")) {
         Emotion_Video_Sink *vsink;

         vsink = (Emotion_Video_Sink *)ecore_list_next (ev->video_sinks);

         emotion_video_sink_fill (vsink, pad, caps);
      }
      /* audio stream */
      else if (g_str_has_prefix (str, "audio/")) {
         Emotion_Audio_Sink *asink;
         gint index;

         asink = (Emotion_Audio_Sink *)ecore_list_next (ev->audio_sinks);

         emotion_audio_sink_fill (asink, pad, caps);

         /* index is 1-based here: ecore_list_next above advanced the list */
         index = ecore_list_index (ev->audio_sinks);

         if (ecore_list_count (ev->video_sinks) == 0) {
            /* no video: the first audio stream drives a visualisation */
            if (index == 1) {
               Emotion_Video_Sink *vsink;

               vsink = emotion_visualization_sink_create (ev, asink);
               if (!vsink) goto finalize;
            }
         }
         else {
            /* video present: detach the unused visualisation branch */
            gchar buf[128];
            GstElement *visbin;

            g_snprintf (buf, 128, "visbin%d", index);
            visbin = gst_bin_get_by_name (GST_BIN (ev->pipeline), buf);
            if (visbin) {
               GstPad *srcpad;
               GstPad *sinkpad;

               sinkpad = gst_element_get_pad (visbin, "sink");
               srcpad = gst_pad_get_peer (sinkpad);
               gst_pad_unlink (srcpad, sinkpad);

               gst_object_unref (srcpad);
               gst_object_unref (sinkpad);
            }
         }
      }
   /* per-pad cleanup, reached on every path (including the goto above) */
   finalize:
      gst_caps_unref (caps);
      g_free (str);
      gst_object_unref (pad);
   }
   gst_iterator_free (it);
}
480
481void
482emotion_video_sink_fill (Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps)
483{
484 GstStructure *structure;
485 GstQuery *query;
486 const GValue *val;
487 gchar *str;
488
489 structure = gst_caps_get_structure (caps, 0);
490 str = gst_caps_to_string (caps);
491
492 gst_structure_get_int (structure, "width", &vsink->width);
493 gst_structure_get_int (structure, "height", &vsink->height);
494
495 vsink->fps_num = 1;
496 vsink->fps_den = 1;
497 val = gst_structure_get_value (structure, "framerate");
498 if (val) {
499 vsink->fps_num = gst_value_get_fraction_numerator (val);
500 vsink->fps_den = gst_value_get_fraction_denominator (val);
501 }
502 if (g_str_has_prefix(str, "video/x-raw-yuv")) {
503 val = gst_structure_get_value (structure, "format");
504 vsink->fourcc = gst_value_get_fourcc (val);
505 }
506 else if (g_str_has_prefix(str, "video/x-raw-rgb"))
507 vsink->fourcc = GST_MAKE_FOURCC ('A','R','G','B');
508 else
509 vsink->fourcc = 0;
510
511 query = gst_query_new_duration (GST_FORMAT_TIME);
512 if (gst_pad_query (pad, query)) {
513 gint64 time;
514
515 gst_query_parse_duration (query, NULL, &time);
516 vsink->length_time = (double)time / (double)GST_SECOND;
517 }
518 g_free (str);
519 gst_query_unref (query);
520}
521
522void
523emotion_audio_sink_fill (Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps)
524{
525 GstStructure *structure;
526 GstQuery *query;
527
528 structure = gst_caps_get_structure (caps, 0);
529
530 gst_structure_get_int (structure, "channels", &asink->channels);
531 gst_structure_get_int (structure, "rate", &asink->samplerate);
532
533 query = gst_query_new_duration (GST_FORMAT_TIME);
534 if (gst_pad_query (pad, query)) {
535 gint64 time;
536
537 gst_query_parse_duration (query, NULL, &time);
538 asink->length_time = (double)time / (double)GST_SECOND;
539 }
540 gst_query_unref (query);
541 }
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h
new file mode 100644
index 0000000000..f2a237ab77
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h
@@ -0,0 +1,38 @@
/* Internal pipeline-construction API shared by the per-source builders
 * (file / uri / dvd / cdda).
 * NOTE(review): this header uses Emotion_Gstreamer_Video & friends but
 * only includes <gst/gst.h>; it relies on emotion_gstreamer.h being
 * included first by every .c file -- it is not self-contained. */
#ifndef __EMOTION_GSTREAMER_PIPELINE_H__
#define __EMOTION_GSTREAMER_PIPELINE_H__


#include <gst/gst.h>


/* Pause the pipeline and wait for completion; 1 on success, 0 on error. */
gboolean emotion_pipeline_pause (GstElement *pipeline);

/* Per-source pipeline builders; each returns 1 on success, 0 on failure. */
int emotion_pipeline_cdda_build (void *video, const char * device, unsigned int track);
int emotion_pipeline_file_build (void *video, const char *file);
int emotion_pipeline_uri_build (void *video, const char *uri);
int emotion_pipeline_dvd_build (void *video, const char *device);
/* Number of tracks of the opened CD, 0 when unknown. */
int emotion_pipeline_cdda_track_count_get(void *video);

/* Build the tee/audio/visualisation sink bin for audio stream @index. */
GstElement *emotion_audio_sink_create (Emotion_Gstreamer_Video *ev, int index);
/* Allocate a video sink and register it in ev->video_sinks. */
Emotion_Video_Sink *emotion_video_sink_new (Emotion_Gstreamer_Video *ev);
/* Unregister and free a video sink. */
void emotion_video_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink);
/* Expose the visualisation fakesink of @asink as a video sink. */
Emotion_Video_Sink *emotion_visualization_sink_create (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink);

/* Fill the sink lists from the negotiated pads of @decoder. */
void emotion_streams_sinks_get (Emotion_Gstreamer_Video *ev, GstElement *decoder);

/* Read stream characteristics from caps/pad into the sink structs. */
void emotion_video_sink_fill (Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps);

void emotion_audio_sink_fill (Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps);

/* fakesink "handoff" callback delivering decoded frames to Evas. */
void cb_handoff (GstElement *fakesrc,
		 GstBuffer  *buffer,
		 GstPad     *pad,
		 gpointer    user_data);

/* decodebin "new-decoded-pad" callback building the sink chains. */
void file_new_decoded_pad_cb (GstElement *decodebin,
			      GstPad     *new_pad,
			      gboolean    last,
			      gpointer    user_data);


#endif /* __EMOTION_GSTREAMER_PIPELINE_H__ */
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c
new file mode 100644
index 0000000000..e8f483345b
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c
@@ -0,0 +1,123 @@
1
2#include "emotion_gstreamer.h"
3#include "emotion_gstreamer_pipeline.h"
4
5
/* Allocate an Emotion_Audio_Sink and register it in ev->audio_sinks. */
static Emotion_Audio_Sink *_emotion_audio_sink_new (Emotion_Gstreamer_Video *ev);

/* Unregister @asink from ev->audio_sinks and free it. */
static void _emotion_audio_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink);
9
/*
 * Build an audio-CD pipeline: cdiocddasrc -> audio sink bin (+ attached
 * visualisation sink).  @device may be NULL for the default drive;
 * @track selects the track to play.  Returns 1 on success, 0 on failure
 * (with everything that was added to the pipeline rolled back through
 * the goto ladder below).
 */
int
emotion_pipeline_cdda_build (void *video, const char * device, unsigned int track)
{
   GstElement *cdiocddasrc;
   Emotion_Video_Sink *vsink;
   Emotion_Audio_Sink *asink;
   Emotion_Gstreamer_Video *ev;
/*    GstFormat format; */
/*    gint64 tracks_count; */

   ev = (Emotion_Gstreamer_Video *)video;
   if (!ev) return 0;

   cdiocddasrc = gst_element_factory_make ("cdiocddasrc", "src");
   if (!cdiocddasrc) {
      g_print ("cdiocddasrc element missing. Install it.\n");
      goto failure_cdiocddasrc;
   }

   if (device)
      g_object_set (G_OBJECT (cdiocddasrc), "device", device, NULL);

   g_object_set (G_OBJECT (cdiocddasrc), "track", track, NULL);

   asink = _emotion_audio_sink_new (ev);
   if (!asink)
      goto failure_emotion_sink;

   /* index 1: the one audio stream of a CD is always audible */
   asink->sink = emotion_audio_sink_create (ev, 1);
   if (!asink->sink)
      goto failure_gstreamer_sink;

   gst_bin_add_many((GST_BIN (ev->pipeline)), cdiocddasrc, asink->sink, NULL);

   if (!gst_element_link (cdiocddasrc, asink->sink))
      goto failure_link;

   /* CDs have no video: render a visualisation instead */
   vsink = emotion_visualization_sink_create (ev, asink);
   if (!vsink) goto failure_link;

   if (!emotion_pipeline_pause (ev->pipeline))
      goto failure_gstreamer_pause;

   {
      GstQuery *query;
      GstPad *pad;
      GstCaps *caps;
      GstStructure *structure;

      /* should always be found */
      pad = gst_element_get_pad (cdiocddasrc, "src");

      caps = gst_pad_get_caps (pad);
      structure = gst_caps_get_structure (GST_CAPS (caps), 0);

      gst_structure_get_int (structure, "channels", &asink->channels);
      gst_structure_get_int (structure, "rate", &asink->samplerate);

      gst_caps_unref (caps);

      query = gst_query_new_duration (GST_FORMAT_TIME);
      if (gst_pad_query (pad, query)) {
         gint64 time;

         gst_query_parse_duration (query, NULL, &time);
         asink->length_time = (double)time / (double)GST_SECOND;
         /* the visualisation mirrors the audio duration */
         vsink->length_time = asink->length_time;
      }
      gst_query_unref (query);
      gst_object_unref (GST_OBJECT (pad));
   }

   return 1;

   /* unwind in reverse order of construction */
 failure_gstreamer_pause:
   emotion_video_sink_free (ev, vsink);
 failure_link:
   gst_bin_remove (GST_BIN (ev->pipeline), asink->sink);
 failure_gstreamer_sink:
   _emotion_audio_sink_free (ev, asink);
 failure_emotion_sink:
   gst_bin_remove (GST_BIN (ev->pipeline), cdiocddasrc);
 failure_cdiocddasrc:

   return 0;
}
96
97static Emotion_Audio_Sink *
98_emotion_audio_sink_new (Emotion_Gstreamer_Video *ev)
99{
100 Emotion_Audio_Sink *asink;
101
102 if (!ev) return NULL;
103
104 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
105 if (!asink) return NULL;
106
107 if (!ecore_list_append (ev->audio_sinks, asink)) {
108 free (asink);
109 return NULL;
110 }
111 return asink;
112}
113
114static void
115_emotion_audio_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
116{
117 if (!ev || !asink) return;
118
119 if (ecore_list_goto (ev->audio_sinks, asink)) {
120 ecore_list_remove (ev->audio_sinks);
121 free (asink);
122 }
123}
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c
new file mode 100644
index 0000000000..2408817041
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c
@@ -0,0 +1,227 @@
1
2#include "emotion_gstreamer.h"
3#include "emotion_gstreamer_pipeline.h"
4
5
6static void dvd_pad_added_cb (GstElement *dvddemuxer,
7 GObject *new_pad,
8 gpointer user_data);
9
10static void dvd_no_more_pads_cb (GstElement *dvddemuxer,
11 gpointer user_data);
12
13static int no_more_pads = 0;
14
15
16int
17emotion_pipeline_dvd_build (void *video, const char *device)
18{
19 GstElement *dvdreadsrc;
20 GstElement *dvddemux;
21 Emotion_Gstreamer_Video *ev;
22
23 ev = (Emotion_Gstreamer_Video *)video;
24 if (!ev) return 0;
25
26 dvdreadsrc = gst_element_factory_make ("dvdreadsrc", "src");
27 if (!dvdreadsrc)
28 goto failure_dvdreadsrc;
29 if (device)
30 g_object_set (G_OBJECT (dvdreadsrc), "device", device, NULL);
31
32 dvddemux = gst_element_factory_make ("dvddemux", "dvddemux");
33 if (!dvddemux)
34 goto failure_dvddemux;
35 g_signal_connect (dvddemux, "pad-added",
36 G_CALLBACK (dvd_pad_added_cb), ev);
37 g_signal_connect (dvddemux, "no-more-pads",
38 G_CALLBACK (dvd_no_more_pads_cb), ev);
39
40 gst_bin_add_many (GST_BIN (ev->pipeline), dvdreadsrc, dvddemux, NULL);
41 if (!gst_element_link (dvdreadsrc, dvddemux))
42 goto failure_link;
43
44 if (!emotion_pipeline_pause (ev->pipeline))
45 goto failure_gstreamer_pause;
46
47 while (no_more_pads == 0) {
48 g_print ("toto\n");}
49 no_more_pads = 0;
50
51 /* We get the informations of streams */
52 ecore_list_first_goto (ev->video_sinks);
53 ecore_list_first_goto (ev->audio_sinks);
54
55 {
56 GstIterator *it;
57 gpointer data;
58
59 it = gst_element_iterate_src_pads (dvddemux);
60 while (gst_iterator_next (it, &data) == GST_ITERATOR_OK) {
61 GstPad *pad;
62 GstCaps *caps;
63 gchar *str;
64
65 pad = GST_PAD (data);
66
67 caps = gst_pad_get_caps (pad);
68 str = gst_caps_to_string (caps);
69 g_print ("caps !! %s\n", str);
70 /* video stream */
71 if (g_str_has_prefix (str, "video/mpeg")) {
72 Emotion_Video_Sink *vsink;
73 GstPad *sink_pad;
74 GstCaps *sink_caps;
75
76 vsink = (Emotion_Video_Sink *)ecore_list_next (ev->video_sinks);
77 sink_pad = gst_element_get_pad (gst_bin_get_by_name (GST_BIN (ev->pipeline), "mpeg2dec"), "src");
78 sink_caps = gst_pad_get_caps (sink_pad);
79 str = gst_caps_to_string (sink_caps);
80 g_print (" ** caps v !! %s\n", str);
81
82 emotion_video_sink_fill (vsink, sink_pad, sink_caps);
83
84 gst_caps_unref (sink_caps);
85 gst_object_unref (sink_pad);
86 }
87 /* audio stream */
88 else if (g_str_has_prefix (str, "audio/")) {
89 Emotion_Audio_Sink *asink;
90 GstPad *sink_pad;
91 GstCaps *sink_caps;
92
93 asink = (Emotion_Audio_Sink *)ecore_list_next (ev->audio_sinks);
94 sink_pad = gst_element_get_pad (gst_bin_get_by_name (GST_BIN (ev->pipeline), "a52dec"), "src");
95 sink_caps = gst_pad_get_caps (sink_pad);
96
97 emotion_audio_sink_fill (asink, sink_pad, sink_caps);
98 }
99 gst_caps_unref (caps);
100 g_free (str);
101 gst_object_unref (pad);
102 }
103 gst_iterator_free (it);
104 }
105
106 /* The first vsink is a valid Emotion_Video_Sink * */
107 /* If no video stream is found, it's a visualisation sink */
108 {
109 Emotion_Video_Sink *vsink;
110
111 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
112 if (vsink && vsink->sink) {
113 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
114 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
115 g_signal_connect (G_OBJECT (vsink->sink),
116 "handoff",
117 G_CALLBACK (cb_handoff), ev);
118 }
119 }
120
121 return 1;
122
123 failure_gstreamer_pause:
124 failure_link:
125 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
126 gst_bin_remove (GST_BIN (ev->pipeline), dvddemux);
127 failure_dvddemux:
128 gst_bin_remove (GST_BIN (ev->pipeline), dvdreadsrc);
129 failure_dvdreadsrc:
130
131 return 0;
132}
133
134static void
135dvd_pad_added_cb (GstElement *dvddemuxer,
136 GObject *new_pad,
137 gpointer user_data)
138{
139 Emotion_Gstreamer_Video *ev;
140 GstCaps *caps;
141 gchar *str;
142
143 ev = (Emotion_Gstreamer_Video *)user_data;
144 caps = gst_pad_get_caps (GST_PAD (new_pad));
145 str = gst_caps_to_string (caps);
146 /* video stream */
147 if (g_str_has_prefix (str, "video/mpeg")) {
148 Emotion_Video_Sink *vsink;
149 GstElement *queue;
150 GstElement *decoder;
151 GstPad *videopad;
152
153 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
154 if (!vsink) return;
155 if (!ecore_list_append (ev->video_sinks, vsink)) {
156 free(vsink);
157 return;
158 }
159
160 queue = gst_element_factory_make ("queue", NULL);
161 decoder = gst_element_factory_make ("mpeg2dec", "mpeg2dec");
162 vsink->sink = gst_element_factory_make ("fakesink", "videosink");
163 gst_bin_add_many (GST_BIN (ev->pipeline), queue, decoder, vsink->sink, NULL);
164 gst_element_link (queue, decoder);
165 gst_element_link (decoder, vsink->sink);
166 videopad = gst_element_get_pad (queue, "sink");
167 gst_pad_link (GST_PAD (new_pad), videopad);
168 gst_object_unref (videopad);
169 if (ecore_list_count(ev->video_sinks) == 1) {
170 ev->ratio = (double)vsink->width / (double)vsink->height;
171 }
172 gst_element_set_state (queue, GST_STATE_PAUSED);
173 gst_element_set_state (decoder, GST_STATE_PAUSED);
174 gst_element_set_state (vsink->sink, GST_STATE_PAUSED);
175 }
176 /* audio stream */
177 else if (g_str_has_prefix (str, "audio/")) {
178 Emotion_Audio_Sink *asink;
179 GstElement *queue;
180 GstElement *decoder;
181 GstElement *conv;
182 GstElement *resample;
183 GstElement *volume;
184 GstPad *audiopad;
185 double vol;
186
187 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
188 if (!asink) return;
189 if (!ecore_list_append (ev->audio_sinks, asink)) {
190 free(asink);
191 return;
192 }
193
194 queue = gst_element_factory_make ("queue", NULL);
195 decoder = gst_element_factory_make ("a52dec", "a52dec");
196 conv = gst_element_factory_make ("audioconvert", NULL);
197 resample = gst_element_factory_make ("audioresample", NULL);
198 volume = gst_element_factory_make ("volume", "volume");
199 g_object_get (G_OBJECT (volume), "volume", &vol, NULL);
200 ev->volume = vol / 10.0;
201
202 /* FIXME: must manage several audio streams */
203 asink->sink = gst_element_factory_make ("fakesink", NULL);
204
205 gst_bin_add_many (GST_BIN (ev->pipeline),
206 queue, decoder, conv, resample, volume, asink->sink, NULL);
207 gst_element_link_many (queue, decoder, conv, resample, volume, asink->sink, NULL);
208
209 audiopad = gst_element_get_pad (queue, "sink");
210 gst_pad_link (GST_PAD (new_pad), audiopad);
211 gst_object_unref (audiopad);
212
213 gst_element_set_state (queue, GST_STATE_PAUSED);
214 gst_element_set_state (decoder, GST_STATE_PAUSED);
215 gst_element_set_state (conv, GST_STATE_PAUSED);
216 gst_element_set_state (resample, GST_STATE_PAUSED);
217 gst_element_set_state (volume, GST_STATE_PAUSED);
218 gst_element_set_state (asink->sink, GST_STATE_PAUSED);
219 }
220}
221
/*
 * "no-more-pads" callback of dvddemux: runs in the streaming thread and
 * releases the busy-wait loop in emotion_pipeline_dvd_build().
 */
static void
dvd_no_more_pads_cb (GstElement *dvddemuxer,
		     gpointer    user_data)
{
   no_more_pads = 1;
}
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c
new file mode 100644
index 0000000000..6a76fc63fe
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c
@@ -0,0 +1,61 @@
1
2#include "emotion_gstreamer.h"
3#include "emotion_gstreamer_pipeline.h"
4
/*
 * Build a local-file pipeline: filesrc -> decodebin, with sink chains
 * created on the fly by file_new_decoded_pad_cb().  Returns 1 on
 * success, 0 on failure (rolling back through the goto ladder).
 */
int
emotion_pipeline_file_build (void *video, const char *file)
{
   GstElement *filesrc;
   GstElement *decodebin;
   Emotion_Gstreamer_Video *ev;

   ev = (Emotion_Gstreamer_Video *)video;
   if (!ev) return 0;

   filesrc = gst_element_factory_make ("filesrc", "src");
   if (!filesrc)
      goto failure_filesrc;
   g_object_set (G_OBJECT (filesrc), "location", file, NULL);

   decodebin = gst_element_factory_make ("decodebin", "decodebin");
   if (!decodebin)
      goto failure_decodebin;
   g_signal_connect (decodebin, "new-decoded-pad",
                     G_CALLBACK (file_new_decoded_pad_cb), ev);

   gst_bin_add_many (GST_BIN (ev->pipeline), filesrc, decodebin, NULL);
   if (!gst_element_link (filesrc, decodebin))
      goto failure_link;

   /* pausing pre-rolls the pipeline so the stream caps are negotiated */
   if (!emotion_pipeline_pause (ev->pipeline))
      goto failure_gstreamer_pause;

   emotion_streams_sinks_get (ev, decodebin);

   /* The first vsink is a valid Emotion_Video_Sink * */
   /* If no video stream is found, it's a visualisation sink */
   {
      Emotion_Video_Sink *vsink;

      vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
      if (vsink && vsink->sink) {
         g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
         g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
         g_signal_connect (G_OBJECT (vsink->sink),
                           "handoff",
                           G_CALLBACK (cb_handoff), ev);
      }
   }

   return 1;

   /* unwind in reverse order of construction */
 failure_gstreamer_pause:
 failure_link:
   gst_element_set_state (ev->pipeline, GST_STATE_NULL);
   gst_bin_remove (GST_BIN (ev->pipeline), decodebin);
 failure_decodebin:
   gst_bin_remove (GST_BIN (ev->pipeline), filesrc);
 failure_filesrc:

   return 0;
}
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c
new file mode 100644
index 0000000000..f647a85a10
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c
@@ -0,0 +1,63 @@
1
2#include "emotion_gstreamer.h"
3#include "emotion_gstreamer_pipeline.h"
4
5int
6emotion_pipeline_uri_build (void *video, const char *uri)
7{
8 GstElement *src;
9 GstElement *decodebin;
10 Emotion_Gstreamer_Video *ev;
11
12 ev = (Emotion_Gstreamer_Video *)video;
13 if (!ev) return 0;
14
15 if (gst_uri_protocol_is_supported(GST_URI_SRC, uri))
16 goto failure_src;
17 src = gst_element_make_from_uri (GST_URI_SRC, uri, "src");
18 if (!src)
19 goto failure_src;
20 g_object_set (G_OBJECT (src), "location", uri, NULL);
21
22 decodebin = gst_element_factory_make ("decodebin", "decodebin");
23 if (!decodebin)
24 goto failure_decodebin;
25 g_signal_connect (decodebin, "new-decoded-pad",
26 G_CALLBACK (file_new_decoded_pad_cb), ev);
27
28 gst_bin_add_many (GST_BIN (ev->pipeline), src, decodebin, NULL);
29 if (!gst_element_link (src, decodebin))
30 goto failure_link;
31
32 if (!emotion_pipeline_pause (ev->pipeline))
33 goto failure_gstreamer_pause;
34
35 emotion_streams_sinks_get (ev, decodebin);
36
37 /* The first vsink is a valid Emotion_Video_Sink * */
38 /* If no video stream is found, it's a visualisation sink */
39 {
40 Emotion_Video_Sink *vsink;
41
42 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
43 if (vsink && vsink->sink) {
44 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
45 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
46 g_signal_connect (G_OBJECT (vsink->sink),
47 "handoff",
48 G_CALLBACK (cb_handoff), ev);
49 }
50 }
51
52 return 1;
53
54 failure_gstreamer_pause:
55 failure_link:
56 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
57 gst_bin_remove (GST_BIN (ev->pipeline), decodebin);
58 failure_decodebin:
59 gst_bin_remove (GST_BIN (ev->pipeline), src);
60 failure_src:
61
62 return 0;
63}