author    doursse <doursse>    2008-05-11 18:52:29 +0000
committer doursse <doursse@7cbeb6ba-43b4-40fd-8cce-4c39aea84d33>    2008-05-11 18:52:29 +0000
commit    c014ed5267c18bdcd1449970d592972e89f7886f (patch)
tree      16cb16d84a27be9c77594969c83a1ddab9e7fb49
parent    d72b07f4aa8aac8b4466e5b05e3ddd158cd514b0 (diff)
put xine and gstreamer modules in their own subdir
SVN revision: 34539
Diffstat (limited to '')
-rw-r--r--  legacy/emotion/configure.in                                               |    2
-rw-r--r--  legacy/emotion/src/modules/Makefile.am                                    |   45
-rw-r--r--  legacy/emotion/src/modules/emotion_gstreamer_pipeline.c                   | 1019
-rw-r--r--  legacy/emotion/src/modules/emotion_gstreamer_pipeline.h                   |   17
-rw-r--r--  legacy/emotion/src/modules/gstreamer/Makefile.am                          |   33
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c (renamed from legacy/emotion/src/modules/emotion_gstreamer.c)             |    2
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h (renamed from legacy/emotion/src/modules/emotion_gstreamer.h)             |    6
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c         |  541
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h         |   38
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c    |  123
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c     |  227
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c    |   61
-rw-r--r--  legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c     |   63
-rw-r--r--  legacy/emotion/src/modules/xine/Makefile.am                               |   30
-rw-r--r--  legacy/emotion/src/modules/xine/emotion_xine.c (renamed from legacy/emotion/src/modules/emotion_xine.c)                            |    0
-rw-r--r--  legacy/emotion/src/modules/xine/emotion_xine.h (renamed from legacy/emotion/src/modules/emotion_xine.h)                            |    0
-rw-r--r--  legacy/emotion/src/modules/xine/emotion_xine_vo_out.c (renamed from legacy/emotion/src/modules/emotion_xine_vo_out.c)              |    0
17 files changed, 1125 insertions, 1082 deletions
diff --git a/legacy/emotion/configure.in b/legacy/emotion/configure.in
index db90cee72f..93e88fd77c 100644
--- a/legacy/emotion/configure.in
+++ b/legacy/emotion/configure.in
@@ -125,6 +125,8 @@ emotion.spec
 src/Makefile
 src/lib/Makefile
 src/modules/Makefile
+src/modules/xine/Makefile
+src/modules/gstreamer/Makefile
 src/bin/Makefile
 data/Makefile
 debian/changelog
diff --git a/legacy/emotion/src/modules/Makefile.am b/legacy/emotion/src/modules/Makefile.am
index b27b874fbb..1af8c8c7e8 100644
--- a/legacy/emotion/src/modules/Makefile.am
+++ b/legacy/emotion/src/modules/Makefile.am
@@ -1,47 +1,4 @@
 
 MAINTAINERCLEANFILES = Makefile.in
 
-if BUILD_XINE_MODULE
-XINE_LIB_NAME = xine.la
-endif
-
-if BUILD_GSTREAMER_MODULE
-GSTREAMER_LIB_NAME = gstreamer.la
-endif
-
-AM_CPPFLAGS = \
--I$(top_srcdir) \
--I$(top_srcdir)/src/lib \
--I$(top_srcdir)/src/modules \
--DPACKAGE_BIN_DIR=\"$(bindir)\" \
--DPACKAGE_LIB_DIR=\"$(libdir)\" \
--DPACKAGE_DATA_DIR=\"$(datadir)/$(PACKAGE)\" \
-@EVAS_CFLAGS@ \
-@ECORE_CFLAGS@ \
-@XINE_CFLAGS@ \
-@GST_CFLAGS@
-
-pkgdir = $(libdir)/emotion
-
-pkg_LTLIBRARIES = $(XINE_LIB_NAME) $(GSTREAMER_LIB_NAME)
-
-if BUILD_XINE_MODULE
-xine_la_SOURCES = \
-emotion_xine.c \
-emotion_xine.h \
-emotion_xine_vo_out.c
-xine_la_LIBADD = @EVAS_LIBS@ @ECORE_LIBS@ @XINE_LIBS@ $(top_builddir)/src/lib/libemotion.la -lpthread
-xine_la_LDFLAGS = -module -avoid-version
-xine_la_DEPENDENCIES = $(top_builddir)/config.h
-endif
-
-if BUILD_GSTREAMER_MODULE
-gstreamer_la_SOURCES = \
-emotion_gstreamer.c \
-emotion_gstreamer.h \
-emotion_gstreamer_pipeline.c \
-emotion_gstreamer_pipeline.h
-gstreamer_la_LIBADD = @EVAS_LIBS@ @ECORE_LIBS@ @GST_LIBS@ $(top_builddir)/src/lib/libemotion.la
-gstreamer_la_LDFLAGS = -module -avoid-version
-gstreamer_la_DEPENDENCIES = $(top_builddir)/config.h
-endif
+SUBDIRS = xine gstreamer
diff --git a/legacy/emotion/src/modules/emotion_gstreamer_pipeline.c b/legacy/emotion/src/modules/emotion_gstreamer_pipeline.c
deleted file mode 100644
index 250439410f..0000000000
--- a/legacy/emotion/src/modules/emotion_gstreamer_pipeline.c
+++ /dev/null
@@ -1,1019 +0,0 @@
1#include <unistd.h>
2#include <fcntl.h>
3
4#include "emotion_private.h"
5#include "emotion_gstreamer.h"
6
7
8static int no_more_pads = 0;
9
10static void file_new_decoded_pad_cb (GstElement *decodebin,
11 GstPad *new_pad,
12 gboolean last,
13 gpointer user_data);
14
15static void dvd_pad_added_cb (GstElement *dvddemuxer,
16 GObject *new_pad,
17 gpointer user_data);
18
19static void dvd_no_more_pads_cb (GstElement *dvddemuxer,
20 gpointer user_data);
21
22static void cb_handoff (GstElement *fakesrc,
23 GstBuffer *buffer,
24 GstPad *pad,
25 gpointer user_data);
26
27
28static Emotion_Video_Sink * _emotion_video_sink_new (Emotion_Gstreamer_Video *ev);
29static void _emotion_video_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink);
30static void _emotion_video_sink_fill (Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps);
31
32static Emotion_Audio_Sink * _emotion_audio_sink_new (Emotion_Gstreamer_Video *ev);
33static void _emotion_audio_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink);
34static GstElement * _emotion_audio_sink_create (Emotion_Gstreamer_Video *ev, int index);
35static void _emotion_audio_sink_fill (Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps);
36
37static Emotion_Video_Sink * _emotion_visualization_sink_create (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink);
38
39static void _emotion_streams_sinks_get (Emotion_Gstreamer_Video *ev, GstElement *decoder);
40
41gboolean
42emotion_pipeline_pause (GstElement *pipeline)
43{
44 GstStateChangeReturn res;
45
46 res = gst_element_set_state ((pipeline), GST_STATE_PAUSED);
47 if (res == GST_STATE_CHANGE_FAILURE) {
48 g_print ("Emotion-Gstreamer ERROR: could not pause\n");
49 return 0;
50 }
51
52 res = gst_element_get_state ((pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
53 if (res != GST_STATE_CHANGE_SUCCESS) {
54 g_print ("Emotion-Gstreamer ERROR: could not complete pause\n");
55 return 0;
56 }
57
58 return 1;
59}
60
61/* Send the video frame to the evas object */
62static void
63cb_handoff (GstElement *fakesrc,
64 GstBuffer *buffer,
65 GstPad *pad,
66 gpointer user_data)
67{
68 GstQuery *query;
69 void *buf[2];
70
71 Emotion_Gstreamer_Video *ev = ( Emotion_Gstreamer_Video *) user_data;
72 if (!ev)
73 return;
74
75 if (!ev->video_mute) {
76 if (!ev->obj_data)
77 ev->obj_data = (void*) malloc (GST_BUFFER_SIZE(buffer) * sizeof(void));
78
79 memcpy ( ev->obj_data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
80 buf[0] = GST_BUFFER_DATA(buffer);
81 buf[1] = buffer;
82 write(ev->fd_ev_write, buf, sizeof(buf));
83 }
84 else {
85 Emotion_Audio_Sink *asink;
86 asink = (Emotion_Audio_Sink *)ecore_list_index_goto (ev->audio_sinks, ev->audio_sink_nbr);
87 _emotion_video_pos_update(ev->obj, ev->position, asink->length_time);
88 }
89
90 query = gst_query_new_position (GST_FORMAT_TIME);
91 if (gst_pad_query (gst_pad_get_peer (pad), query)) {
92 gint64 position;
93
94 gst_query_parse_position (query, NULL, &position);
95 ev->position = (double)position / (double)GST_SECOND;
96 }
97 gst_query_unref (query);
98}
99
100int
101emotion_pipeline_cdda_build (void *video, const char * device, unsigned int track)
102{
103 GstElement *cdiocddasrc;
104 Emotion_Video_Sink *vsink;
105 Emotion_Audio_Sink *asink;
106 Emotion_Gstreamer_Video *ev;
107/* GstFormat format; */
108/* gint64 tracks_count; */
109
110 ev = (Emotion_Gstreamer_Video *)video;
111 if (!ev) return 0;
112
113 cdiocddasrc = gst_element_factory_make ("cdiocddasrc", "src");
114 if (!cdiocddasrc) {
115 g_print ("cdiocddasrc element missing. Install it.\n");
116 goto failure_cdiocddasrc;
117 }
118
119 if (device)
120 g_object_set (G_OBJECT (cdiocddasrc), "device", device, NULL);
121
122 g_object_set (G_OBJECT (cdiocddasrc), "track", track, NULL);
123
124 asink = _emotion_audio_sink_new (ev);
125 if (!asink)
126 goto failure_emotion_sink;
127
128 asink->sink = _emotion_audio_sink_create (ev, 1);
129 if (!asink->sink)
130 goto failure_gstreamer_sink;
131
132 gst_bin_add_many((GST_BIN (ev->pipeline)), cdiocddasrc, asink->sink, NULL);
133
134 if (!gst_element_link (cdiocddasrc, asink->sink))
135 goto failure_link;
136
137 vsink = _emotion_visualization_sink_create (ev, asink);
138 if (!vsink) goto failure_link;
139
140 if (!emotion_pipeline_pause (ev->pipeline))
141 goto failure_gstreamer_pause;
142
143 {
144 GstQuery *query;
145 GstPad *pad;
146 GstCaps *caps;
147 GstStructure *structure;
148
149 /* should always be found */
150 pad = gst_element_get_pad (cdiocddasrc, "src");
151
152 caps = gst_pad_get_caps (pad);
153 structure = gst_caps_get_structure (GST_CAPS (caps), 0);
154
155 gst_structure_get_int (structure, "channels", &asink->channels);
156 gst_structure_get_int (structure, "rate", &asink->samplerate);
157
158 gst_caps_unref (caps);
159
160 query = gst_query_new_duration (GST_FORMAT_TIME);
161 if (gst_pad_query (pad, query)) {
162 gint64 time;
163
164 gst_query_parse_duration (query, NULL, &time);
165 asink->length_time = (double)time / (double)GST_SECOND;
166 vsink->length_time = asink->length_time;
167 }
168 gst_query_unref (query);
169 gst_object_unref (GST_OBJECT (pad));
170 }
171
172 return 1;
173
174 failure_gstreamer_pause:
175 _emotion_video_sink_free (ev, vsink);
176 failure_link:
177 gst_bin_remove (GST_BIN (ev->pipeline), asink->sink);
178 failure_gstreamer_sink:
179 _emotion_audio_sink_free (ev, asink);
180 failure_emotion_sink:
181 gst_bin_remove (GST_BIN (ev->pipeline), cdiocddasrc);
182 failure_cdiocddasrc:
183
184 return 0;
185}
186
187int
188emotion_pipeline_dvd_build (void *video, const char *device)
189{
190 GstElement *dvdreadsrc;
191 GstElement *dvddemux;
192 Emotion_Gstreamer_Video *ev;
193
194 ev = (Emotion_Gstreamer_Video *)video;
195 if (!ev) return 0;
196
197 dvdreadsrc = gst_element_factory_make ("dvdreadsrc", "src");
198 if (!dvdreadsrc)
199 goto failure_dvdreadsrc;
200 if (device)
201 g_object_set (G_OBJECT (dvdreadsrc), "device", device, NULL);
202
203 dvddemux = gst_element_factory_make ("dvddemux", "dvddemux");
204 if (!dvddemux)
205 goto failure_dvddemux;
206 g_signal_connect (dvddemux, "pad-added",
207 G_CALLBACK (dvd_pad_added_cb), ev);
208 g_signal_connect (dvddemux, "no-more-pads",
209 G_CALLBACK (dvd_no_more_pads_cb), ev);
210
211 gst_bin_add_many (GST_BIN (ev->pipeline), dvdreadsrc, dvddemux, NULL);
212 if (!gst_element_link (dvdreadsrc, dvddemux))
213 goto failure_link;
214
215 if (!emotion_pipeline_pause (ev->pipeline))
216 goto failure_gstreamer_pause;
217
218 while (no_more_pads == 0) {
219 g_print ("toto\n");}
220 no_more_pads = 0;
221
222 /* We get the informations of streams */
223 ecore_list_first_goto (ev->video_sinks);
224 ecore_list_first_goto (ev->audio_sinks);
225
226 {
227 GstIterator *it;
228 gpointer data;
229
230 it = gst_element_iterate_src_pads (dvddemux);
231 while (gst_iterator_next (it, &data) == GST_ITERATOR_OK) {
232 GstPad *pad;
233 GstCaps *caps;
234 gchar *str;
235
236 pad = GST_PAD (data);
237
238 caps = gst_pad_get_caps (pad);
239 str = gst_caps_to_string (caps);
240 g_print ("caps !! %s\n", str);
241 /* video stream */
242 if (g_str_has_prefix (str, "video/mpeg")) {
243 Emotion_Video_Sink *vsink;
244 GstPad *sink_pad;
245 GstCaps *sink_caps;
246
247 vsink = (Emotion_Video_Sink *)ecore_list_next (ev->video_sinks);
248 sink_pad = gst_element_get_pad (gst_bin_get_by_name (GST_BIN (ev->pipeline), "mpeg2dec"), "src");
249 sink_caps = gst_pad_get_caps (sink_pad);
250 str = gst_caps_to_string (sink_caps);
251 g_print (" ** caps v !! %s\n", str);
252
253 _emotion_video_sink_fill (vsink, sink_pad, sink_caps);
254
255 gst_caps_unref (sink_caps);
256 gst_object_unref (sink_pad);
257 }
258 /* audio stream */
259 else if (g_str_has_prefix (str, "audio/")) {
260 Emotion_Audio_Sink *asink;
261 GstPad *sink_pad;
262 GstCaps *sink_caps;
263
264 asink = (Emotion_Audio_Sink *)ecore_list_next (ev->audio_sinks);
265 sink_pad = gst_element_get_pad (gst_bin_get_by_name (GST_BIN (ev->pipeline), "a52dec"), "src");
266 sink_caps = gst_pad_get_caps (sink_pad);
267
268 _emotion_audio_sink_fill (asink, sink_pad, sink_caps);
269 }
270 gst_caps_unref (caps);
271 g_free (str);
272 gst_object_unref (pad);
273 }
274 gst_iterator_free (it);
275 }
276
277 /* The first vsink is a valid Emotion_Video_Sink * */
278 /* If no video stream is found, it's a visualisation sink */
279 {
280 Emotion_Video_Sink *vsink;
281
282 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
283 if (vsink && vsink->sink) {
284 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
285 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
286 g_signal_connect (G_OBJECT (vsink->sink),
287 "handoff",
288 G_CALLBACK (cb_handoff), ev);
289 }
290 }
291
292 return 1;
293
294 failure_gstreamer_pause:
295 failure_link:
296 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
297 gst_bin_remove (GST_BIN (ev->pipeline), dvddemux);
298 failure_dvddemux:
299 gst_bin_remove (GST_BIN (ev->pipeline), dvdreadsrc);
300 failure_dvdreadsrc:
301
302 return 0;
303}
304
305int
306emotion_pipeline_uri_build (void *video, const char *uri)
307{
308 GstElement *src;
309 GstElement *decodebin;
310 Emotion_Gstreamer_Video *ev;
311
312 ev = (Emotion_Gstreamer_Video *)video;
313 if (!ev) return 0;
314
315 if (gst_uri_protocol_is_supported(GST_URI_SRC, uri))
316 goto failure_src;
317 src = gst_element_make_from_uri (GST_URI_SRC, uri, "src");
318 if (!src)
319 goto failure_src;
320 g_object_set (G_OBJECT (src), "location", uri, NULL);
321
322 decodebin = gst_element_factory_make ("decodebin", "decodebin");
323 if (!decodebin)
324 goto failure_decodebin;
325 g_signal_connect (decodebin, "new-decoded-pad",
326 G_CALLBACK (file_new_decoded_pad_cb), ev);
327
328 gst_bin_add_many (GST_BIN (ev->pipeline), src, decodebin, NULL);
329 if (!gst_element_link (src, decodebin))
330 goto failure_link;
331
332 if (!emotion_pipeline_pause (ev->pipeline))
333 goto failure_gstreamer_pause;
334
335 _emotion_streams_sinks_get (ev, decodebin);
336
337 /* The first vsink is a valid Emotion_Video_Sink * */
338 /* If no video stream is found, it's a visualisation sink */
339 {
340 Emotion_Video_Sink *vsink;
341
342 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
343 if (vsink && vsink->sink) {
344 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
345 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
346 g_signal_connect (G_OBJECT (vsink->sink),
347 "handoff",
348 G_CALLBACK (cb_handoff), ev);
349 }
350 }
351
352 return 1;
353
354 failure_gstreamer_pause:
355 failure_link:
356 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
357 gst_bin_remove (GST_BIN (ev->pipeline), decodebin);
358 failure_decodebin:
359 gst_bin_remove (GST_BIN (ev->pipeline), src);
360 failure_src:
361
362 return 0;
363}
364
365int
366emotion_pipeline_file_build (void *video, const char *file)
367{
368 GstElement *filesrc;
369 GstElement *decodebin;
370 Emotion_Gstreamer_Video *ev;
371
372 ev = (Emotion_Gstreamer_Video *)video;
373 if (!ev) return 0;
374
375 filesrc = gst_element_factory_make ("filesrc", "src");
376 if (!filesrc)
377 goto failure_filesrc;
378 g_object_set (G_OBJECT (filesrc), "location", file, NULL);
379
380 decodebin = gst_element_factory_make ("decodebin", "decodebin");
381 if (!decodebin)
382 goto failure_decodebin;
383 g_signal_connect (decodebin, "new-decoded-pad",
384 G_CALLBACK (file_new_decoded_pad_cb), ev);
385
386 gst_bin_add_many (GST_BIN (ev->pipeline), filesrc, decodebin, NULL);
387 if (!gst_element_link (filesrc, decodebin))
388 goto failure_link;
389
390 if (!emotion_pipeline_pause (ev->pipeline))
391 goto failure_gstreamer_pause;
392
393 _emotion_streams_sinks_get (ev, decodebin);
394
395 /* The first vsink is a valid Emotion_Video_Sink * */
396 /* If no video stream is found, it's a visualisation sink */
397 {
398 Emotion_Video_Sink *vsink;
399
400 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
401 if (vsink && vsink->sink) {
402 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
403 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
404 g_signal_connect (G_OBJECT (vsink->sink),
405 "handoff",
406 G_CALLBACK (cb_handoff), ev);
407 }
408 }
409
410 return 1;
411
412 failure_gstreamer_pause:
413 failure_link:
414 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
415 gst_bin_remove (GST_BIN (ev->pipeline), decodebin);
416 failure_decodebin:
417 gst_bin_remove (GST_BIN (ev->pipeline), filesrc);
418 failure_filesrc:
419
420 return 0;
421}
422
423int
424emotion_pipeline_cdda_track_count_get(void *video)
425{
426 Emotion_Gstreamer_Video *ev;
427 GstBus *bus;
428 guint tracks_count = 0;
429 gboolean done;
430
431 ev = (Emotion_Gstreamer_Video *)video;
432 if (!ev) return tracks_count;
433
434 done = FALSE;
435 bus = gst_element_get_bus (ev->pipeline);
436 if (!bus) return tracks_count;
437
438 while (!done) {
439 GstMessage *message;
440
441 message = gst_bus_pop (bus);
442 if (message == NULL)
443 /* All messages read, we're done */
444 break;
445
446 switch (GST_MESSAGE_TYPE (message)) {
447 case GST_MESSAGE_TAG: {
448 GstTagList *tags;
449
450 gst_message_parse_tag (message, &tags);
451
452 gst_tag_list_get_uint (tags, GST_TAG_TRACK_COUNT, &tracks_count);
453 if (tracks_count) done = TRUE;
454 break;
455 }
456 case GST_MESSAGE_ERROR:
457 default:
458 break;
459 }
460 gst_message_unref (message);
461 }
462
463 gst_object_unref (GST_OBJECT (bus));
464
465 return tracks_count;
466}
467
468static void
469file_new_decoded_pad_cb (GstElement *decodebin,
470 GstPad *new_pad,
471 gboolean last,
472 gpointer user_data)
473{
474 Emotion_Gstreamer_Video *ev;
475 GstCaps *caps;
476 gchar *str;
477
478 ev = (Emotion_Gstreamer_Video *)user_data;
479 caps = gst_pad_get_caps (new_pad);
480 str = gst_caps_to_string (caps);
481 /* video stream */
482 if (g_str_has_prefix (str, "video/")) {
483 Emotion_Video_Sink *vsink;
484 GstElement *queue;
485 GstPad *videopad;
486
487 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
488 if (!vsink) return;
489 if (!ecore_list_append (ev->video_sinks, vsink)) {
490 free(vsink);
491 return;
492 }
493
494 queue = gst_element_factory_make ("queue", NULL);
495 vsink->sink = gst_element_factory_make ("fakesink", "videosink");
496 gst_bin_add_many (GST_BIN (ev->pipeline), queue, vsink->sink, NULL);
497 gst_element_link (queue, vsink->sink);
498 videopad = gst_element_get_pad (queue, "sink");
499 gst_pad_link (new_pad, videopad);
500 gst_object_unref (videopad);
501 if (ecore_list_count(ev->video_sinks) == 1) {
502 ev->ratio = (double)vsink->width / (double)vsink->height;
503 }
504 gst_element_set_state (queue, GST_STATE_PAUSED);
505 gst_element_set_state (vsink->sink, GST_STATE_PAUSED);
506 }
507 /* audio stream */
508 else if (g_str_has_prefix (str, "audio/")) {
509 Emotion_Audio_Sink *asink;
510 GstPad *audiopad;
511
512 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
513 if (!asink) return;
514 if (!ecore_list_append (ev->audio_sinks, asink)) {
515 free(asink);
516 return;
517 }
518
519 asink->sink = _emotion_audio_sink_create (ev, ecore_list_index (ev->audio_sinks));
520 gst_bin_add (GST_BIN (ev->pipeline), asink->sink);
521 audiopad = gst_element_get_pad (asink->sink, "sink");
522 gst_pad_link(new_pad, audiopad);
523 gst_element_set_state (asink->sink, GST_STATE_PAUSED);
524 }
525}
526
527static void
528dvd_pad_added_cb (GstElement *dvddemuxer,
529 GObject *new_pad,
530 gpointer user_data)
531{
532 Emotion_Gstreamer_Video *ev;
533 GstCaps *caps;
534 gchar *str;
535
536 ev = (Emotion_Gstreamer_Video *)user_data;
537 caps = gst_pad_get_caps (GST_PAD (new_pad));
538 str = gst_caps_to_string (caps);
539 /* video stream */
540 if (g_str_has_prefix (str, "video/mpeg")) {
541 Emotion_Video_Sink *vsink;
542 GstElement *queue;
543 GstElement *decoder;
544 GstPad *videopad;
545
546 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
547 if (!vsink) return;
548 if (!ecore_list_append (ev->video_sinks, vsink)) {
549 free(vsink);
550 return;
551 }
552
553 queue = gst_element_factory_make ("queue", NULL);
554 decoder = gst_element_factory_make ("mpeg2dec", "mpeg2dec");
555 vsink->sink = gst_element_factory_make ("fakesink", "videosink");
556 gst_bin_add_many (GST_BIN (ev->pipeline), queue, decoder, vsink->sink, NULL);
557 gst_element_link (queue, decoder);
558 gst_element_link (decoder, vsink->sink);
559 videopad = gst_element_get_pad (queue, "sink");
560 gst_pad_link (GST_PAD (new_pad), videopad);
561 gst_object_unref (videopad);
562 if (ecore_list_count(ev->video_sinks) == 1) {
563 ev->ratio = (double)vsink->width / (double)vsink->height;
564 }
565 gst_element_set_state (queue, GST_STATE_PAUSED);
566 gst_element_set_state (decoder, GST_STATE_PAUSED);
567 gst_element_set_state (vsink->sink, GST_STATE_PAUSED);
568 }
569 /* audio stream */
570 else if (g_str_has_prefix (str, "audio/")) {
571 Emotion_Audio_Sink *asink;
572 GstElement *queue;
573 GstElement *decoder;
574 GstElement *conv;
575 GstElement *resample;
576 GstElement *volume;
577 GstPad *audiopad;
578 double vol;
579
580 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
581 if (!asink) return;
582 if (!ecore_list_append (ev->audio_sinks, asink)) {
583 free(asink);
584 return;
585 }
586
587 queue = gst_element_factory_make ("queue", NULL);
588 decoder = gst_element_factory_make ("a52dec", "a52dec");
589 conv = gst_element_factory_make ("audioconvert", NULL);
590 resample = gst_element_factory_make ("audioresample", NULL);
591 volume = gst_element_factory_make ("volume", "volume");
592 g_object_get (G_OBJECT (volume), "volume", &vol, NULL);
593 ev->volume = vol / 10.0;
594
595 /* FIXME: must manage several audio streams */
596 asink->sink = gst_element_factory_make ("fakesink", NULL);
597
598 gst_bin_add_many (GST_BIN (ev->pipeline),
599 queue, decoder, conv, resample, volume, asink->sink, NULL);
600 gst_element_link_many (queue, decoder, conv, resample, volume, asink->sink, NULL);
601
602 audiopad = gst_element_get_pad (queue, "sink");
603 gst_pad_link (GST_PAD (new_pad), audiopad);
604 gst_object_unref (audiopad);
605
606 gst_element_set_state (queue, GST_STATE_PAUSED);
607 gst_element_set_state (decoder, GST_STATE_PAUSED);
608 gst_element_set_state (conv, GST_STATE_PAUSED);
609 gst_element_set_state (resample, GST_STATE_PAUSED);
610 gst_element_set_state (volume, GST_STATE_PAUSED);
611 gst_element_set_state (asink->sink, GST_STATE_PAUSED);
612 }
613}
614
615static void
616dvd_no_more_pads_cb (GstElement *dvddemuxer,
617 gpointer user_data)
618{
619 no_more_pads = 1;
620}
621
622static Emotion_Audio_Sink *
623_emotion_audio_sink_new (Emotion_Gstreamer_Video *ev)
624{
625 Emotion_Audio_Sink *asink;
626
627 if (!ev) return NULL;
628
629 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
630 if (!asink) return NULL;
631
632 if (!ecore_list_append (ev->audio_sinks, asink)) {
633 free (asink);
634 return NULL;
635 }
636 return asink;
637}
638
639static void
640_emotion_audio_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
641{
642 if (!ev || !asink) return;
643
644 if (ecore_list_goto (ev->audio_sinks, asink)) {
645 ecore_list_remove (ev->audio_sinks);
646 free (asink);
647 }
648}
649
650static Emotion_Video_Sink *
651_emotion_video_sink_new (Emotion_Gstreamer_Video *ev)
652{
653 Emotion_Video_Sink *vsink;
654
655 if (!ev) return NULL;
656
657 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
658 if (!vsink) return NULL;
659
660 if (!ecore_list_append (ev->video_sinks, vsink)) {
661 free (vsink);
662 return NULL;
663 }
664 return vsink;
665}
666
667static void
668_emotion_video_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink)
669{
670 if (!ev || !vsink) return;
671
672 if (ecore_list_goto (ev->video_sinks, vsink)) {
673 ecore_list_remove (ev->video_sinks);
674 free (vsink);
675 }
676}
677
678static Emotion_Video_Sink *
679_emotion_visualization_sink_create (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
680{
681 Emotion_Video_Sink *vsink;
682
683 if (!ev) return NULL;
684
685 vsink = _emotion_video_sink_new (ev);
686 if (!vsink) return NULL;
687
688 vsink->sink = gst_bin_get_by_name (GST_BIN (asink->sink), "vissink1");
689 if (!vsink->sink) {
690 _emotion_video_sink_free (ev, vsink);
691 return NULL;
692 }
693 vsink->width = 320;
694 vsink->height = 200;
695 ev->ratio = (double)vsink->width / (double)vsink->height;
696 vsink->fps_num = 25;
697 vsink->fps_den = 1;
698 vsink->fourcc = GST_MAKE_FOURCC ('A','R','G','B');
699 vsink->length_time = asink->length_time;
700
701 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
702 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
703 g_signal_connect (G_OBJECT (vsink->sink),
704 "handoff",
705 G_CALLBACK (cb_handoff), ev);
706 return vsink;
707}
708
709static GstElement *
710_emotion_audio_sink_create (Emotion_Gstreamer_Video *ev, int index)
711{
712 gchar buf[128];
713 GstElement *bin;
714 GstElement *audiobin;
715 GstElement *visbin = NULL;
716 GstElement *tee;
717 GstPad *teepad;
718 GstPad *binpad;
719
720 /* audio sink */
721 bin = gst_bin_new (NULL);
722 if (!bin) return NULL;
723
724 g_snprintf (buf, 128, "tee%d", index);
725 tee = gst_element_factory_make ("tee", buf);
726
727 /* audio part */
728 {
729 GstElement *queue;
730 GstElement *conv;
731 GstElement *resample;
732 GstElement *volume;
733 GstElement *sink;
734 GstPad *audiopad;
735 double vol;
736
737 audiobin = gst_bin_new (NULL);
738
739 queue = gst_element_factory_make ("queue", NULL);
740 conv = gst_element_factory_make ("audioconvert", NULL);
741 resample = gst_element_factory_make ("audioresample", NULL);
742 volume = gst_element_factory_make ("volume", "volume");
743 g_object_get (G_OBJECT (volume), "volume", &vol, NULL);
744 ev->volume = vol / 10.0;
745
746 if (index == 1)
747 sink = gst_element_factory_make ("autoaudiosink", NULL);
748 else
749 sink = gst_element_factory_make ("fakesink", NULL);
750
751 gst_bin_add_many (GST_BIN (audiobin),
752 queue, conv, resample, volume, sink, NULL);
753 gst_element_link_many (queue, conv, resample, volume, sink, NULL);
754
755 audiopad = gst_element_get_pad (queue, "sink");
756 gst_element_add_pad (audiobin, gst_ghost_pad_new ("sink", audiopad));
757 gst_object_unref (audiopad);
758 }
759
760 /* visualisation part */
761 {
762 GstElement *vis = NULL;
763 char *vis_name;
764
765 switch (ev->vis) {
766 case EMOTION_VIS_GOOM:
767 vis_name = "goom";
768 break;
769 case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
770 vis_name = "libvisual_bumpscope";
771 break;
772 case EMOTION_VIS_LIBVISUAL_CORONA:
773 vis_name = "libvisual_corona";
774 break;
775 case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
776 vis_name = "libvisual_dancingparticles";
777 break;
778 case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
779 vis_name = "libvisual_gdkpixbuf";
780 break;
781 case EMOTION_VIS_LIBVISUAL_G_FORCE:
782 vis_name = "libvisual_G-Force";
783 break;
784 case EMOTION_VIS_LIBVISUAL_GOOM:
785 vis_name = "libvisual_goom";
786 break;
787 case EMOTION_VIS_LIBVISUAL_INFINITE:
788 vis_name = "libvisual_infinite";
789 break;
790 case EMOTION_VIS_LIBVISUAL_JAKDAW:
791 vis_name = "libvisual_jakdaw";
792 break;
793 case EMOTION_VIS_LIBVISUAL_JESS:
794 vis_name = "libvisual_jess";
795 break;
796 case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
797 vis_name = "libvisual_lv_analyzer";
798 break;
799 case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
800 vis_name = "libvisual_lv_flower";
801 break;
802 case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
803 vis_name = "libvisual_lv_gltest";
804 break;
805 case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
806 vis_name = "libvisual_lv_scope";
807 break;
808 case EMOTION_VIS_LIBVISUAL_MADSPIN:
809 vis_name = "libvisual_madspin";
810 break;
811 case EMOTION_VIS_LIBVISUAL_NEBULUS:
812 vis_name = "libvisual_nebulus";
813 break;
814 case EMOTION_VIS_LIBVISUAL_OINKSIE:
815 vis_name = "libvisual_oinksie";
816 break;
817 case EMOTION_VIS_LIBVISUAL_PLASMA:
818 vis_name = "libvisual_plazma";
819 break;
820 default:
821 vis_name = "goom";
822 break;
823 }
824
825 g_snprintf (buf, 128, "vis%d", index);
826 if ((vis = gst_element_factory_make (vis_name, buf))) {
827 GstElement *queue;
828 GstElement *conv;
829 GstElement *cspace;
830 GstElement *sink;
831 GstPad *vispad;
832 GstCaps *caps;
833
834 g_snprintf (buf, 128, "visbin%d", index);
835 visbin = gst_bin_new (buf);
836
837 queue = gst_element_factory_make ("queue", NULL);
838 conv = gst_element_factory_make ("audioconvert", NULL);
839 cspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
840 g_snprintf (buf, 128, "vissink%d", index);
841 sink = gst_element_factory_make ("fakesink", buf);
842
843 gst_bin_add_many (GST_BIN (visbin),
844 queue, conv, vis, cspace, sink, NULL);
845 gst_element_link_many (queue, conv, vis, cspace, NULL);
846 caps = gst_caps_new_simple ("video/x-raw-rgb",
847 "bpp", G_TYPE_INT, 32,
848 "width", G_TYPE_INT, 320,
849 "height", G_TYPE_INT, 200,
850 NULL);
851 gst_element_link_filtered (cspace, sink, caps);
852
853 vispad = gst_element_get_pad (queue, "sink");
854 gst_element_add_pad (visbin, gst_ghost_pad_new ("sink", vispad));
855 gst_object_unref (vispad);
856 }
857 }
858
859 gst_bin_add_many (GST_BIN (bin), tee, audiobin, NULL);
860 if (visbin)
861 gst_bin_add (GST_BIN (bin), visbin);
862
863 binpad = gst_element_get_pad (audiobin, "sink");
864 teepad = gst_element_get_request_pad (tee, "src%d");
865 gst_pad_link (teepad, binpad);
866 gst_object_unref (teepad);
867 gst_object_unref (binpad);
868
869 if (visbin) {
870 binpad = gst_element_get_pad (visbin, "sink");
871 teepad = gst_element_get_request_pad (tee, "src%d");
872 gst_pad_link (teepad, binpad);
873 gst_object_unref (teepad);
874 gst_object_unref (binpad);
875 }
876
877 teepad = gst_element_get_pad (tee, "sink");
878 gst_element_add_pad (bin, gst_ghost_pad_new ("sink", teepad));
879 gst_object_unref (teepad);
880
881 return bin;
882}
883
884static void
885_emotion_streams_sinks_get (Emotion_Gstreamer_Video *ev, GstElement *decoder)
886{
887 GstIterator *it;
888 gpointer data;
889
890 ecore_list_first_goto (ev->video_sinks);
891 ecore_list_first_goto (ev->audio_sinks);
892
893 it = gst_element_iterate_src_pads (decoder);
894 while (gst_iterator_next (it, &data) == GST_ITERATOR_OK) {
895 GstPad *pad;
896 GstCaps *caps;
897 gchar *str;
898
899 pad = GST_PAD (data);
900
901 caps = gst_pad_get_caps (pad);
902 str = gst_caps_to_string (caps);
903 g_print ("caps !! %s\n", str);
904
905 /* video stream */
906 if (g_str_has_prefix (str, "video/")) {
907 Emotion_Video_Sink *vsink;
908
909 vsink = (Emotion_Video_Sink *)ecore_list_next (ev->video_sinks);
910
911 _emotion_video_sink_fill (vsink, pad, caps);
912 }
913 /* audio stream */
914 else if (g_str_has_prefix (str, "audio/")) {
915 Emotion_Audio_Sink *asink;
916 gint index;
917
918 asink = (Emotion_Audio_Sink *)ecore_list_next (ev->audio_sinks);
919
920 _emotion_audio_sink_fill (asink, pad, caps);
921
922 index = ecore_list_index (ev->audio_sinks);
923
924 if (ecore_list_count (ev->video_sinks) == 0) {
925 if (index == 1) {
926 Emotion_Video_Sink *vsink;
927
928 vsink = _emotion_visualization_sink_create (ev, asink);
929 if (!vsink) goto finalize;
930 }
931 }
932 else {
933 gchar buf[128];
934 GstElement *visbin;
935
936 g_snprintf (buf, 128, "visbin%d", index);
937 visbin = gst_bin_get_by_name (GST_BIN (ev->pipeline), buf);
938 if (visbin) {
939 GstPad *srcpad;
940 GstPad *sinkpad;
941
942 sinkpad = gst_element_get_pad (visbin, "sink");
943 srcpad = gst_pad_get_peer (sinkpad);
944 gst_pad_unlink (srcpad, sinkpad);
945
946 gst_object_unref (srcpad);
947 gst_object_unref (sinkpad);
948 }
949 }
950 }
951 finalize:
952 gst_caps_unref (caps);
953 g_free (str);
954 gst_object_unref (pad);
955 }
956 gst_iterator_free (it);
957}
958
959static void
960_emotion_video_sink_fill (Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps)
961{
962 GstStructure *structure;
963 GstQuery *query;
964 const GValue *val;
965 gchar *str;
966
967 structure = gst_caps_get_structure (caps, 0);
968 str = gst_caps_to_string (caps);
969
970 gst_structure_get_int (structure, "width", &vsink->width);
971 gst_structure_get_int (structure, "height", &vsink->height);
972
973 vsink->fps_num = 1;
974 vsink->fps_den = 1;
975 val = gst_structure_get_value (structure, "framerate");
976 if (val) {
977 vsink->fps_num = gst_value_get_fraction_numerator (val);
978 vsink->fps_den = gst_value_get_fraction_denominator (val);
979 }
980 if (g_str_has_prefix(str, "video/x-raw-yuv")) {
981 val = gst_structure_get_value (structure, "format");
982 vsink->fourcc = gst_value_get_fourcc (val);
983 }
984 else if (g_str_has_prefix(str, "video/x-raw-rgb"))
985 vsink->fourcc = GST_MAKE_FOURCC ('A','R','G','B');
986 else
987 vsink->fourcc = 0;
988
989 query = gst_query_new_duration (GST_FORMAT_TIME);
990 if (gst_pad_query (pad, query)) {
991 gint64 time;
992
993 gst_query_parse_duration (query, NULL, &time);
994 vsink->length_time = (double)time / (double)GST_SECOND;
995 }
996 g_free (str);
997 gst_query_unref (query);
998}
999
1000static void
1001_emotion_audio_sink_fill (Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps)
1002{
1003 GstStructure *structure;
1004 GstQuery *query;
1005
1006 structure = gst_caps_get_structure (caps, 0);
1007
1008 gst_structure_get_int (structure, "channels", &asink->channels);
1009 gst_structure_get_int (structure, "rate", &asink->samplerate);
1010
1011 query = gst_query_new_duration (GST_FORMAT_TIME);
1012 if (gst_pad_query (pad, query)) {
1013 gint64 time;
1014
1015 gst_query_parse_duration (query, NULL, &time);
1016 asink->length_time = (double)time / (double)GST_SECOND;
1017 }
1018 gst_query_unref (query);
1019 }
diff --git a/legacy/emotion/src/modules/emotion_gstreamer_pipeline.h b/legacy/emotion/src/modules/emotion_gstreamer_pipeline.h
deleted file mode 100644
index df98b6973b..0000000000
--- a/legacy/emotion/src/modules/emotion_gstreamer_pipeline.h
+++ /dev/null
@@ -1,17 +0,0 @@
1#ifndef __EMOTION_GSTREAMER_PIPELINE_H__
2#define __EMOTION_GSTREAMER_PIPELINE_H__
3
4
5#include <gst/gst.h>
6
7
8gboolean emotion_pipeline_pause (GstElement *pipeline);
9
10int emotion_pipeline_cdda_build (void *video, const char * device, unsigned int track);
11int emotion_pipeline_file_build (void *video, const char *file);
12int emotion_pipeline_uri_build (void *video, const char *uri);
13int emotion_pipeline_dvd_build (void *video, const char *device);
14int emotion_pipeline_cdda_track_count_get(void *video);
15
16
17#endif /* __EMOTION_GSTREAMER_PIPELINE_H__ */
diff --git a/legacy/emotion/src/modules/gstreamer/Makefile.am b/legacy/emotion/src/modules/gstreamer/Makefile.am
new file mode 100644
index 0000000000..f3519afcaa
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/Makefile.am
@@ -0,0 +1,33 @@
1
2MAINTAINERCLEANFILES = Makefile.in
3
4AM_CPPFLAGS = \
5-I$(top_srcdir) \
6-I$(top_srcdir)/src/lib \
7-DPACKAGE_BIN_DIR=\"$(bindir)\" \
8-DPACKAGE_LIB_DIR=\"$(libdir)\" \
9-DPACKAGE_DATA_DIR=\"$(datadir)/$(PACKAGE)\" \
10@EVAS_CFLAGS@ \
11@ECORE_CFLAGS@ \
12@GST_CFLAGS@
13
14if BUILD_GSTREAMER_MODULE
15
16pkgdir = $(libdir)/emotion
17
18pkg_LTLIBRARIES = gstreamer.la
19gstreamer_la_SOURCES = \
20emotion_gstreamer.c \
21emotion_gstreamer.h \
22emotion_gstreamer_pipeline.c \
23emotion_gstreamer_pipeline.h \
24emotion_gstreamer_pipeline_cdda.c \
25emotion_gstreamer_pipeline_dvd.c \
26emotion_gstreamer_pipeline_file.c \
27emotion_gstreamer_pipeline_uri.c
28gstreamer_la_LIBADD = @EVAS_LIBS@ @ECORE_LIBS@ @GST_LIBS@ $(top_builddir)/src/lib/libemotion.la
29gstreamer_la_LDFLAGS = -module -avoid-version
30gstreamer_la_LIBTOOLFLAGS = --tag=disable-static
31gstreamer_la_DEPENDENCIES = $(top_builddir)/config.h
32
33endif
\ No newline at end of file
diff --git a/legacy/emotion/src/modules/emotion_gstreamer.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c
index 856bba1200..dd8a5a9563 100644
--- a/legacy/emotion/src/modules/emotion_gstreamer.c
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c
@@ -221,7 +221,7 @@ static Emotion_Video_Module em_module =
    em_speed_get, /* speed_get */
    em_eject, /* eject */
    em_meta_get, /* meta_get */
-   
+
    NULL /* handle */
 };
 
diff --git a/legacy/emotion/src/modules/emotion_gstreamer.h b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h
index 7881dcd3b9..09f0fe61f1 100644
--- a/legacy/emotion/src/modules/emotion_gstreamer.h
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h
@@ -2,10 +2,14 @@
 #define __EMOTION_GSTREAMER_H__
 
 
+#include <Evas.h>
+#include <Ecore.h>
 #include <Ecore_Data.h>
 
 #include <gst/gst.h>
 
+#include "emotion_private.h"
+
 
 typedef struct _Emotion_Video_Sink Emotion_Video_Sink;
 
@@ -40,7 +44,7 @@ struct _Emotion_Gstreamer_Video
    /* eos */
    GstBus *eos_bus;
    Ecore_Timer *eos_timer;
-   
+
    /* Sinks */
    Ecore_List *video_sinks;
    Ecore_List *audio_sinks;
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c
new file mode 100644
index 0000000000..92431ec64d
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c
@@ -0,0 +1,541 @@
1#include <unistd.h>
2#include <fcntl.h>
3
4#include "emotion_private.h"
5#include "emotion_gstreamer.h"
6#include "emotion_gstreamer_pipeline.h"
7
8
9gboolean
10emotion_pipeline_pause (GstElement *pipeline)
11{
12 GstStateChangeReturn res;
13
14 res = gst_element_set_state ((pipeline), GST_STATE_PAUSED);
15 if (res == GST_STATE_CHANGE_FAILURE) {
16 g_print ("Emotion-Gstreamer ERROR: could not pause\n");
17 return 0;
18 }
19
20 res = gst_element_get_state ((pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
21 if (res != GST_STATE_CHANGE_SUCCESS) {
22 g_print ("Emotion-Gstreamer ERROR: could not complete pause\n");
23 return 0;
24 }
25
26 return 1;
27}
28
29/* Send the video frame to the evas object */
30void
31cb_handoff (GstElement *fakesrc,
32 GstBuffer *buffer,
33 GstPad *pad,
34 gpointer user_data)
35{
36 GstQuery *query;
37 void *buf[2];
38
39 Emotion_Gstreamer_Video *ev = ( Emotion_Gstreamer_Video *) user_data;
40 if (!ev)
41 return;
42
43 if (!ev->video_mute) {
44 if (!ev->obj_data)
45 ev->obj_data = (void*) malloc (GST_BUFFER_SIZE(buffer) * sizeof(void));
46
47 memcpy ( ev->obj_data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
48 buf[0] = GST_BUFFER_DATA(buffer);
49 buf[1] = buffer;
50 write(ev->fd_ev_write, buf, sizeof(buf));
51 }
52 else {
53 Emotion_Audio_Sink *asink;
54 asink = (Emotion_Audio_Sink *)ecore_list_index_goto (ev->audio_sinks, ev->audio_sink_nbr);
55 _emotion_video_pos_update(ev->obj, ev->position, asink->length_time);
56 }
57
58 query = gst_query_new_position (GST_FORMAT_TIME);
59 if (gst_pad_query (gst_pad_get_peer (pad), query)) {
60 gint64 position;
61
62 gst_query_parse_position (query, NULL, &position);
63 ev->position = (double)position / (double)GST_SECOND;
64 }
65 gst_query_unref (query);
66}
67
68void
69file_new_decoded_pad_cb (GstElement *decodebin,
70 GstPad *new_pad,
71 gboolean last,
72 gpointer user_data)
73{
74 Emotion_Gstreamer_Video *ev;
75 GstCaps *caps;
76 gchar *str;
77
78 ev = (Emotion_Gstreamer_Video *)user_data;
79 caps = gst_pad_get_caps (new_pad);
80 str = gst_caps_to_string (caps);
81 /* video stream */
82 if (g_str_has_prefix (str, "video/")) {
83 Emotion_Video_Sink *vsink;
84 GstElement *queue;
85 GstPad *videopad;
86
87 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
88 if (!vsink) return;
89 if (!ecore_list_append (ev->video_sinks, vsink)) {
90 free(vsink);
91 return;
92 }
93
94 queue = gst_element_factory_make ("queue", NULL);
95 vsink->sink = gst_element_factory_make ("fakesink", "videosink");
96 gst_bin_add_many (GST_BIN (ev->pipeline), queue, vsink->sink, NULL);
97 gst_element_link (queue, vsink->sink);
98 videopad = gst_element_get_pad (queue, "sink");
99 gst_pad_link (new_pad, videopad);
100 gst_object_unref (videopad);
101 if (ecore_list_count(ev->video_sinks) == 1) {
102 ev->ratio = (double)vsink->width / (double)vsink->height;
103 }
104 gst_element_set_state (queue, GST_STATE_PAUSED);
105 gst_element_set_state (vsink->sink, GST_STATE_PAUSED);
106 }
107 /* audio stream */
108 else if (g_str_has_prefix (str, "audio/")) {
109 Emotion_Audio_Sink *asink;
110 GstPad *audiopad;
111
112 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
113 if (!asink) return;
114 if (!ecore_list_append (ev->audio_sinks, asink)) {
115 free(asink);
116 return;
117 }
118
119 asink->sink = emotion_audio_sink_create (ev, ecore_list_index (ev->audio_sinks));
120 gst_bin_add (GST_BIN (ev->pipeline), asink->sink);
121 audiopad = gst_element_get_pad (asink->sink, "sink");
122 gst_pad_link(new_pad, audiopad);
123 gst_element_set_state (asink->sink, GST_STATE_PAUSED);
124 }
125}
126
127Emotion_Video_Sink *
128emotion_video_sink_new (Emotion_Gstreamer_Video *ev)
129{
130 Emotion_Video_Sink *vsink;
131
132 if (!ev) return NULL;
133
134 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
135 if (!vsink) return NULL;
136
137 if (!ecore_list_append (ev->video_sinks, vsink)) {
138 free (vsink);
139 return NULL;
140 }
141 return vsink;
142}
143
144void
145emotion_video_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink)
146{
147 if (!ev || !vsink) return;
148
149 if (ecore_list_goto (ev->video_sinks, vsink)) {
150 ecore_list_remove (ev->video_sinks);
151 free (vsink);
152 }
153}
154
155Emotion_Video_Sink *
156emotion_visualization_sink_create (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
157{
158 Emotion_Video_Sink *vsink;
159
160 if (!ev) return NULL;
161
162 vsink = emotion_video_sink_new (ev);
163 if (!vsink) return NULL;
164
165 vsink->sink = gst_bin_get_by_name (GST_BIN (asink->sink), "vissink1");
166 if (!vsink->sink) {
167 emotion_video_sink_free (ev, vsink);
168 return NULL;
169 }
170 vsink->width = 320;
171 vsink->height = 200;
172 ev->ratio = (double)vsink->width / (double)vsink->height;
173 vsink->fps_num = 25;
174 vsink->fps_den = 1;
175 vsink->fourcc = GST_MAKE_FOURCC ('A','R','G','B');
176 vsink->length_time = asink->length_time;
177
178 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
179 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
180 g_signal_connect (G_OBJECT (vsink->sink),
181 "handoff",
182 G_CALLBACK (cb_handoff), ev);
183 return vsink;
184}
185
186int
187emotion_pipeline_cdda_track_count_get(void *video)
188{
189 Emotion_Gstreamer_Video *ev;
190 GstBus *bus;
191 guint tracks_count = 0;
192 gboolean done;
193
194 ev = (Emotion_Gstreamer_Video *)video;
195 if (!ev) return tracks_count;
196
197 done = FALSE;
198 bus = gst_element_get_bus (ev->pipeline);
199 if (!bus) return tracks_count;
200
201 while (!done) {
202 GstMessage *message;
203
204 message = gst_bus_pop (bus);
205 if (message == NULL)
206 /* All messages read, we're done */
207 break;
208
209 switch (GST_MESSAGE_TYPE (message)) {
210 case GST_MESSAGE_TAG: {
211 GstTagList *tags;
212
213 gst_message_parse_tag (message, &tags);
214
215 gst_tag_list_get_uint (tags, GST_TAG_TRACK_COUNT, &tracks_count);
216 if (tracks_count) done = TRUE;
217 break;
218 }
219 case GST_MESSAGE_ERROR:
220 default:
221 break;
222 }
223 gst_message_unref (message);
224 }
225
226 gst_object_unref (GST_OBJECT (bus));
227
228 return tracks_count;
229}
230
231GstElement *
232emotion_audio_sink_create (Emotion_Gstreamer_Video *ev, int index)
233{
234 gchar buf[128];
235 GstElement *bin;
236 GstElement *audiobin;
237 GstElement *visbin = NULL;
238 GstElement *tee;
239 GstPad *teepad;
240 GstPad *binpad;
241
242 /* audio sink */
243 bin = gst_bin_new (NULL);
244 if (!bin) return NULL;
245
246 g_snprintf (buf, 128, "tee%d", index);
247 tee = gst_element_factory_make ("tee", buf);
248
249 /* audio part */
250 {
251 GstElement *queue;
252 GstElement *conv;
253 GstElement *resample;
254 GstElement *volume;
255 GstElement *sink;
256 GstPad *audiopad;
257 double vol;
258
259 audiobin = gst_bin_new (NULL);
260
261 queue = gst_element_factory_make ("queue", NULL);
262 conv = gst_element_factory_make ("audioconvert", NULL);
263 resample = gst_element_factory_make ("audioresample", NULL);
264 volume = gst_element_factory_make ("volume", "volume");
265 g_object_get (G_OBJECT (volume), "volume", &vol, NULL);
266 ev->volume = vol / 10.0;
267
268 if (index == 1)
269 sink = gst_element_factory_make ("autoaudiosink", NULL);
270 else
271 sink = gst_element_factory_make ("fakesink", NULL);
272
273 gst_bin_add_many (GST_BIN (audiobin),
274 queue, conv, resample, volume, sink, NULL);
275 gst_element_link_many (queue, conv, resample, volume, sink, NULL);
276
277 audiopad = gst_element_get_pad (queue, "sink");
278 gst_element_add_pad (audiobin, gst_ghost_pad_new ("sink", audiopad));
279 gst_object_unref (audiopad);
280 }
281
282 /* visualisation part */
283 {
284 GstElement *vis = NULL;
285 char *vis_name;
286
287 switch (ev->vis) {
288 case EMOTION_VIS_GOOM:
289 vis_name = "goom";
290 break;
291 case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
292 vis_name = "libvisual_bumpscope";
293 break;
294 case EMOTION_VIS_LIBVISUAL_CORONA:
295 vis_name = "libvisual_corona";
296 break;
297 case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
298 vis_name = "libvisual_dancingparticles";
299 break;
300 case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
301 vis_name = "libvisual_gdkpixbuf";
302 break;
303 case EMOTION_VIS_LIBVISUAL_G_FORCE:
304 vis_name = "libvisual_G-Force";
305 break;
306 case EMOTION_VIS_LIBVISUAL_GOOM:
307 vis_name = "libvisual_goom";
308 break;
309 case EMOTION_VIS_LIBVISUAL_INFINITE:
310 vis_name = "libvisual_infinite";
311 break;
312 case EMOTION_VIS_LIBVISUAL_JAKDAW:
313 vis_name = "libvisual_jakdaw";
314 break;
315 case EMOTION_VIS_LIBVISUAL_JESS:
316 vis_name = "libvisual_jess";
317 break;
318 case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
319 vis_name = "libvisual_lv_analyzer";
320 break;
321 case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
322 vis_name = "libvisual_lv_flower";
323 break;
324 case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
325 vis_name = "libvisual_lv_gltest";
326 break;
327 case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
328 vis_name = "libvisual_lv_scope";
329 break;
330 case EMOTION_VIS_LIBVISUAL_MADSPIN:
331 vis_name = "libvisual_madspin";
332 break;
333 case EMOTION_VIS_LIBVISUAL_NEBULUS:
334 vis_name = "libvisual_nebulus";
335 break;
336 case EMOTION_VIS_LIBVISUAL_OINKSIE:
337 vis_name = "libvisual_oinksie";
338 break;
339 case EMOTION_VIS_LIBVISUAL_PLASMA:
340 vis_name = "libvisual_plazma";
341 break;
342 default:
343 vis_name = "goom";
344 break;
345 }
346
347 g_snprintf (buf, 128, "vis%d", index);
348 if ((vis = gst_element_factory_make (vis_name, buf))) {
349 GstElement *queue;
350 GstElement *conv;
351 GstElement *cspace;
352 GstElement *sink;
353 GstPad *vispad;
354 GstCaps *caps;
355
356 g_snprintf (buf, 128, "visbin%d", index);
357 visbin = gst_bin_new (buf);
358
359 queue = gst_element_factory_make ("queue", NULL);
360 conv = gst_element_factory_make ("audioconvert", NULL);
361 cspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
362 g_snprintf (buf, 128, "vissink%d", index);
363 sink = gst_element_factory_make ("fakesink", buf);
364
365 gst_bin_add_many (GST_BIN (visbin),
366 queue, conv, vis, cspace, sink, NULL);
367 gst_element_link_many (queue, conv, vis, cspace, NULL);
368 caps = gst_caps_new_simple ("video/x-raw-rgb",
369 "bpp", G_TYPE_INT, 32,
370 "width", G_TYPE_INT, 320,
371 "height", G_TYPE_INT, 200,
372 NULL);
373 gst_element_link_filtered (cspace, sink, caps);
374
375 vispad = gst_element_get_pad (queue, "sink");
376 gst_element_add_pad (visbin, gst_ghost_pad_new ("sink", vispad));
377 gst_object_unref (vispad);
378 }
379 }
380
381 gst_bin_add_many (GST_BIN (bin), tee, audiobin, NULL);
382 if (visbin)
383 gst_bin_add (GST_BIN (bin), visbin);
384
385 binpad = gst_element_get_pad (audiobin, "sink");
386 teepad = gst_element_get_request_pad (tee, "src%d");
387 gst_pad_link (teepad, binpad);
388 gst_object_unref (teepad);
389 gst_object_unref (binpad);
390
391 if (visbin) {
392 binpad = gst_element_get_pad (visbin, "sink");
393 teepad = gst_element_get_request_pad (tee, "src%d");
394 gst_pad_link (teepad, binpad);
395 gst_object_unref (teepad);
396 gst_object_unref (binpad);
397 }
398
399 teepad = gst_element_get_pad (tee, "sink");
400 gst_element_add_pad (bin, gst_ghost_pad_new ("sink", teepad));
401 gst_object_unref (teepad);
402
403 return bin;
404}
405
406void
407emotion_streams_sinks_get (Emotion_Gstreamer_Video *ev, GstElement *decoder)
408{
409 GstIterator *it;
410 gpointer data;
411
412 ecore_list_first_goto (ev->video_sinks);
413 ecore_list_first_goto (ev->audio_sinks);
414
415 it = gst_element_iterate_src_pads (decoder);
416 while (gst_iterator_next (it, &data) == GST_ITERATOR_OK) {
417 GstPad *pad;
418 GstCaps *caps;
419 gchar *str;
420
421 pad = GST_PAD (data);
422
423 caps = gst_pad_get_caps (pad);
424 str = gst_caps_to_string (caps);
425 g_print ("caps !! %s\n", str);
426
427 /* video stream */
428 if (g_str_has_prefix (str, "video/")) {
429 Emotion_Video_Sink *vsink;
430
431 vsink = (Emotion_Video_Sink *)ecore_list_next (ev->video_sinks);
432
433 emotion_video_sink_fill (vsink, pad, caps);
434 }
435 /* audio stream */
436 else if (g_str_has_prefix (str, "audio/")) {
437 Emotion_Audio_Sink *asink;
438 gint index;
439
440 asink = (Emotion_Audio_Sink *)ecore_list_next (ev->audio_sinks);
441
442 emotion_audio_sink_fill (asink, pad, caps);
443
444 index = ecore_list_index (ev->audio_sinks);
445
446 if (ecore_list_count (ev->video_sinks) == 0) {
447 if (index == 1) {
448 Emotion_Video_Sink *vsink;
449
450 vsink = emotion_visualization_sink_create (ev, asink);
451 if (!vsink) goto finalize;
452 }
453 }
454 else {
455 gchar buf[128];
456 GstElement *visbin;
457
458 g_snprintf (buf, 128, "visbin%d", index);
459 visbin = gst_bin_get_by_name (GST_BIN (ev->pipeline), buf);
460 if (visbin) {
461 GstPad *srcpad;
462 GstPad *sinkpad;
463
464 sinkpad = gst_element_get_pad (visbin, "sink");
465 srcpad = gst_pad_get_peer (sinkpad);
466 gst_pad_unlink (srcpad, sinkpad);
467
468 gst_object_unref (srcpad);
469 gst_object_unref (sinkpad);
470 }
471 }
472 }
473 finalize:
474 gst_caps_unref (caps);
475 g_free (str);
476 gst_object_unref (pad);
477 }
478 gst_iterator_free (it);
479}
480
481void
482emotion_video_sink_fill (Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps)
483{
484 GstStructure *structure;
485 GstQuery *query;
486 const GValue *val;
487 gchar *str;
488
489 structure = gst_caps_get_structure (caps, 0);
490 str = gst_caps_to_string (caps);
491
492 gst_structure_get_int (structure, "width", &vsink->width);
493 gst_structure_get_int (structure, "height", &vsink->height);
494
495 vsink->fps_num = 1;
496 vsink->fps_den = 1;
497 val = gst_structure_get_value (structure, "framerate");
498 if (val) {
499 vsink->fps_num = gst_value_get_fraction_numerator (val);
500 vsink->fps_den = gst_value_get_fraction_denominator (val);
501 }
502 if (g_str_has_prefix(str, "video/x-raw-yuv")) {
503 val = gst_structure_get_value (structure, "format");
504 vsink->fourcc = gst_value_get_fourcc (val);
505 }
506 else if (g_str_has_prefix(str, "video/x-raw-rgb"))
507 vsink->fourcc = GST_MAKE_FOURCC ('A','R','G','B');
508 else
509 vsink->fourcc = 0;
510
511 query = gst_query_new_duration (GST_FORMAT_TIME);
512 if (gst_pad_query (pad, query)) {
513 gint64 time;
514
515 gst_query_parse_duration (query, NULL, &time);
516 vsink->length_time = (double)time / (double)GST_SECOND;
517 }
518 g_free (str);
519 gst_query_unref (query);
520}
521
522void
523emotion_audio_sink_fill (Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps)
524{
525 GstStructure *structure;
526 GstQuery *query;
527
528 structure = gst_caps_get_structure (caps, 0);
529
530 gst_structure_get_int (structure, "channels", &asink->channels);
531 gst_structure_get_int (structure, "rate", &asink->samplerate);
532
533 query = gst_query_new_duration (GST_FORMAT_TIME);
534 if (gst_pad_query (pad, query)) {
535 gint64 time;
536
537 gst_query_parse_duration (query, NULL, &time);
538 asink->length_time = (double)time / (double)GST_SECOND;
539 }
540 gst_query_unref (query);
541 }
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h
new file mode 100644
index 0000000000..f2a237ab77
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h
@@ -0,0 +1,38 @@
1#ifndef __EMOTION_GSTREAMER_PIPELINE_H__
2#define __EMOTION_GSTREAMER_PIPELINE_H__
3
4
5#include <gst/gst.h>
6
7
8gboolean emotion_pipeline_pause (GstElement *pipeline);
9
10int emotion_pipeline_cdda_build (void *video, const char * device, unsigned int track);
11int emotion_pipeline_file_build (void *video, const char *file);
12int emotion_pipeline_uri_build (void *video, const char *uri);
13int emotion_pipeline_dvd_build (void *video, const char *device);
14int emotion_pipeline_cdda_track_count_get(void *video);
15
16GstElement *emotion_audio_sink_create (Emotion_Gstreamer_Video *ev, int index);
17Emotion_Video_Sink *emotion_video_sink_new (Emotion_Gstreamer_Video *ev);
18void emotion_video_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink);
19Emotion_Video_Sink *emotion_visualization_sink_create (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink);
20
21void emotion_streams_sinks_get (Emotion_Gstreamer_Video *ev, GstElement *decoder);
22
23void emotion_video_sink_fill (Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps);
24
25void emotion_audio_sink_fill (Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps);
26
27void cb_handoff (GstElement *fakesrc,
28 GstBuffer *buffer,
29 GstPad *pad,
30 gpointer user_data);
31
32void file_new_decoded_pad_cb (GstElement *decodebin,
33 GstPad *new_pad,
34 gboolean last,
35 gpointer user_data);
36
37
38#endif /* __EMOTION_GSTREAMER_PIPELINE_H__ */
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c
new file mode 100644
index 0000000000..e8f483345b
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c
@@ -0,0 +1,123 @@
1
2#include "emotion_gstreamer.h"
3#include "emotion_gstreamer_pipeline.h"
4
5
6static Emotion_Audio_Sink *_emotion_audio_sink_new (Emotion_Gstreamer_Video *ev);
7
8static void _emotion_audio_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink);
9
10int
11emotion_pipeline_cdda_build (void *video, const char * device, unsigned int track)
12{
13 GstElement *cdiocddasrc;
14 Emotion_Video_Sink *vsink;
15 Emotion_Audio_Sink *asink;
16 Emotion_Gstreamer_Video *ev;
17/* GstFormat format; */
18/* gint64 tracks_count; */
19
20 ev = (Emotion_Gstreamer_Video *)video;
21 if (!ev) return 0;
22
23 cdiocddasrc = gst_element_factory_make ("cdiocddasrc", "src");
24 if (!cdiocddasrc) {
25 g_print ("cdiocddasrc element missing. Install it.\n");
26 goto failure_cdiocddasrc;
27 }
28
29 if (device)
30 g_object_set (G_OBJECT (cdiocddasrc), "device", device, NULL);
31
32 g_object_set (G_OBJECT (cdiocddasrc), "track", track, NULL);
33
34 asink = _emotion_audio_sink_new (ev);
35 if (!asink)
36 goto failure_emotion_sink;
37
38 asink->sink = emotion_audio_sink_create (ev, 1);
39 if (!asink->sink)
40 goto failure_gstreamer_sink;
41
42 gst_bin_add_many((GST_BIN (ev->pipeline)), cdiocddasrc, asink->sink, NULL);
43
44 if (!gst_element_link (cdiocddasrc, asink->sink))
45 goto failure_link;
46
47 vsink = emotion_visualization_sink_create (ev, asink);
48 if (!vsink) goto failure_link;
49
50 if (!emotion_pipeline_pause (ev->pipeline))
51 goto failure_gstreamer_pause;
52
53 {
54 GstQuery *query;
55 GstPad *pad;
56 GstCaps *caps;
57 GstStructure *structure;
58
59 /* should always be found */
60 pad = gst_element_get_pad (cdiocddasrc, "src");
61
62 caps = gst_pad_get_caps (pad);
63 structure = gst_caps_get_structure (GST_CAPS (caps), 0);
64
65 gst_structure_get_int (structure, "channels", &asink->channels);
66 gst_structure_get_int (structure, "rate", &asink->samplerate);
67
68 gst_caps_unref (caps);
69
70 query = gst_query_new_duration (GST_FORMAT_TIME);
71 if (gst_pad_query (pad, query)) {
72 gint64 time;
73
74 gst_query_parse_duration (query, NULL, &time);
75 asink->length_time = (double)time / (double)GST_SECOND;
76 vsink->length_time = asink->length_time;
77 }
78 gst_query_unref (query);
79 gst_object_unref (GST_OBJECT (pad));
80 }
81
82 return 1;
83
84 failure_gstreamer_pause:
85 emotion_video_sink_free (ev, vsink);
86 failure_link:
87 gst_bin_remove (GST_BIN (ev->pipeline), asink->sink);
88 failure_gstreamer_sink:
89 _emotion_audio_sink_free (ev, asink);
90 failure_emotion_sink:
91 gst_bin_remove (GST_BIN (ev->pipeline), cdiocddasrc);
92 failure_cdiocddasrc:
93
94 return 0;
95}
96
97static Emotion_Audio_Sink *
98_emotion_audio_sink_new (Emotion_Gstreamer_Video *ev)
99{
100 Emotion_Audio_Sink *asink;
101
102 if (!ev) return NULL;
103
104 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
105 if (!asink) return NULL;
106
107 if (!ecore_list_append (ev->audio_sinks, asink)) {
108 free (asink);
109 return NULL;
110 }
111 return asink;
112}
113
114static void
115_emotion_audio_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
116{
117 if (!ev || !asink) return;
118
119 if (ecore_list_goto (ev->audio_sinks, asink)) {
120 ecore_list_remove (ev->audio_sinks);
121 free (asink);
122 }
123}
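The builder above reads the channel count and sample rate from the cdiocddasrc pad caps and then asks the pad for its duration with a GST_FORMAT_TIME query. For reference, the same GStreamer 0.10-era query pattern applied to a whole prerolled element looks like the hedged sketch below; the helper name is hypothetical, and the conversion to seconds mirrors the length_time computation above.

#include <gst/gst.h>

/* Hypothetical helper: duration of a prerolled element in seconds,
 * or -1.0 if the query is not answered. */
static double
_element_duration_get (GstElement *element)
{
   GstQuery *query;
   gint64    duration = -1;
   double    seconds  = -1.0;

   query = gst_query_new_duration (GST_FORMAT_TIME);
   if (gst_element_query (element, query))
     {
        gst_query_parse_duration (query, NULL, &duration);
        if (duration >= 0)
          seconds = (double)duration / (double)GST_SECOND;
     }
   gst_query_unref (query);

   return seconds;
}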
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c
new file mode 100644
index 0000000000..2408817041
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c
@@ -0,0 +1,227 @@
1
2#include "emotion_gstreamer.h"
3#include "emotion_gstreamer_pipeline.h"
4
5
6static void dvd_pad_added_cb (GstElement *dvddemuxer,
7 GObject *new_pad,
8 gpointer user_data);
9
10static void dvd_no_more_pads_cb (GstElement *dvddemuxer,
11 gpointer user_data);
12
13 static volatile int no_more_pads = 0;
14
15
16int
17emotion_pipeline_dvd_build (void *video, const char *device)
18{
19 GstElement *dvdreadsrc;
20 GstElement *dvddemux;
21 Emotion_Gstreamer_Video *ev;
22
23 ev = (Emotion_Gstreamer_Video *)video;
24 if (!ev) return 0;
25
26 dvdreadsrc = gst_element_factory_make ("dvdreadsrc", "src");
27 if (!dvdreadsrc)
28 goto failure_dvdreadsrc;
29 if (device)
30 g_object_set (G_OBJECT (dvdreadsrc), "device", device, NULL);
31
32 dvddemux = gst_element_factory_make ("dvddemux", "dvddemux");
33 if (!dvddemux)
34 goto failure_dvddemux;
35 g_signal_connect (dvddemux, "pad-added",
36 G_CALLBACK (dvd_pad_added_cb), ev);
37 g_signal_connect (dvddemux, "no-more-pads",
38 G_CALLBACK (dvd_no_more_pads_cb), ev);
39
40 gst_bin_add_many (GST_BIN (ev->pipeline), dvdreadsrc, dvddemux, NULL);
41 if (!gst_element_link (dvdreadsrc, dvddemux))
42 goto failure_link;
43
44 if (!emotion_pipeline_pause (ev->pipeline))
45 goto failure_gstreamer_pause;
46
47   while (no_more_pads == 0) { /* spin until dvd_no_more_pads_cb() has fired */
48      g_usleep (1000); }
49 no_more_pads = 0;
50
51   /* Retrieve information about the streams */
52 ecore_list_first_goto (ev->video_sinks);
53 ecore_list_first_goto (ev->audio_sinks);
54
55 {
56 GstIterator *it;
57 gpointer data;
58
59 it = gst_element_iterate_src_pads (dvddemux);
60 while (gst_iterator_next (it, &data) == GST_ITERATOR_OK) {
61 GstPad *pad;
62 GstCaps *caps;
63 gchar *str;
64
65 pad = GST_PAD (data);
66
67 caps = gst_pad_get_caps (pad);
68 str = gst_caps_to_string (caps);
69 g_print ("caps !! %s\n", str);
70 /* video stream */
71 if (g_str_has_prefix (str, "video/mpeg")) {
72 Emotion_Video_Sink *vsink;
73 GstPad *sink_pad;
74 GstCaps *sink_caps;
75
76 vsink = (Emotion_Video_Sink *)ecore_list_next (ev->video_sinks);
77 sink_pad = gst_element_get_pad (gst_bin_get_by_name (GST_BIN (ev->pipeline), "mpeg2dec"), "src");
78 sink_caps = gst_pad_get_caps (sink_pad);
79         g_free (str); str = gst_caps_to_string (sink_caps); /* free the previous string before reuse */
80 g_print (" ** caps v !! %s\n", str);
81
82 emotion_video_sink_fill (vsink, sink_pad, sink_caps);
83
84 gst_caps_unref (sink_caps);
85 gst_object_unref (sink_pad);
86 }
87 /* audio stream */
88 else if (g_str_has_prefix (str, "audio/")) {
89 Emotion_Audio_Sink *asink;
90 GstPad *sink_pad;
91 GstCaps *sink_caps;
92
93 asink = (Emotion_Audio_Sink *)ecore_list_next (ev->audio_sinks);
94 sink_pad = gst_element_get_pad (gst_bin_get_by_name (GST_BIN (ev->pipeline), "a52dec"), "src");
95 sink_caps = gst_pad_get_caps (sink_pad);
96
97 emotion_audio_sink_fill (asink, sink_pad, sink_caps);
98 }
99 gst_caps_unref (caps);
100 g_free (str);
101 gst_object_unref (pad);
102 }
103 gst_iterator_free (it);
104 }
105
106   /* The first vsink in the list is always a valid Emotion_Video_Sink *. */
107   /* If no video stream was found, it is the visualisation sink.         */
108 {
109 Emotion_Video_Sink *vsink;
110
111 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
112 if (vsink && vsink->sink) {
113 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
114 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
115 g_signal_connect (G_OBJECT (vsink->sink),
116 "handoff",
117 G_CALLBACK (cb_handoff), ev);
118 }
119 }
120
121 return 1;
122
123 failure_gstreamer_pause:
124 failure_link:
125 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
126 gst_bin_remove (GST_BIN (ev->pipeline), dvddemux);
127 failure_dvddemux:
128 gst_bin_remove (GST_BIN (ev->pipeline), dvdreadsrc);
129 failure_dvdreadsrc:
130
131 return 0;
132}
133
134static void
135dvd_pad_added_cb (GstElement *dvddemuxer,
136 GObject *new_pad,
137 gpointer user_data)
138{
139 Emotion_Gstreamer_Video *ev;
140 GstCaps *caps;
141 gchar *str;
142
143 ev = (Emotion_Gstreamer_Video *)user_data;
144 caps = gst_pad_get_caps (GST_PAD (new_pad));
145 str = gst_caps_to_string (caps);
146 /* video stream */
147 if (g_str_has_prefix (str, "video/mpeg")) {
148 Emotion_Video_Sink *vsink;
149 GstElement *queue;
150 GstElement *decoder;
151 GstPad *videopad;
152
153 vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
154 if (!vsink) return;
155 if (!ecore_list_append (ev->video_sinks, vsink)) {
156 free(vsink);
157 return;
158 }
159
160 queue = gst_element_factory_make ("queue", NULL);
161 decoder = gst_element_factory_make ("mpeg2dec", "mpeg2dec");
162 vsink->sink = gst_element_factory_make ("fakesink", "videosink");
163 gst_bin_add_many (GST_BIN (ev->pipeline), queue, decoder, vsink->sink, NULL);
164 gst_element_link (queue, decoder);
165 gst_element_link (decoder, vsink->sink);
166 videopad = gst_element_get_pad (queue, "sink");
167 gst_pad_link (GST_PAD (new_pad), videopad);
168 gst_object_unref (videopad);
169 if (ecore_list_count(ev->video_sinks) == 1) {
170 ev->ratio = (double)vsink->width / (double)vsink->height;
171 }
172 gst_element_set_state (queue, GST_STATE_PAUSED);
173 gst_element_set_state (decoder, GST_STATE_PAUSED);
174 gst_element_set_state (vsink->sink, GST_STATE_PAUSED);
175 }
176 /* audio stream */
177 else if (g_str_has_prefix (str, "audio/")) {
178 Emotion_Audio_Sink *asink;
179 GstElement *queue;
180 GstElement *decoder;
181 GstElement *conv;
182 GstElement *resample;
183 GstElement *volume;
184 GstPad *audiopad;
185 double vol;
186
187 asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
188 if (!asink) return;
189 if (!ecore_list_append (ev->audio_sinks, asink)) {
190 free(asink);
191 return;
192 }
193
194 queue = gst_element_factory_make ("queue", NULL);
195 decoder = gst_element_factory_make ("a52dec", "a52dec");
196 conv = gst_element_factory_make ("audioconvert", NULL);
197 resample = gst_element_factory_make ("audioresample", NULL);
198 volume = gst_element_factory_make ("volume", "volume");
199 g_object_get (G_OBJECT (volume), "volume", &vol, NULL);
200 ev->volume = vol / 10.0;
201
202 /* FIXME: must manage several audio streams */
203 asink->sink = gst_element_factory_make ("fakesink", NULL);
204
205 gst_bin_add_many (GST_BIN (ev->pipeline),
206 queue, decoder, conv, resample, volume, asink->sink, NULL);
207 gst_element_link_many (queue, decoder, conv, resample, volume, asink->sink, NULL);
208
209 audiopad = gst_element_get_pad (queue, "sink");
210 gst_pad_link (GST_PAD (new_pad), audiopad);
211 gst_object_unref (audiopad);
212
213 gst_element_set_state (queue, GST_STATE_PAUSED);
214 gst_element_set_state (decoder, GST_STATE_PAUSED);
215 gst_element_set_state (conv, GST_STATE_PAUSED);
216 gst_element_set_state (resample, GST_STATE_PAUSED);
217 gst_element_set_state (volume, GST_STATE_PAUSED);
218 gst_element_set_state (asink->sink, GST_STATE_PAUSED);
219 }
220}
221
222static void
223dvd_no_more_pads_cb (GstElement *dvddemuxer,
224 gpointer user_data)
225{
226 no_more_pads = 1;
227}
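emotion_pipeline_dvd_build() above spins on the no_more_pads flag until dvd_no_more_pads_cb() is invoked from GStreamer's streaming thread. A hedged sketch of one alternative is to block on a GCond instead of polling; it assumes GLib 2.32 or newer for statically allocated GMutex/GCond (on the GLib of this era they would be created with g_mutex_new() and g_cond_new()), and the names are illustrative only.

#include <glib.h>
#include <gst/gst.h>

static GMutex   _pads_mutex;           /* zero-initialised statics need GLib >= 2.32 */
static GCond    _pads_cond;
static gboolean _pads_done = FALSE;

/* Would take the place of dvd_no_more_pads_cb(): wake the waiter
 * instead of setting a flag that the caller polls. */
static void
_no_more_pads_cb (GstElement *demuxer, gpointer user_data)
{
   (void)demuxer;
   (void)user_data;
   g_mutex_lock (&_pads_mutex);
   _pads_done = TRUE;
   g_cond_signal (&_pads_cond);
   g_mutex_unlock (&_pads_mutex);
}

/* Would take the place of the while (no_more_pads == 0) loop. */
static void
_wait_for_no_more_pads (void)
{
   g_mutex_lock (&_pads_mutex);
   while (!_pads_done)
     g_cond_wait (&_pads_cond, &_pads_mutex);
   _pads_done = FALSE;
   g_mutex_unlock (&_pads_mutex);
}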
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c
new file mode 100644
index 0000000000..6a76fc63fe
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c
@@ -0,0 +1,61 @@
1
2#include "emotion_gstreamer.h"
3#include "emotion_gstreamer_pipeline.h"
4
5int
6emotion_pipeline_file_build (void *video, const char *file)
7{
8 GstElement *filesrc;
9 GstElement *decodebin;
10 Emotion_Gstreamer_Video *ev;
11
12 ev = (Emotion_Gstreamer_Video *)video;
13 if (!ev) return 0;
14
15 filesrc = gst_element_factory_make ("filesrc", "src");
16 if (!filesrc)
17 goto failure_filesrc;
18 g_object_set (G_OBJECT (filesrc), "location", file, NULL);
19
20 decodebin = gst_element_factory_make ("decodebin", "decodebin");
21 if (!decodebin)
22 goto failure_decodebin;
23 g_signal_connect (decodebin, "new-decoded-pad",
24 G_CALLBACK (file_new_decoded_pad_cb), ev);
25
26 gst_bin_add_many (GST_BIN (ev->pipeline), filesrc, decodebin, NULL);
27 if (!gst_element_link (filesrc, decodebin))
28 goto failure_link;
29
30 if (!emotion_pipeline_pause (ev->pipeline))
31 goto failure_gstreamer_pause;
32
33 emotion_streams_sinks_get (ev, decodebin);
34
35   /* The first vsink in the list is always a valid Emotion_Video_Sink *. */
36   /* If no video stream was found, it is the visualisation sink.         */
37 {
38 Emotion_Video_Sink *vsink;
39
40 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
41 if (vsink && vsink->sink) {
42 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
43 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
44 g_signal_connect (G_OBJECT (vsink->sink),
45 "handoff",
46 G_CALLBACK (cb_handoff), ev);
47 }
48 }
49
50 return 1;
51
52 failure_gstreamer_pause:
53 failure_link:
54 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
55 gst_bin_remove (GST_BIN (ev->pipeline), decodebin);
56 failure_decodebin:
57 gst_bin_remove (GST_BIN (ev->pipeline), filesrc);
58 failure_filesrc:
59
60 return 0;
61}
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c
new file mode 100644
index 0000000000..f647a85a10
--- /dev/null
+++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c
@@ -0,0 +1,63 @@
1
2#include "emotion_gstreamer.h"
3#include "emotion_gstreamer_pipeline.h"
4
5int
6emotion_pipeline_uri_build (void *video, const char *uri)
7{
8 GstElement *src;
9 GstElement *decodebin;
10 Emotion_Gstreamer_Video *ev;
11
12 ev = (Emotion_Gstreamer_Video *)video;
13 if (!ev) return 0;
14
15   if (!gst_uri_is_valid (uri)) /* make_from_uri() below already rejects unsupported protocols */
16 goto failure_src;
17 src = gst_element_make_from_uri (GST_URI_SRC, uri, "src");
18 if (!src)
19 goto failure_src;
20 g_object_set (G_OBJECT (src), "location", uri, NULL);
21
22 decodebin = gst_element_factory_make ("decodebin", "decodebin");
23 if (!decodebin)
24 goto failure_decodebin;
25 g_signal_connect (decodebin, "new-decoded-pad",
26 G_CALLBACK (file_new_decoded_pad_cb), ev);
27
28 gst_bin_add_many (GST_BIN (ev->pipeline), src, decodebin, NULL);
29 if (!gst_element_link (src, decodebin))
30 goto failure_link;
31
32 if (!emotion_pipeline_pause (ev->pipeline))
33 goto failure_gstreamer_pause;
34
35 emotion_streams_sinks_get (ev, decodebin);
36
37   /* The first vsink in the list is always a valid Emotion_Video_Sink *. */
38   /* If no video stream was found, it is the visualisation sink.         */
39 {
40 Emotion_Video_Sink *vsink;
41
42 vsink = (Emotion_Video_Sink *)ecore_list_first_goto (ev->video_sinks);
43 if (vsink && vsink->sink) {
44 g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
45 g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
46 g_signal_connect (G_OBJECT (vsink->sink),
47 "handoff",
48 G_CALLBACK (cb_handoff), ev);
49 }
50 }
51
52 return 1;
53
54 failure_gstreamer_pause:
55 failure_link:
56 gst_element_set_state (ev->pipeline, GST_STATE_NULL);
57 gst_bin_remove (GST_BIN (ev->pipeline), decodebin);
58 failure_decodebin:
59 gst_bin_remove (GST_BIN (ev->pipeline), src);
60 failure_src:
61
62 return 0;
63}
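The file, URI and DVD builders all end the same way: the first video sink (a fakesink in the DVD path, configured with sync and signal-handoffs in all three) gets cb_handoff connected to its "handoff" signal, which is presumably how decoded buffers reach the module. cb_handoff itself is only declared in emotion_gstreamer_pipeline.h; as a hedged illustration of the GStreamer 0.10 handoff pattern only, and not of the module's actual callback, a minimal handler receives each buffer like this:

#include <gst/gst.h>

/* Minimal, hypothetical "handoff" handler for a fakesink created with
 * signal-handoffs=TRUE: every buffer that reaches the sink is passed
 * here before being discarded. GST_BUFFER_DATA/GST_BUFFER_SIZE are the
 * 0.10 accessors; GStreamer 1.x would use gst_buffer_map() instead. */
static void
_handoff_cb (GstElement *fakesink,
             GstBuffer  *buffer,
             GstPad     *pad,
             gpointer    user_data)
{
   guint8 *data = GST_BUFFER_DATA (buffer);
   guint   size = GST_BUFFER_SIZE (buffer);

   (void)fakesink;
   (void)user_data;
   g_print ("handoff: %u bytes at %p from pad %s\n",
            size, (void *)data, GST_PAD_NAME (pad));
}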
diff --git a/legacy/emotion/src/modules/xine/Makefile.am b/legacy/emotion/src/modules/xine/Makefile.am
new file mode 100644
index 0000000000..6ae4990882
--- /dev/null
+++ b/legacy/emotion/src/modules/xine/Makefile.am
@@ -0,0 +1,30 @@
1
2MAINTAINERCLEANFILES = Makefile.in
3
4AM_CPPFLAGS = \
5-I$(top_srcdir) \
6-I$(top_srcdir)/src/lib \
7-I$(top_srcdir)/src/modules \
8-DPACKAGE_BIN_DIR=\"$(bindir)\" \
9-DPACKAGE_LIB_DIR=\"$(libdir)\" \
10-DPACKAGE_DATA_DIR=\"$(datadir)/$(PACKAGE)\" \
11@EVAS_CFLAGS@ \
12@ECORE_CFLAGS@ \
13@XINE_CFLAGS@
14
15if BUILD_XINE_MODULE
16
17pkgdir = $(libdir)/emotion
18
19pkg_LTLIBRARIES = xine.la
20
21xine_la_SOURCES = \
22emotion_xine.c \
23emotion_xine.h \
24emotion_xine_vo_out.c
25xine_la_LIBADD = @EVAS_LIBS@ @ECORE_LIBS@ @XINE_LIBS@ $(top_builddir)/src/lib/libemotion.la -lpthread
26xine_la_LDFLAGS = -module -avoid-version
27xine_la_LIBTOOLFLAGS = --tag=disable-static
28xine_la_DEPENDENCIES = $(top_builddir)/config.h
29
30endif
diff --git a/legacy/emotion/src/modules/emotion_xine.c b/legacy/emotion/src/modules/xine/emotion_xine.c
index 41624af87f..41624af87f 100644
--- a/legacy/emotion/src/modules/emotion_xine.c
+++ b/legacy/emotion/src/modules/xine/emotion_xine.c
diff --git a/legacy/emotion/src/modules/emotion_xine.h b/legacy/emotion/src/modules/xine/emotion_xine.h
index 8a9efcc426..8a9efcc426 100644
--- a/legacy/emotion/src/modules/emotion_xine.h
+++ b/legacy/emotion/src/modules/xine/emotion_xine.h
diff --git a/legacy/emotion/src/modules/emotion_xine_vo_out.c b/legacy/emotion/src/modules/xine/emotion_xine_vo_out.c
index 439ae92509..439ae92509 100644
--- a/legacy/emotion/src/modules/emotion_xine_vo_out.c
+++ b/legacy/emotion/src/modules/xine/emotion_xine_vo_out.c