author     doursse <doursse>  2006-03-12 09:56:13 +0000
committer  doursse <doursse@7cbeb6ba-43b4-40fd-8cce-4c39aea84d33>  2006-03-12 09:56:13 +0000
commit     10618e769a28e3733be3471e09859c7ab6fcf852 (patch)
tree       28ff3db645b7d4791b4fef9f74f42aa598176552
parent     4cefa67d64e7f8c543236070fcdbfabe3e211a86 (diff)
visualization stuff for emotion_gstreamer. There's a GStreamer bug on the duration of mp3 files (); ogg files are handled correctly
SVN revision: 21217
Diffstat (limited to '')
-rw-r--r--  legacy/emotion/src/lib/emotion_private.h        127
-rw-r--r--  legacy/emotion/src/modules/emotion_gstreamer.c  396
-rw-r--r--  legacy/emotion/src/modules/emotion_gstreamer.h    1
-rw-r--r--  legacy/emotion/src/modules/emotion_xine.c        26
-rw-r--r--  legacy/emotion/src/modules/emotion_xine.h         1
5 files changed, 444 insertions, 107 deletions
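Context for the change: the patch adds an Emotion_Vis enum plus vis_set/vis_get slots to the Emotion_Video_Module vtable, and both the GStreamer and Xine backends store the selected visualisation. A minimal caller-side sketch of how those two new slots could be exercised; the helper name and the way the module/video handles are obtained are illustrative assumptions, not part of this patch:

#include "emotion_private.h"

/* Hypothetical helper: switch an already-opened stream to the GOOM
 * visualisation through the new vtable entries. "module" and "video"
 * are assumed to come from emotion's usual backend open path; nothing
 * in this sketch is added by the patch itself. */
static void
example_use_goom(Emotion_Video_Module *module, void *video)
{
   if (!module || !module->vis_set || !module->vis_get) return;
   if (module->vis_get(video) != EMOTION_VIS_GOOM)
     module->vis_set(video, EMOTION_VIS_GOOM);
}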
diff --git a/legacy/emotion/src/lib/emotion_private.h b/legacy/emotion/src/lib/emotion_private.h
index 4ad012caea..ac07d62486 100644
--- a/legacy/emotion/src/lib/emotion_private.h
+++ b/legacy/emotion/src/lib/emotion_private.h
@@ -20,6 +20,7 @@
 #define META_TRACK_DISCID 7
 
 typedef enum _Emotion_Format Emotion_Format;
+typedef enum _Emotion_Vis Emotion_Vis;
 typedef struct _Emotion_Video_Module Emotion_Video_Module;
 
 enum _Emotion_Format
@@ -31,61 +32,85 @@ enum _Emotion_Format
    EMOTION_FORMAT_BGRA
 };
 
+enum _Emotion_Vis
+{
+   EMOTION_VIS_GOOM,
+   EMOTION_VIS_LIBVISUAL_BUMPSCOPE,
+   EMOTION_VIS_LIBVISUAL_CORONA,
+   EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES,
+   EMOTION_VIS_LIBVISUAL_GDKPIXBUF,
+   EMOTION_VIS_LIBVISUAL_G_FORCE,
+   EMOTION_VIS_LIBVISUAL_GOOM,
+   EMOTION_VIS_LIBVISUAL_INFINITE,
+   EMOTION_VIS_LIBVISUAL_JAKDAW,
+   EMOTION_VIS_LIBVISUAL_JESS,
+   EMOTION_VIS_LIBVISUAL_LV_ANALYSER,
+   EMOTION_VIS_LIBVISUAL_LV_FLOWER,
+   EMOTION_VIS_LIBVISUAL_LV_GLTEST,
+   EMOTION_VIS_LIBVISUAL_LV_SCOPE,
+   EMOTION_VIS_LIBVISUAL_MADSPIN,
+   EMOTION_VIS_LIBVISUAL_NEBULUS,
+   EMOTION_VIS_LIBVISUAL_OINKSIE,
+   EMOTION_VIS_LIBVISUAL_PLASMA
+};
+
 struct _Emotion_Video_Module
 {
    unsigned char  (*init) (Evas_Object *obj, void **video);
    int            (*shutdown) (void *video);
    unsigned char  (*file_open) (const char *file, Evas_Object *obj, void *video);
    void           (*file_close) (void *ef);
    void           (*play) (void *ef, double pos);
    void           (*stop) (void *ef);
    void           (*size_get) (void *ef, int *w, int *h);
    void           (*pos_set) (void *ef, double pos);
-   double         (*len_get) (void *ef);
-   int            (*fps_num_get) (void *ef);
-   int            (*fps_den_get) (void *ef);
-   double         (*fps_get) (void *ef);
-   double         (*pos_get) (void *ef);
-   double         (*ratio_get) (void *ef);
-   int            (*video_handled) (void *ef);
-   int            (*audio_handled) (void *ef);
-   int            (*seekable) (void *ef);
-   void           (*frame_done) (void *ef);
+   void           (*vis_set) (void *ef, Emotion_Vis vis);
+   double         (*len_get) (void *ef);
+   int            (*fps_num_get) (void *ef);
+   int            (*fps_den_get) (void *ef);
+   double         (*fps_get) (void *ef);
+   double         (*pos_get) (void *ef);
+   Emotion_Vis    (*vis_get) (void *ef);
+   double         (*ratio_get) (void *ef);
+   int            (*video_handled) (void *ef);
+   int            (*audio_handled) (void *ef);
+   int            (*seekable) (void *ef);
+   void           (*frame_done) (void *ef);
    Emotion_Format (*format_get) (void *ef);
    void           (*video_data_size_get) (void *ef, int *w, int *h);
    int            (*yuv_rows_get) (void *ef, int w, int h, unsigned char **yrows, unsigned char **urows, unsigned char **vrows);
    int            (*bgra_data_get) (void *ef, unsigned char **bgra_data);
    void           (*event_feed) (void *ef, int event);
    void           (*event_mouse_button_feed) (void *ef, int button, int x, int y);
    void           (*event_mouse_move_feed) (void *ef, int x, int y);
    int            (*video_channel_count) (void *ef);
    void           (*video_channel_set) (void *ef, int channel);
    int            (*video_channel_get) (void *ef);
    const char *   (*video_channel_name_get) (void *ef, int channel);
    void           (*video_channel_mute_set) (void *ef, int mute);
    int            (*video_channel_mute_get) (void *ef);
    int            (*audio_channel_count) (void *ef);
    void           (*audio_channel_set) (void *ef, int channel);
    int            (*audio_channel_get) (void *ef);
    const char *   (*audio_channel_name_get) (void *ef, int channel);
    void           (*audio_channel_mute_set) (void *ef, int mute);
    int            (*audio_channel_mute_get) (void *ef);
    void           (*audio_channel_volume_set) (void *ef, double vol);
    double         (*audio_channel_volume_get) (void *ef);
    int            (*spu_channel_count) (void *ef);
    void           (*spu_channel_set) (void *ef, int channel);
    int            (*spu_channel_get) (void *ef);
    const char *   (*spu_channel_name_get) (void *ef, int channel);
    void           (*spu_channel_mute_set) (void *ef, int mute);
    int            (*spu_channel_mute_get) (void *ef);
    int            (*chapter_count) (void *ef);
    void           (*chapter_set) (void *ef, int chapter);
    int            (*chapter_get) (void *ef);
    const char *   (*chapter_name_get) (void *ef, int chapter);
    void           (*speed_set) (void *ef, double speed);
    double         (*speed_get) (void *ef);
    int            (*eject) (void *ef);
    const char *   (*meta_get) (void *ef, int meta);
 
    void *handle;
 };
diff --git a/legacy/emotion/src/modules/emotion_gstreamer.c b/legacy/emotion/src/modules/emotion_gstreamer.c
index 9989185fba..a3be6443da 100644
--- a/legacy/emotion/src/modules/emotion_gstreamer.c
+++ b/legacy/emotion/src/modules/emotion_gstreamer.c
@@ -6,7 +6,6 @@
 #include "emotion_gstreamer.h"
 
 
-
 /* Callbacks to handle errors and EOS */
 static void cb_end_of_stream (GstElement *thread,
                               gpointer data);
@@ -30,7 +29,21 @@ static void new_decoded_pad_cb (GstElement *decodebin,
                                     gboolean last,
                                     gpointer user_data);
 
+GstElement *
+make_queue ()
+{
+   GstElement *queue = gst_element_factory_make ("queue", NULL);
+   g_object_set (queue,
+                 "max-size-time", (guint64) 3 * GST_SECOND,
+                 "max-size-bytes", (guint32) 0,
+                 "max-size-buffers", (guint32) 0, NULL);
+
+   return queue;
+}
+
 static int          _em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh);
+static GstElement *_em_audio_sink_create (Emotion_Gstreamer_Video *ev,
+                                          int index);
 
 /* Interface */
 
@@ -49,11 +62,14 @@ static void em_size_get(void *video,
                           int *height);
 static void    em_pos_set(void *video,
                           double pos);
+static void    em_vis_set(void *video,
+                          Emotion_Vis vis);
 static double  em_len_get(void *video);
 static int     em_fps_num_get(void *video);
 static int     em_fps_den_get(void *video);
 static double  em_fps_get(void *video);
 static double  em_pos_get(void *video);
+static Emotion_Vis em_vis_get(void *video);
 static double  em_ratio_get(void *video);
 
 static int     em_video_handled(void *video);
@@ -120,11 +136,13 @@ static Emotion_Video_Module em_module =
    em_stop, /* stop */
    em_size_get, /* size_get */
    em_pos_set, /* pos_set */
+   em_vis_set, /* vis_set */
    em_len_get, /* len_get */
    em_fps_num_get, /* fps_num_get */
    em_fps_den_get, /* fps_den_get */
    em_fps_get, /* fps_get */
    em_pos_get, /* pos_get */
+   em_vis_get, /* vis_get */
    em_ratio_get, /* ratio_get */
    em_video_handled, /* video_handled */
    em_audio_handled, /* audio_handled */
@@ -201,8 +219,8 @@ em_init(Evas_Object *obj,
    decodebin = gst_element_factory_make ("decodebin", "decodebin");
    if (!decodebin)
      gst_object_unref (GST_OBJECT (ev->pipeline));
    g_signal_connect (decodebin, "new-decoded-pad",
                      G_CALLBACK (new_decoded_pad_cb), ev);
 
    gst_bin_add_many (GST_BIN (ev->pipeline), filesrc, decodebin, NULL);
    gst_element_link (filesrc, decodebin);
@@ -217,11 +235,14 @@ em_init(Evas_Object *obj,
 
    /* Default values */
    ev->ratio = 1.0;
+   ev->video_sink_nbr = 0;
+   ev->audio_sink_nbr = 0;
+   ev->vis = EMOTION_VIS_LIBVISUAL_GOOM;
 
    /* Create the file descriptors */
    if (pipe(fds) == 0)
      {
        ev->fd_ev_read = fds[0];
        ev->fd_ev_write = fds[1];
        fcntl(ev->fd_ev_read, F_SETFL, O_NONBLOCK);
        ev->fd_ev_handler = ecore_main_fd_handler_add(ev->fd_ev_read,
@@ -269,7 +290,7 @@ em_file_open(const char *file,
    GstStateChangeReturn res;
 
    ev = (Emotion_Gstreamer_Video *)video;
-   printf ("Open file gstreamer...\n");
+   printf ("Open file gstreamer... %s\n", file);
 
    /* Evas Object */
    ev->obj = obj;
@@ -310,7 +331,6 @@ em_file_open(const char *file,
 
          caps = gst_pad_get_caps (pad);
          str = gst_caps_to_string (caps);
-         g_print ("%s\n", str);
          /* video stream */
          if (g_str_has_prefix (str, "video/")) {
             Emotion_Video_Sink *vsink;
@@ -324,6 +344,8 @@ em_file_open(const char *file,
             gst_structure_get_int (structure, "width", &vsink->width);
             gst_structure_get_int (structure, "height", &vsink->height);
 
+            vsink->fps_num = 1;
+            vsink->fps_den = 1;
             val = gst_structure_get_value (structure, "framerate");
             if (val) {
                vsink->fps_num = gst_value_get_fraction_numerator (val);
@@ -358,11 +380,11 @@ em_file_open(const char *file,
             Emotion_Audio_Sink *asink;
             GstStructure       *structure;
             GstQuery           *query;
+            gint                index;
 
             asink = (Emotion_Audio_Sink *)ecore_list_next (ev->audio_sinks);
 
             structure = gst_caps_get_structure (GST_CAPS (caps), 0);
-
             gst_structure_get_int (structure, "channels", &asink->channels);
             gst_structure_get_int (structure, "rate", &asink->samplerate);
 
@@ -375,16 +397,101 @@ em_file_open(const char *file,
                asink->length_time = (double)time / (double)GST_SECOND;
             }
             gst_query_unref (query);
-         }
 
+            index = ecore_list_index (ev->audio_sinks);
+
+            if (ecore_list_nodes (ev->video_sinks) == 0) {
+               if (index == 1) {
+                  gchar               buf[128];
+                  Emotion_Video_Sink *vsink;
+
+                  vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
+                  if (!vsink) return 0;
+                  if (!ecore_list_append (ev->video_sinks, vsink)) {
+                     free(vsink);
+                     goto finalize;
+                  }
+                  g_snprintf (buf, 128, "vissink%d", index);
+                  vsink->sink = gst_bin_get_by_name (GST_BIN (asink->sink), buf);
+                  if (!vsink->sink) {
+                     free(vsink);
+                     goto finalize;
+                  }
+                  vsink->width = 320;
+                  vsink->height = 200;
+                  ev->ratio = (double)vsink->width / (double)vsink->height;
+                  vsink->fps_num = 25;
+                  vsink->fps_den = 1;
+                  vsink->fourcc = GST_MAKE_FOURCC ('A','R','G','B');
+                  vsink->length_time = asink->length_time;
+               }
+            }
+            else {
+               gchar       buf[128];
+               GstElement *visbin;
+
+               g_snprintf (buf, 128, "visbin%d", index);
+               g_print ("vis : %s\n", buf);
+               visbin = gst_bin_get_by_name (GST_BIN (ev->pipeline), buf);
+               if (visbin) {
+                  GstPad *srcpad;
+                  GstPad *sinkpad;
+
+                  sinkpad = gst_element_get_pad (visbin, "sink");
+                  srcpad = gst_pad_get_peer (sinkpad);
+                  gst_pad_unlink (srcpad, sinkpad);
+
+                  gst_object_unref (srcpad);
+                  gst_object_unref (sinkpad);
+               }
+            }
+         }
+   finalize:
          g_free (str);
          gst_object_unref (pad);
       }
       gst_iterator_free (it);
    }
 
+   /* The first vsink is a valid Emotion_Video_Sink * */
+   /* If no video stream is found, it's a visualisation sink */
+   {
+      Emotion_Video_Sink *vsink;
+
+      vsink = (Emotion_Video_Sink *)ecore_list_goto_first (ev->video_sinks);
+      if (vsink && vsink->sink) {
+         g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
+         g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
+         g_signal_connect (G_OBJECT (vsink->sink),
+                           "handoff",
+                           G_CALLBACK (cb_handoff), ev);
+      }
+   }
+
    ev->position = 0.0;
 
+   {
+      /* to recap: */
+      Emotion_Video_Sink *vsink;
+      Emotion_Audio_Sink *asink;
+
+      vsink = (Emotion_Video_Sink *)ecore_list_goto_first (ev->video_sinks);
+      if (vsink) {
+         g_print ("video : \n");
+         g_print (" size : %dx%d\n", vsink->width, vsink->height);
+         g_print (" fps : %dx%d\n", vsink->fps_num, vsink->fps_den);
+         g_print (" length : %f\n", vsink->length_time);
+      }
+
+      asink = (Emotion_Audio_Sink *)ecore_list_goto_first (ev->audio_sinks);
+      if (asink) {
+         g_print ("audio : \n");
+         g_print (" chan : %d\n", asink->channels);
+         g_print (" rate : %d\n", asink->samplerate);
+         g_print (" length : %f\n", asink->length_time);
+      }
+   }
+
    return 1;
 }
 
@@ -489,6 +596,18 @@ em_pos_set(void *video,
    ev->seek_to_pos = pos;
 }
 
+static void
+em_vis_set(void *video,
+           Emotion_Vis vis)
+{
+   Emotion_Gstreamer_Video *ev;
+
+   ev = (Emotion_Gstreamer_Video *)video;
+
+   if (ev->vis == vis) return;
+   ev->vis = vis;
+}
+
 static double
 em_len_get(void *video)
 {
@@ -559,6 +678,16 @@ em_pos_get(void *video)
    return ev->position;
 }
 
+static Emotion_Vis
+em_vis_get(void *video)
+{
+   Emotion_Gstreamer_Video *ev;
+
+   ev = (Emotion_Gstreamer_Video *)video;
+
+   return ev->vis;
+}
+
 static double
 em_ratio_get(void *video)
 {
@@ -1108,22 +1237,22 @@ cb_handoff (GstElement *fakesrc,
    if (!ev)
      return;
 
    if (!ev->obj_data)
      ev->obj_data = (void*) malloc (GST_BUFFER_SIZE(buffer) * sizeof(void));
 
    memcpy ( ev->obj_data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
    buf[0] = GST_BUFFER_DATA(buffer);
    buf[1] = buffer;
    write(ev->fd_ev_write, buf, sizeof(buf));
 
    query = gst_query_new_position (GST_FORMAT_TIME);
    if (gst_pad_query (gst_pad_get_peer (pad), query)) {
       gint64 position;
 
       gst_query_parse_position (query, NULL, &position);
       ev->position = (double)position / (double)GST_SECOND;
    }
    gst_query_unref (query);
 }
 
 static void
@@ -1139,10 +1268,11 @@ new_decoded_pad_cb (GstElement *decodebin,
    ev = (Emotion_Gstreamer_Video *)user_data;
    caps = gst_pad_get_caps (new_pad);
    str = gst_caps_to_string (caps);
-
+/*    g_print ("New pad : %s\n", str); */
    /* video stream */
    if (g_str_has_prefix (str, "video/")) {
       Emotion_Video_Sink *vsink;
+      GstElement         *queue;
       GstPad             *videopad;
 
       vsink = (Emotion_Video_Sink *)malloc (sizeof (Emotion_Video_Sink));
@@ -1152,53 +1282,207 @@ new_decoded_pad_cb (GstElement *decodebin,
          return;
       }
 
-      vsink->sink = gst_element_factory_make ("fakesink", NULL);
-      gst_bin_add (GST_BIN (ev->pipeline), vsink->sink);
-      videopad = gst_element_get_pad (vsink->sink, "sink");
+      queue = gst_element_factory_make ("queue", NULL);
+      vsink->sink = gst_element_factory_make ("fakesink", "videosink");
+      gst_bin_add_many (GST_BIN (ev->pipeline), queue, vsink->sink, NULL);
+      gst_element_link (queue, vsink->sink);
+      videopad = gst_element_get_pad (queue, "sink");
       gst_pad_link(new_pad, videopad);
+      gst_object_unref (videopad);
       if (ecore_list_nodes(ev->video_sinks) == 1) {
          ev->ratio = (double)vsink->width / (double)vsink->height;
-         g_object_set (G_OBJECT (vsink->sink), "sync", TRUE, NULL);
-         g_object_set (G_OBJECT (vsink->sink), "signal-handoffs", TRUE, NULL);
-         g_signal_connect (G_OBJECT (vsink->sink),
-                           "handoff",
-                           G_CALLBACK (cb_handoff), ev);
       }
+      gst_element_set_state (queue, GST_STATE_PAUSED);
       gst_element_set_state (vsink->sink, GST_STATE_PAUSED);
    }
    /* audio stream */
    else if (g_str_has_prefix (str, "audio/")) {
       Emotion_Audio_Sink *asink;
-      GstElement         *audioqueue;
-      GstElement         *conv;
-      GstElement         *resample;
-      GstPad             *audiopad;
-
-      asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
-      if (!asink) return;
-      if (!ecore_list_append (ev->audio_sinks, asink)) {
-         free(asink);
-         return;
-      }
+      GstPad             *audiopad;
+
+      asink = (Emotion_Audio_Sink *)malloc (sizeof (Emotion_Audio_Sink));
+      if (!asink) return;
+      if (!ecore_list_append (ev->audio_sinks, asink)) {
+         free(asink);
+         return;
+      }
 
-      g_print ("node # %d\n", ecore_list_nodes(ev->audio_sinks));
-      audioqueue = gst_element_factory_make ("queue", NULL);
+      asink->sink = _em_audio_sink_create (ev, ecore_list_index (ev->audio_sinks));
+      gst_bin_add (GST_BIN (ev->pipeline), asink->sink);
+      audiopad = gst_element_get_pad (asink->sink, "sink");
+      gst_pad_link(new_pad, audiopad);
+      gst_element_set_state (asink->sink, GST_STATE_PAUSED);
+   }
+}
+
+static GstElement *
+_em_audio_sink_create (Emotion_Gstreamer_Video *ev, int index)
+{
+   gchar       buf[128];
+   GstElement *bin;
+   GstElement *audiobin;
+   GstElement *visbin = NULL;
+   GstElement *tee;
+   GstPad     *teepad;
+   GstPad     *binpad;
+
+   /* audio sink */
+   bin = gst_bin_new (NULL);
+
+   g_print ("1\n");
+   g_snprintf (buf, 128, "tee%d", index);
+   tee = gst_element_factory_make ("tee", buf);
+
+   /* audio part */
+   {
+      GstElement *queue;
+      GstElement *conv;
+      GstElement *resample;
+      GstElement *sink;
+      GstPad     *audiopad;
+
+      audiobin = gst_bin_new (NULL);
+
+      queue = gst_element_factory_make ("queue", NULL);
       conv = gst_element_factory_make ("audioconvert", NULL);
       resample = gst_element_factory_make ("audioresample", NULL);
-      if (ecore_list_nodes(ev->audio_sinks) == 1)
-         asink->sink = gst_element_factory_make ("alsasink", NULL);
+      if (index == 1)
+         sink = gst_element_factory_make ("alsasink", NULL);
       else
-         asink->sink = gst_element_factory_make ("fakesink", NULL);
-      gst_bin_add_many (GST_BIN (ev->pipeline),
-                        audioqueue, conv, resample, asink->sink, NULL);
-      gst_element_link_many (audioqueue, conv, resample, asink->sink, NULL);
-      audiopad = gst_element_get_pad (audioqueue, "sink");
-      gst_pad_link(new_pad, audiopad);
-      gst_element_set_state (audioqueue, GST_STATE_PAUSED);
-      gst_element_set_state (conv, GST_STATE_PAUSED);
-      gst_element_set_state (resample, GST_STATE_PAUSED);
-      gst_element_set_state (asink->sink, GST_STATE_PAUSED);
+         sink = gst_element_factory_make ("fakesink", NULL);
+
+      gst_bin_add_many (GST_BIN (audiobin),
+                        queue, conv, resample, sink, NULL);
+      gst_element_link_many (queue, conv, resample, sink, NULL);
+
+      audiopad = gst_element_get_pad (queue, "sink");
+      gst_element_add_pad (audiobin, gst_ghost_pad_new ("sink", audiopad));
+      gst_object_unref (audiopad);
+   }
+
+   /* visualisation part */
+   {
+      GstElement *vis = NULL;
+      char       *vis_name;
+
+      switch (ev->vis) {
+       case EMOTION_VIS_GOOM:
+         vis_name = "goom";
+         break;
+       case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
+         vis_name = "libvisual_bumpscope";
+         break;
+       case EMOTION_VIS_LIBVISUAL_CORONA:
+         vis_name = "libvisual_corona";
+         break;
+       case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
+         vis_name = "libvisual_dancingparticles";
+         break;
+       case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
+         vis_name = "libvisual_gdkpixbuf";
+         break;
+       case EMOTION_VIS_LIBVISUAL_G_FORCE:
+         vis_name = "libvisual_G-Force";
+         break;
+       case EMOTION_VIS_LIBVISUAL_GOOM:
+         vis_name = "libvisual_goom";
+         break;
+       case EMOTION_VIS_LIBVISUAL_INFINITE:
+         vis_name = "libvisual_infinite";
+         break;
+       case EMOTION_VIS_LIBVISUAL_JAKDAW:
+         vis_name = "libvisual_jakdaw";
+         break;
+       case EMOTION_VIS_LIBVISUAL_JESS:
+         vis_name = "libvisual_jess";
+         break;
+       case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
+         vis_name = "libvisual_lv_analyzer";
+         break;
+       case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
+         vis_name = "libvisual_lv_flower";
+         break;
+       case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
+         vis_name = "libvisual_lv_gltest";
+         break;
+       case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
+         vis_name = "libvisual_lv_scope";
+         break;
+       case EMOTION_VIS_LIBVISUAL_MADSPIN:
+         vis_name = "libvisual_madspin";
+         break;
+       case EMOTION_VIS_LIBVISUAL_NEBULUS:
+         vis_name = "libvisual_nebulus";
+         break;
+       case EMOTION_VIS_LIBVISUAL_OINKSIE:
+         vis_name = "libvisual_oinksie";
+         break;
+       case EMOTION_VIS_LIBVISUAL_PLASMA:
+         vis_name = "libvisual_plazma";
+         break;
+       default:
+         vis_name = "goom";
+         break;
+      }
+
+      g_snprintf (buf, 128, "vis%d", index);
+      if ((vis = gst_element_factory_make (vis_name, buf))) {
+         GstElement *queue;
+         GstElement *conv;
+         GstElement *cspace;
+         GstElement *sink;
+         GstPad     *vispad;
+         GstCaps    *caps;
+
+         g_snprintf (buf, 128, "visbin%d", index);
+         visbin = gst_bin_new (buf);
+
+         queue = gst_element_factory_make ("queue", NULL);
+         conv = gst_element_factory_make ("audioconvert", NULL);
+         cspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
+         g_snprintf (buf, 128, "vissink%d", index);
+         sink = gst_element_factory_make ("fakesink", buf);
+
+         gst_bin_add_many (GST_BIN (visbin),
+                           queue, conv, vis, cspace, sink, NULL);
+         gst_element_link_many (queue, conv, vis, cspace, NULL);
+         caps = gst_caps_new_simple ("video/x-raw-rgb",
+                                     "bpp", G_TYPE_INT, 32,
+                                     "width", G_TYPE_INT, 320,
+                                     "height", G_TYPE_INT, 200,
+                                     NULL);
+         gst_element_link_filtered (cspace, sink, caps);
+
+         vispad = gst_element_get_pad (queue, "sink");
+         gst_element_add_pad (visbin, gst_ghost_pad_new ("sink", vispad));
+         gst_object_unref (vispad);
+      }
    }
+
+   gst_bin_add_many (GST_BIN (bin), tee, audiobin, NULL);
+   if (visbin)
+      gst_bin_add (GST_BIN (bin), visbin);
+
+   binpad = gst_element_get_pad (audiobin, "sink");
+   teepad = gst_element_get_request_pad (tee, "src%d");
+   gst_pad_link (teepad, binpad);
+   gst_object_unref (teepad);
+   gst_object_unref (binpad);
+
+   if (visbin) {
+      binpad = gst_element_get_pad (visbin, "sink");
+      teepad = gst_element_get_request_pad (tee, "src%d");
+      gst_pad_link (teepad, binpad);
+      gst_object_unref (teepad);
+      gst_object_unref (binpad);
+   }
+
+   teepad = gst_element_get_pad (tee, "sink");
+   gst_element_add_pad (bin, gst_ghost_pad_new ("sink", teepad));
+   gst_object_unref (teepad);
+
+   g_print ("6\n");
+   return bin;
 }
 
 static int
diff --git a/legacy/emotion/src/modules/emotion_gstreamer.h b/legacy/emotion/src/modules/emotion_gstreamer.h
index 98e6eb4c74..f667a35f77 100644
--- a/legacy/emotion/src/modules/emotion_gstreamer.h
+++ b/legacy/emotion/src/modules/emotion_gstreamer.h
@@ -60,6 +60,7 @@ struct _Emotion_Gstreamer_Video
    int               fd_ev_write;
    Ecore_Fd_Handler *fd_ev_handler;
 
+   Emotion_Vis       vis;
 
    unsigned char     play : 1;
    unsigned char     video_mute : 1;
diff --git a/legacy/emotion/src/modules/emotion_xine.c b/legacy/emotion/src/modules/emotion_xine.c
index eb387a4f20..a94c499963 100644
--- a/legacy/emotion/src/modules/emotion_xine.c
+++ b/legacy/emotion/src/modules/emotion_xine.c
@@ -10,11 +10,13 @@ static void em_play(void *ef, double pos);
 static void    em_stop(void *ef);
 static void    em_size_get(void *ef, int *w, int *h);
 static void    em_pos_set(void *ef, double pos);
+static void    em_vis_set(void *video, Emotion_Vis vis);
 static double  em_len_get(void *ef);
 static int     em_fps_num_get(void *ef);
 static int     em_fps_den_get(void *ef);
 static double  em_fps_get(void *ef);
 static double  em_pos_get(void *ef);
+static Emotion_Vis em_vis_get(void *video);
 static double  em_ratio_get(void *ef);
 static int     em_seekable(void *ef);
 static void    em_frame_done(void *ef);
@@ -476,6 +478,18 @@ em_pos_set(void *ef, double pos)
      }
 }
 
+static void
+em_vis_set(void *ef,
+           Emotion_Vis vis)
+{
+   Emotion_Xine_Video *ev;
+
+   ev = (Emotion_Xine_Video *)ef;
+
+   if (ev->vis == vis) return;
+   ev->vis = vis;
+}
+
 static double
 em_len_get(void *ef)
 {
@@ -521,6 +535,16 @@ em_pos_get(void *ef)
    return ev->pos;
 }
 
+static Emotion_Vis
+em_vis_get(void *ef)
+{
+   Emotion_Xine_Video *ev;
+
+   ev = (Emotion_Xine_Video *)ef;
+
+   return ev->vis;
+}
+
 static double
 em_ratio_get(void *ef)
 {
@@ -1389,11 +1413,13 @@ static Emotion_Video_Module em_module =
    em_stop, /* stop */
    em_size_get, /* size_get */
    em_pos_set, /* pos_set */
+   em_vis_set, /* vis_set */
    em_len_get, /* len_get */
    em_fps_num_get, /* fps_num_get */
    em_fps_den_get, /* fps_den_get */
    em_fps_get, /* fps_get */
    em_pos_get, /* pos_get */
+   em_vis_get, /* vis_get */
    em_ratio_get, /* ratio_get */
    em_video_handled, /* video_handled */
    em_audio_handled, /* audio_handled */
diff --git a/legacy/emotion/src/modules/emotion_xine.h b/legacy/emotion/src/modules/emotion_xine.h
index 746ff52f98..5816b4ef87 100644
--- a/legacy/emotion/src/modules/emotion_xine.h
+++ b/legacy/emotion/src/modules/emotion_xine.h
@@ -29,6 +29,7 @@ struct _Emotion_Xine_Video
    volatile int       seek_to;
    volatile int       get_poslen;
    volatile double    seek_to_pos;
+   Emotion_Vis        vis;
    Ecore_Timer       *timer;
    int                fd_read;
    int                fd_write;