formatting

SVN revision: 35011
Sebastian Dransfeld 2008-07-06 09:52:21 +00:00
parent 10bac60aa1
commit ec19c6f700
2 changed files with 451 additions and 418 deletions
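The diff is a whitespace-only cleanup of the Emotion GStreamer pipeline sources: call sites lose the space before the opening parenthesis and braces move onto their own lines. A minimal before/after sketch of the convention being applied, taken from emotion_pipeline_pause() below (indentation here is illustrative, not copied from the repository):

/* before: space before '(' and K&R braces */
if (res == GST_STATE_CHANGE_FAILURE) {
   g_print ("Emotion-Gstreamer ERROR: could not pause\n");
   return 0;
}

/* after: no space before '(', braces on their own lines */
if (res == GST_STATE_CHANGE_FAILURE)
  {
     g_print("Emotion-Gstreamer ERROR: could not pause\n");
     return 0;
  }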

@@ -10,181 +10,194 @@
gboolean
emotion_pipeline_pause(GstElement *pipeline)
{
   GstStateChangeReturn res;

   res = gst_element_set_state((pipeline), GST_STATE_PAUSED);
   if (res == GST_STATE_CHANGE_FAILURE)
     {
        g_print("Emotion-Gstreamer ERROR: could not pause\n");
        return 0;
     }
   res = gst_element_get_state((pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
   if (res != GST_STATE_CHANGE_SUCCESS)
     {
        g_print("Emotion-Gstreamer ERROR: could not complete pause\n");
        return 0;
     }
   return 1;
}

/* Send the video frame to the evas object */
void
cb_handoff(GstElement *fakesrc,
           GstBuffer *buffer,
           GstPad *pad,
           gpointer user_data)
{
   GstQuery *query;
   void *buf[2];
   Emotion_Gstreamer_Video *ev = (Emotion_Gstreamer_Video *)user_data;

   if (!ev)
     return;

   if (!ev->video_mute)
     {
        if (!ev->obj_data)
          ev->obj_data = malloc(GST_BUFFER_SIZE(buffer) * sizeof(void));
        memcpy(ev->obj_data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
        buf[0] = GST_BUFFER_DATA(buffer);
        buf[1] = buffer;
        write(ev->fd_ev_write, buf, sizeof(buf));
     }
   else
     {
        Emotion_Audio_Sink *asink;
        asink = (Emotion_Audio_Sink *)ecore_list_index_goto(ev->audio_sinks, ev->audio_sink_nbr);
        _emotion_video_pos_update(ev->obj, ev->position, asink->length_time);
     }

   query = gst_query_new_position(GST_FORMAT_TIME);
   if (gst_pad_query(gst_pad_get_peer(pad), query))
     {
        gint64 position;

        gst_query_parse_position(query, NULL, &position);
        ev->position = (double)position / (double)GST_SECOND;
     }
   gst_query_unref(query);
}

void
file_new_decoded_pad_cb(GstElement *decodebin,
                        GstPad *new_pad,
                        gboolean last,
                        gpointer user_data)
{
   Emotion_Gstreamer_Video *ev;
   GstCaps *caps;
   gchar *str;

   ev = (Emotion_Gstreamer_Video *)user_data;
   caps = gst_pad_get_caps(new_pad);
   str = gst_caps_to_string(caps);
   /* video stream */
   if (g_str_has_prefix(str, "video/"))
     {
        Emotion_Video_Sink *vsink;
        GstElement *queue;
        GstPad *videopad;

        vsink = (Emotion_Video_Sink *)malloc(sizeof(Emotion_Video_Sink));
        if (!vsink) return;
        if (!ecore_list_append(ev->video_sinks, vsink))
          {
             free(vsink);
             return;
          }

        queue = gst_element_factory_make("queue", NULL);
        vsink->sink = gst_element_factory_make("fakesink", "videosink");
        gst_bin_add_many(GST_BIN(ev->pipeline), queue, vsink->sink, NULL);
        gst_element_link(queue, vsink->sink);
        videopad = gst_element_get_pad(queue, "sink");
        gst_pad_link(new_pad, videopad);
        gst_object_unref(videopad);
        if (ecore_list_count(ev->video_sinks) == 1)
          {
             ev->ratio = (double)vsink->width / (double)vsink->height;
          }
        gst_element_set_state(queue, GST_STATE_PAUSED);
        gst_element_set_state(vsink->sink, GST_STATE_PAUSED);
     }
   /* audio stream */
   else if (g_str_has_prefix(str, "audio/"))
     {
        Emotion_Audio_Sink *asink;
        GstPad *audiopad;

        asink = (Emotion_Audio_Sink *)malloc(sizeof(Emotion_Audio_Sink));
        if (!asink) return;
        if (!ecore_list_append(ev->audio_sinks, asink))
          {
             free(asink);
             return;
          }

        asink->sink = emotion_audio_sink_create(ev, ecore_list_index(ev->audio_sinks));
        gst_bin_add(GST_BIN(ev->pipeline), asink->sink);
        audiopad = gst_element_get_pad(asink->sink, "sink");
        gst_pad_link(new_pad, audiopad);
        gst_element_set_state(asink->sink, GST_STATE_PAUSED);
     }
   free(str);
}

Emotion_Video_Sink *
emotion_video_sink_new(Emotion_Gstreamer_Video *ev)
{
   Emotion_Video_Sink *vsink;

   if (!ev) return NULL;

   vsink = (Emotion_Video_Sink *)malloc(sizeof(Emotion_Video_Sink));
   if (!vsink) return NULL;

   if (!ecore_list_append(ev->video_sinks, vsink))
     {
        free(vsink);
        return NULL;
     }
   return vsink;
}

void
emotion_video_sink_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink)
{
   if (!ev || !vsink) return;

   if (ecore_list_goto(ev->video_sinks, vsink))
     {
        ecore_list_remove(ev->video_sinks);
        free(vsink);
     }
}

Emotion_Video_Sink *
emotion_visualization_sink_create(Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
{
   Emotion_Video_Sink *vsink;

   if (!ev) return NULL;

   vsink = emotion_video_sink_new(ev);
   if (!vsink) return NULL;

   vsink->sink = gst_bin_get_by_name(GST_BIN(asink->sink), "vissink1");
   if (!vsink->sink)
     {
        emotion_video_sink_free(ev, vsink);
        return NULL;
     }
   vsink->width = 320;
   vsink->height = 200;
   ev->ratio = (double)vsink->width / (double)vsink->height;
   vsink->fps_num = 25;
   vsink->fps_den = 1;
   vsink->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
   vsink->length_time = asink->length_time;

   g_object_set(G_OBJECT(vsink->sink), "sync", TRUE, NULL);
   g_object_set(G_OBJECT(vsink->sink), "signal-handoffs", TRUE, NULL);
   g_signal_connect(G_OBJECT(vsink->sink),
                    "handoff",
                    G_CALLBACK(cb_handoff), ev);

   return vsink;
}
@@ -200,41 +213,44 @@ emotion_pipeline_cdda_track_count_get(void *video)
   if (!ev) return tracks_count;

   done = FALSE;
   bus = gst_element_get_bus(ev->pipeline);
   if (!bus) return tracks_count;

   while (!done)
     {
        GstMessage *message;

        message = gst_bus_pop(bus);
        if (message == NULL)
          /* All messages read, we're done */
          break;

        switch (GST_MESSAGE_TYPE(message))
          {
           case GST_MESSAGE_TAG:
             {
                GstTagList *tags;

                gst_message_parse_tag(message, &tags);
                gst_tag_list_get_uint(tags, GST_TAG_TRACK_COUNT, &tracks_count);
                if (tracks_count) done = TRUE;
                break;
             }
           case GST_MESSAGE_ERROR:
           default:
             break;
          }
        gst_message_unref(message);
     }

   gst_object_unref(GST_OBJECT(bus));

   return tracks_count;
}

GstElement *
emotion_audio_sink_create(Emotion_Gstreamer_Video *ev, int index)
{
   gchar buf[128];
   GstElement *bin;
@@ -245,302 +261,316 @@ emotion_audio_sink_create(Emotion_Gstreamer_Video *ev, int index)
   GstPad *binpad;

   /* audio sink */
   bin = gst_bin_new(NULL);
   if (!bin) return NULL;

   g_snprintf(buf, 128, "tee%d", index);
   tee = gst_element_factory_make("tee", buf);

   /* audio part */
   {
      GstElement *queue;
      GstElement *conv;
      GstElement *resample;
      GstElement *volume;
      GstElement *sink;
      GstPad *audiopad;
      double vol;

      audiobin = gst_bin_new(NULL);

      queue = gst_element_factory_make("queue", NULL);
      conv = gst_element_factory_make("audioconvert", NULL);
      resample = gst_element_factory_make("audioresample", NULL);
      volume = gst_element_factory_make("volume", "volume");
      g_object_get(G_OBJECT(volume), "volume", &vol, NULL);
      ev->volume = vol / 10.0;

      if (index == 1)
        sink = gst_element_factory_make("autoaudiosink", NULL);
      else
        sink = gst_element_factory_make("fakesink", NULL);

      gst_bin_add_many(GST_BIN(audiobin),
                       queue, conv, resample, volume, sink, NULL);
      gst_element_link_many(queue, conv, resample, volume, sink, NULL);

      audiopad = gst_element_get_pad(queue, "sink");
      gst_element_add_pad(audiobin, gst_ghost_pad_new("sink", audiopad));
      gst_object_unref(audiopad);
   }

   /* visualisation part */
   {
      GstElement *vis = NULL;
      char *vis_name;

      switch (ev->vis)
        {
         case EMOTION_VIS_GOOM:
           vis_name = "goom";
           break;
         case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
           vis_name = "libvisual_bumpscope";
           break;
         case EMOTION_VIS_LIBVISUAL_CORONA:
           vis_name = "libvisual_corona";
           break;
         case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
           vis_name = "libvisual_dancingparticles";
           break;
         case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
           vis_name = "libvisual_gdkpixbuf";
           break;
         case EMOTION_VIS_LIBVISUAL_G_FORCE:
           vis_name = "libvisual_G-Force";
           break;
         case EMOTION_VIS_LIBVISUAL_GOOM:
           vis_name = "libvisual_goom";
           break;
         case EMOTION_VIS_LIBVISUAL_INFINITE:
           vis_name = "libvisual_infinite";
           break;
         case EMOTION_VIS_LIBVISUAL_JAKDAW:
           vis_name = "libvisual_jakdaw";
           break;
         case EMOTION_VIS_LIBVISUAL_JESS:
           vis_name = "libvisual_jess";
           break;
         case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
           vis_name = "libvisual_lv_analyzer";
           break;
         case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
           vis_name = "libvisual_lv_flower";
           break;
         case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
           vis_name = "libvisual_lv_gltest";
           break;
         case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
           vis_name = "libvisual_lv_scope";
           break;
         case EMOTION_VIS_LIBVISUAL_MADSPIN:
           vis_name = "libvisual_madspin";
           break;
         case EMOTION_VIS_LIBVISUAL_NEBULUS:
           vis_name = "libvisual_nebulus";
           break;
         case EMOTION_VIS_LIBVISUAL_OINKSIE:
           vis_name = "libvisual_oinksie";
           break;
         case EMOTION_VIS_LIBVISUAL_PLASMA:
           vis_name = "libvisual_plazma";
           break;
         default:
           vis_name = "goom";
           break;
        }

      g_snprintf(buf, 128, "vis%d", index);
      if ((vis = gst_element_factory_make(vis_name, buf)))
        {
           GstElement *queue;
           GstElement *conv;
           GstElement *cspace;
           GstElement *sink;
           GstPad *vispad;
           GstCaps *caps;

           g_snprintf(buf, 128, "visbin%d", index);
           visbin = gst_bin_new(buf);

           queue = gst_element_factory_make("queue", NULL);
           conv = gst_element_factory_make("audioconvert", NULL);
           cspace = gst_element_factory_make("ffmpegcolorspace", NULL);
           g_snprintf(buf, 128, "vissink%d", index);
           sink = gst_element_factory_make("fakesink", buf);

           gst_bin_add_many(GST_BIN(visbin),
                            queue, conv, vis, cspace, sink, NULL);
           gst_element_link_many(queue, conv, vis, cspace, NULL);
           caps = gst_caps_new_simple("video/x-raw-rgb",
                                      "bpp", G_TYPE_INT, 32,
                                      "width", G_TYPE_INT, 320,
                                      "height", G_TYPE_INT, 200,
                                      NULL);
           gst_element_link_filtered(cspace, sink, caps);

           vispad = gst_element_get_pad(queue, "sink");
           gst_element_add_pad(visbin, gst_ghost_pad_new("sink", vispad));
           gst_object_unref(vispad);
        }
   }

   gst_bin_add_many(GST_BIN(bin), tee, audiobin, NULL);
   if (visbin)
     gst_bin_add(GST_BIN(bin), visbin);

   binpad = gst_element_get_pad(audiobin, "sink");
   teepad = gst_element_get_request_pad(tee, "src%d");
   gst_pad_link(teepad, binpad);
   gst_object_unref(teepad);
   gst_object_unref(binpad);

   if (visbin)
     {
        binpad = gst_element_get_pad(visbin, "sink");
        teepad = gst_element_get_request_pad(tee, "src%d");
        gst_pad_link(teepad, binpad);
        gst_object_unref(teepad);
        gst_object_unref(binpad);
     }

   teepad = gst_element_get_pad(tee, "sink");
   gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
   gst_object_unref(teepad);

   return bin;
}

void
emotion_streams_sinks_get(Emotion_Gstreamer_Video *ev, GstElement *decoder)
{
   GstIterator *it;
   gpointer data;

   ecore_list_first_goto(ev->video_sinks);
   ecore_list_first_goto(ev->audio_sinks);

   it = gst_element_iterate_src_pads(decoder);
   while (gst_iterator_next(it, &data) == GST_ITERATOR_OK)
     {
        GstPad *pad;
        GstCaps *caps;
        gchar *str;

        pad = GST_PAD(data);

        caps = gst_pad_get_caps(pad);
        str = gst_caps_to_string(caps);
        g_print("caps !! %s\n", str);
        /* video stream */
        if (g_str_has_prefix(str, "video/"))
          {
             Emotion_Video_Sink *vsink;

             vsink = (Emotion_Video_Sink *)ecore_list_next(ev->video_sinks);
             emotion_video_sink_fill(vsink, pad, caps);
          }
        /* audio stream */
        else if (g_str_has_prefix(str, "audio/"))
          {
             Emotion_Audio_Sink *asink;
             gint index;

             asink = (Emotion_Audio_Sink *)ecore_list_next(ev->audio_sinks);
             emotion_audio_sink_fill(asink, pad, caps);

             index = ecore_list_index(ev->audio_sinks);

             if (ecore_list_count(ev->video_sinks) == 0)
               {
                  if (index == 1)
                    {
                       Emotion_Video_Sink *vsink;

                       vsink = emotion_visualization_sink_create(ev, asink);
                       if (!vsink) goto finalize;
                    }
               }
             else
               {
                  gchar buf[128];
                  GstElement *visbin;

                  g_snprintf(buf, 128, "visbin%d", index);
                  visbin = gst_bin_get_by_name(GST_BIN(ev->pipeline), buf);
                  if (visbin)
                    {
                       GstPad *srcpad;
                       GstPad *sinkpad;

                       sinkpad = gst_element_get_pad(visbin, "sink");
                       srcpad = gst_pad_get_peer(sinkpad);
                       gst_pad_unlink(srcpad, sinkpad);

                       gst_object_unref(srcpad);
                       gst_object_unref(sinkpad);
                    }
               }
          }
finalize:
        gst_caps_unref(caps);
        g_free(str);
        gst_object_unref(pad);
     }
   gst_iterator_free(it);
}

void
emotion_video_sink_fill(Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps)
{
   GstStructure *structure;
   GstQuery *query;
   const GValue *val;
   gchar *str;

   structure = gst_caps_get_structure(caps, 0);
   str = gst_caps_to_string(caps);

   gst_structure_get_int(structure, "width", &vsink->width);
   gst_structure_get_int(structure, "height", &vsink->height);

   vsink->fps_num = 1;
   vsink->fps_den = 1;
   val = gst_structure_get_value(structure, "framerate");
   if (val)
     {
        vsink->fps_num = gst_value_get_fraction_numerator(val);
        vsink->fps_den = gst_value_get_fraction_denominator(val);
     }
   if (g_str_has_prefix(str, "video/x-raw-yuv"))
     {
        val = gst_structure_get_value(structure, "format");
        vsink->fourcc = gst_value_get_fourcc(val);
     }
   else if (g_str_has_prefix(str, "video/x-raw-rgb"))
     vsink->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
   else
     vsink->fourcc = 0;

   query = gst_query_new_duration(GST_FORMAT_TIME);
   if (gst_pad_query(pad, query))
     {
        gint64 time;

        gst_query_parse_duration(query, NULL, &time);
        vsink->length_time = (double)time / (double)GST_SECOND;
     }
   g_free(str);
   gst_query_unref(query);
}

void
emotion_audio_sink_fill(Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps)
{
   GstStructure *structure;
   GstQuery *query;

   structure = gst_caps_get_structure(caps, 0);

   gst_structure_get_int(structure, "channels", &asink->channels);
   gst_structure_get_int(structure, "rate", &asink->samplerate);

   query = gst_query_new_duration(GST_FORMAT_TIME);
   if (gst_pad_query(pad, query))
     {
        gint64 time;

        gst_query_parse_duration(query, NULL, &time);
        asink->length_time = (double)time / (double)GST_SECOND;
     }
   gst_query_unref(query);
}

@@ -5,121 +5,124 @@
#include "emotion_gstreamer_pipeline.h"

static Emotion_Audio_Sink *_emotion_audio_sink_new  (Emotion_Gstreamer_Video *ev);
static void                _emotion_audio_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink);

int
emotion_pipeline_cdda_build(void *video, const char * device, unsigned int track)
{
   GstElement *cdiocddasrc;
   Emotion_Video_Sink *vsink;
   Emotion_Audio_Sink *asink;
   Emotion_Gstreamer_Video *ev;
   /* GstFormat format; */
   /* gint64 tracks_count; */

   ev = (Emotion_Gstreamer_Video *)video;
   if (!ev) return 0;

   cdiocddasrc = gst_element_factory_make("cdiocddasrc", "src");
   if (!cdiocddasrc)
     {
        g_print("cdiocddasrc element missing. Install it.\n");
        goto failure_cdiocddasrc;
     }

   if (device)
     g_object_set(G_OBJECT(cdiocddasrc), "device", device, NULL);

   g_object_set(G_OBJECT(cdiocddasrc), "track", track, NULL);

   asink = _emotion_audio_sink_new(ev);
   if (!asink)
     goto failure_emotion_sink;

   asink->sink = emotion_audio_sink_create(ev, 1);
   if (!asink->sink)
     goto failure_gstreamer_sink;

   gst_bin_add_many((GST_BIN(ev->pipeline)), cdiocddasrc, asink->sink, NULL);

   if (!gst_element_link(cdiocddasrc, asink->sink))
     goto failure_link;

   vsink = emotion_visualization_sink_create(ev, asink);
   if (!vsink) goto failure_link;

   if (!emotion_pipeline_pause(ev->pipeline))
     goto failure_gstreamer_pause;

   {
      GstQuery *query;
      GstPad *pad;
      GstCaps *caps;
      GstStructure *structure;

      /* should always be found */
      pad = gst_element_get_pad(cdiocddasrc, "src");

      caps = gst_pad_get_caps(pad);
      structure = gst_caps_get_structure(GST_CAPS(caps), 0);

      gst_structure_get_int(structure, "channels", &asink->channels);
      gst_structure_get_int(structure, "rate", &asink->samplerate);

      gst_caps_unref(caps);

      query = gst_query_new_duration(GST_FORMAT_TIME);
      if (gst_pad_query(pad, query))
        {
           gint64 time;

           gst_query_parse_duration(query, NULL, &time);
           asink->length_time = (double)time / (double)GST_SECOND;
           vsink->length_time = asink->length_time;
        }
      gst_query_unref(query);
      gst_object_unref(GST_OBJECT(pad));
   }

   return 1;

failure_gstreamer_pause:
   emotion_video_sink_free(ev, vsink);
failure_link:
   gst_bin_remove(GST_BIN(ev->pipeline), asink->sink);
failure_gstreamer_sink:
   _emotion_audio_sink_free(ev, asink);
failure_emotion_sink:
   gst_bin_remove(GST_BIN(ev->pipeline), cdiocddasrc);
failure_cdiocddasrc:
   return 0;
}

static Emotion_Audio_Sink *
_emotion_audio_sink_new(Emotion_Gstreamer_Video *ev)
{
   Emotion_Audio_Sink *asink;

   if (!ev) return NULL;

   asink = (Emotion_Audio_Sink *)malloc(sizeof(Emotion_Audio_Sink));
   if (!asink) return NULL;

   if (!ecore_list_append(ev->audio_sinks, asink))
     {
        free(asink);
        return NULL;
     }
   return asink;
}

static void
_emotion_audio_sink_free(Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
{
   if (!ev || !asink) return;

   if (ecore_list_goto(ev->audio_sinks, asink))
     {
        ecore_list_remove(ev->audio_sinks);
        free(asink);
     }
}