diff --git a/configure.ac b/configure.ac index 9f3dcf1c2f..0751b391c3 100644 --- a/configure.ac +++ b/configure.ac @@ -1796,8 +1796,8 @@ AC_ARG_ENABLE([g-main-loop], [want_g_main_loop="no"]) AC_ARG_ENABLE([gstreamer], - [AC_HELP_STRING([--disable-gstreamer], - [disable gstreamer support. @<:@default=enabled@:>@])], + [AC_HELP_STRING([--enable-gstreamer], + [enable gstreamer 0.10 support. @<:@default=disabled@:>@])], [ if test "x${enableval}" = "xyes" ; then want_gstreamer="yes" @@ -1805,7 +1805,19 @@ AC_ARG_ENABLE([gstreamer], want_gstreamer="no" fi ], - [want_gstreamer="yes"]) + [want_gstreamer="no"]) + +AC_ARG_ENABLE([gstreamer1], + [AC_HELP_STRING([--disable-gstreamer1], + [disable gstreamer 1.0 support. @<:@default=enabled@:>@])], + [ + if test "x${enableval}" = "xyes" ; then + want_gstreamer1="yes" + else + want_gstreamer1="no" + fi + ], + [want_gstreamer1="yes"]) AC_ARG_ENABLE([tizen], [AC_HELP_STRING([--enable-tizen], @@ -1882,10 +1894,17 @@ if test "x${want_g_main_loop}" = "xyes" ; then fi # not EFL_OPTIONAL_DEPEND_PKG() because it's only used for ecore examples +if test "${want_gstreamer1}" = "yes" -a "${want_gstreamer}" = "yes"; then + AC_MSG_ERROR([You can only enable either GStreamer 1.0 or GStreamer 0.10 support]) +fi + +if test "${want_gstreamer1}" = "yes"; then + PKG_CHECK_MODULES([GSTREAMER], [gstreamer-1.0]) +fi if test "${want_gstreamer}" = "yes"; then PKG_CHECK_MODULES([GSTREAMER], [gstreamer-0.10]) fi -AM_CONDITIONAL([HAVE_GSTREAMER], [test "${want_gstreamer}" = "yes"]) +AM_CONDITIONAL([HAVE_GSTREAMER], [test "${want_gstreamer}" = "yes" -o "${want_gstreamer1}" = "yes"]) EFL_EVAL_PKGS([ECORE]) @@ -3663,10 +3682,12 @@ have_gst_xoverlay="no" EMOTION_MODULE([xine], [${want_xine}]) EMOTION_MODULE([gstreamer], [${want_gstreamer}]) +EMOTION_MODULE([gstreamer1], [${want_gstreamer1}]) EMOTION_MODULE([generic], [${want_emotion_generic}]) EFL_ADD_FEATURE([EMOTION], [xine]) EFL_ADD_FEATURE([EMOTION], [gstreamer]) +EFL_ADD_FEATURE([EMOTION], [gstreamer1]) 
EFL_ADD_FEATURE([EMOTION], [generic], [${want_emotion_generic}]) EFL_EVAL_PKGS([EMOTION]) diff --git a/m4/emotion_module.m4 b/m4/emotion_module.m4 index 7685f992c2..75884e78d4 100644 --- a/m4/emotion_module.m4 +++ b/m4/emotion_module.m4 @@ -45,6 +45,19 @@ AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GSTREAMER], fi ]) +dnl use: EMOTION_MODULE_DEP_CHECK_GSTREAMER_1(want_static) +dnl where want_engine = yes or static +AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GSTREAMER1], +[dnl + GST_VER=1.0 + requirements="gstreamer-1.0 >= ${GST_VER} gstreamer-plugins-base-1.0 >= ${GST_VER} gstreamer-video-1.0 >= ${GST_VER} gstreamer-audio-1.0 >= ${GST_VER} gstreamer-tag-1.0 >= ${GST_VER}" + if test "$1" = "static"; then + EFL_DEPEND_PKG([EMOTION], [EMOTION_MODULE_GSTREAMER1], [${requirements}]) + else + PKG_CHECK_MODULES([EMOTION_MODULE_GSTREAMER1], [${requirements}]) + fi +]) + dnl use: EMOTION_MODULE_DEP_CHECK_GENERIC(want_static) dnl where want_engine = yes or static AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GENERIC], diff --git a/src/Makefile_Emotion.am b/src/Makefile_Emotion.am index 4689f7d135..4796018113 100644 --- a/src/Makefile_Emotion.am +++ b/src/Makefile_Emotion.am @@ -52,7 +52,7 @@ modules_emotion_xine_module_la_LIBTOOLFLAGS = --tag=disable-static endif endif -# Gstreamer +# Gstreamer 0.10 EMOTION_GSTREAMER_SOURCES = \ modules/emotion/gstreamer/emotion_gstreamer.h \ modules/emotion/gstreamer/emotion_gstreamer.c \ @@ -85,6 +85,39 @@ endif endif endif +# Gstreamer 1.0 +EMOTION_GSTREAMER1_SOURCES = \ +modules/emotion/gstreamer1/emotion_gstreamer.h \ +modules/emotion/gstreamer1/emotion_gstreamer.c \ +modules/emotion/gstreamer1/emotion_alloc.c \ +modules/emotion/gstreamer1/emotion_convert.c \ +modules/emotion/gstreamer1/emotion_fakeeos.c \ +modules/emotion/gstreamer1/emotion_sink.c + +if EMOTION_STATIC_BUILD_GSTREAMER1 +lib_emotion_libemotion_la_SOURCES += $(EMOTION_GSTREAMER1_SOURCES) +else +if EMOTION_BUILD_GSTREAMER1 +emotionmodulegstreamer1dir = 
$(libdir)/emotion/modules/gstreamer1/$(MODULE_ARCH) +emotionmodulegstreamer1_LTLIBRARIES = modules/emotion/gstreamer1/module.la +modules_emotion_gstreamer1_module_la_SOURCES = $(EMOTION_GSTREAMER1_SOURCES) +modules_emotion_gstreamer1_module_la_CPPFLAGS = -I$(top_builddir)/src/lib/efl \ +@EMOTION_CFLAGS@ \ +@EMOTION_MODULE_GSTREAMER1_CFLAGS@ +modules_emotion_gstreamer1_module_la_LIBADD = \ +@USE_EMOTION_LIBS@ \ +@EMOTION_MODULE_GSTREAMER1_LIBS@ +modules_emotion_gstreamer1_module_la_DEPENDENCIES = @USE_EMOTION_INTERNAL_LIBS@ +modules_emotion_gstreamer1_module_la_LDFLAGS = -module @EFL_LTMODULE_FLAGS@ +modules_emotion_gstreamer1_module_la_LIBTOOLFLAGS = --tag=disable-static +if HAVE_ECORE_X +modules_emotion_gstreamer1_module_la_CPPFLAGS += @ECORE_X_CFLAGS@ @ECORE_EVAS_CFLAGS@ +modules_emotion_gstreamer1_module_la_LIBADD += @USE_ECORE_X_LIBS@ @USE_ECORE_EVAS_LIBS@ +modules_emotion_gstreamer1_module_la_DEPENDENCIES += @USE_ECORE_X_INTERNAL_LIBS@ @USE_ECORE_EVAS_INTERNAL_LIBS@ +endif +endif +endif + # Generic EMOTION_GENERIC_SOURCES = \ modules/emotion/generic/emotion_generic.h \ diff --git a/src/examples/ecore/Makefile.examples b/src/examples/ecore/Makefile.examples index f8ac82d50f..63e5f6b796 100644 --- a/src/examples/ecore/Makefile.examples +++ b/src/examples/ecore/Makefile.examples @@ -1,6 +1,13 @@ CC=gcc + +if HAVE_GSTREAMER_1 + GSTREAMER_DEP="gstreamer-1.0" +else + GSTREAMER_DEP="gstreamer-0.10" +endif + COMMON_FLAGS=`pkg-config --libs --cflags eina,ecore,evas,ecore-evas,ecore-audio,ecore-con,ecore-file, \ - eo,evas-software-buffer,gnutls,ecore-imf,ecore-imf-evas,gstreamer-0.10` -lm + eo,evas-software-buffer,gnutls,ecore-imf,ecore-imf-evas,$(GSTREAMER_DEP)` -lm EXAMPLES= ecore_animator_example \ ecore_audio_custom \ diff --git a/src/examples/ecore/ecore_pipe_gstreamer_example.c b/src/examples/ecore/ecore_pipe_gstreamer_example.c index 008b96e5d8..20d3fa1163 100644 --- a/src/examples/ecore/ecore_pipe_gstreamer_example.c +++ 
b/src/examples/ecore/ecore_pipe_gstreamer_example.c @@ -1,4 +1,6 @@ //Compile with: +// gcc -o ecore_pipe_gstreamer_example ecore_pipe_gstreamer_example.c `pkg-config --libs --cflags ecore gstreamer-1.0` +// or // gcc -o ecore_pipe_gstreamer_example ecore_pipe_gstreamer_example.c `pkg-config --libs --cflags ecore gstreamer-0.10` #include @@ -98,23 +100,33 @@ new_decoded_pad_cb(GstElement *demuxer, GstElement *decoder; GstPad *pad; GstCaps *caps; - gchar *str; + GstStructure *s; + const gchar *str; +#if GST_CHECK_VERSION(1,0,0) + caps = gst_pad_get_current_caps(new_pad); +#else caps = gst_pad_get_caps(new_pad); - str = gst_caps_to_string(caps); +#endif + s = gst_caps_get_structure(caps, 0); + str = gst_structure_get_name(s); if (g_str_has_prefix(str, "video/")) { decoder = GST_ELEMENT(user_data); +#if GST_CHECK_VERSION(1,0,0) + pad = gst_element_get_static_pad(decoder, "sink"); +#else pad = gst_element_get_pad(decoder, "sink"); +#endif if (GST_PAD_LINK_FAILED(gst_pad_link(new_pad, pad))) { g_warning("Failed to link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(new_pad), GST_DEBUG_PAD_NAME(pad)); } + gst_object_unref(pad); } - g_free(str); gst_caps_unref(caps); } diff --git a/src/modules/emotion/gstreamer1/emotion_alloc.c b/src/modules/emotion/gstreamer1/emotion_alloc.c new file mode 100644 index 0000000000..80d1160217 --- /dev/null +++ b/src/modules/emotion/gstreamer1/emotion_alloc.c @@ -0,0 +1,73 @@ +#ifdef HAVE_CONFIG_H +# include "config.h" +#endif + +#include "emotion_gstreamer.h" + +Emotion_Gstreamer_Buffer * +emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink, + GstBuffer *buffer, + Eina_Bool preroll) +{ + Emotion_Gstreamer_Buffer *send; + + if (!sink->ev) return NULL; + + send = malloc(sizeof (Emotion_Gstreamer_Buffer)); + if (!send) return NULL; + + send->sink = sink; + send->frame = gst_buffer_ref(buffer); + send->preroll = preroll; + send->force = EINA_FALSE; + sink->ev->out++; + send->ev = sink->ev; + + return send; +} + +void 
+emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send) +{ + send->ev->in++; + + if (send->ev->in == send->ev->out + && send->ev->threads == NULL + && send->ev->delete_me) + send->ev->api->del(send->ev); + + gst_buffer_unref(send->frame); + free(send); +} + +Emotion_Gstreamer_Message * +emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev, + GstMessage *msg) +{ + Emotion_Gstreamer_Message *send; + + if (!ev) return NULL; + + send = malloc(sizeof (Emotion_Gstreamer_Message)); + if (!send) return NULL; + + ev->out++; + send->ev = ev; + send->msg = gst_message_ref(msg); + + return send; +} + +void +emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send) +{ + send->ev->in++; + + if (send->ev->in == send->ev->out + && send->ev->threads == NULL + && send->ev->delete_me) + send->ev->api->del(send->ev); + + gst_message_unref(send->msg); + free(send); +} diff --git a/src/modules/emotion/gstreamer1/emotion_convert.c b/src/modules/emotion/gstreamer1/emotion_convert.c new file mode 100644 index 0000000000..2a3aaa58ed --- /dev/null +++ b/src/modules/emotion/gstreamer1/emotion_convert.c @@ -0,0 +1,160 @@ +#ifdef HAVE_CONFIG_H +# include "config.h" +#endif + +#include "emotion_gstreamer.h" + +static inline void +_evas_video_bgrx_step(unsigned char *evas_data, const unsigned char *gst_data, + unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height, unsigned int step) +{ + unsigned int x; + unsigned int y; + + for (y = 0; y < output_height; ++y) + { + for (x = 0; x < w; x++) + { + evas_data[0] = gst_data[0]; + evas_data[1] = gst_data[1]; + evas_data[2] = gst_data[2]; + evas_data[3] = 255; + gst_data += step; + evas_data += 4; + } + } +} + +static void +_evas_video_bgr(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height) +{ + _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 3); +} + +static void +_evas_video_bgrx(unsigned char *evas_data, const unsigned char *gst_data, 
unsigned int w, unsigned int h, unsigned int output_height) +{ + _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 4); +} + +static void +_evas_video_bgra(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height) +{ + unsigned int x; + unsigned int y; + + for (y = 0; y < output_height; ++y) + { + unsigned char alpha; + + for (x = 0; x < w; ++x) + { + alpha = gst_data[3]; + evas_data[0] = (gst_data[0] * alpha) / 255; + evas_data[1] = (gst_data[1] * alpha) / 255; + evas_data[2] = (gst_data[2] * alpha) / 255; + evas_data[3] = alpha; + gst_data += 4; + evas_data += 4; + } + } +} + +static void +_evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height) +{ + const unsigned char **rows; + unsigned int i, j; + unsigned int rh; + unsigned int stride_y, stride_uv; + + rh = output_height; + + rows = (const unsigned char **)evas_data; + + stride_y = GST_ROUND_UP_4(w); + stride_uv = GST_ROUND_UP_8(w) / 2; + + for (i = 0; i < rh; i++) + rows[i] = &gst_data[i * stride_y]; + + for (j = 0; j < (rh / 2); j++, i++) + rows[i] = &gst_data[h * stride_y + j * stride_uv]; + + for (j = 0; j < (rh / 2); j++, i++) + rows[i] = &gst_data[h * stride_y + + (rh / 2) * stride_uv + + j * stride_uv]; +} + +static void +_evas_video_yv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height) +{ + const unsigned char **rows; + unsigned int i, j; + unsigned int rh; + unsigned int stride_y, stride_uv; + + rh = output_height; + + rows = (const unsigned char **)evas_data; + + stride_y = GST_ROUND_UP_4(w); + stride_uv = GST_ROUND_UP_8(w) / 2; + + for (i = 0; i < rh; i++) + rows[i] = &gst_data[i * stride_y]; + + for (j = 0; j < (rh / 2); j++, i++) + rows[i] = &gst_data[h * stride_y + + (rh / 2) * stride_uv + + j * stride_uv]; + + for (j = 0; j < (rh / 2); j++, i++) + rows[i] = &gst_data[h 
* stride_y + j * stride_uv]; +} + +static void +_evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height) +{ + const unsigned char **rows; + unsigned int i; + unsigned int stride; + + rows = (const unsigned char **)evas_data; + + stride = GST_ROUND_UP_4(w * 2); + + for (i = 0; i < output_height; i++) + rows[i] = &gst_data[i * stride]; +} + +static void +_evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height) +{ + const unsigned char **rows; + unsigned int i, j; + unsigned int rh; + + rh = output_height; + + rows = (const unsigned char **)evas_data; + + for (i = 0; i < rh; i++) + rows[i] = &gst_data[i * w]; + + for (j = 0; j < (rh / 2); j++, i++) + rows[i] = &gst_data[rh * w + j * w]; +} + +const ColorSpace_Format_Convertion colorspace_format_convertion[] = { + { "I420", GST_VIDEO_FORMAT_I420, EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE }, + { "YV12", GST_VIDEO_FORMAT_YV12, EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE }, + { "YUY2", GST_VIDEO_FORMAT_YUY2, EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE }, + { "NV12", GST_VIDEO_FORMAT_NV12, EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE }, + { "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr, EINA_FALSE }, + { "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx, EINA_FALSE }, + { "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra, EINA_FALSE }, + { NULL, 0, 0, NULL, 0 } +}; + diff --git a/src/modules/emotion/gstreamer1/emotion_fakeeos.c b/src/modules/emotion/gstreamer1/emotion_fakeeos.c new file mode 100644 index 0000000000..7e71e72940 --- /dev/null +++ b/src/modules/emotion/gstreamer1/emotion_fakeeos.c @@ -0,0 +1,47 @@ +#ifdef HAVE_CONFIG_H +# include "config.h" +#endif + +#include "emotion_gstreamer.h" + +typedef 
struct _FakeEOSBin +{ + GstBin parent; +} FakeEOSBin; + +typedef struct _FakeEOSBinClass +{ + GstBinClass parent; +} FakeEOSBinClass; + +G_DEFINE_TYPE (FakeEOSBin, fakeeos_bin, GST_TYPE_BIN); + +static void +fakeeos_bin_handle_message(GstBin * bin, GstMessage * message) +{ + /* FakeEOSBin *fakeeos = (FakeEOSBin *)(bin); */ + + switch (GST_MESSAGE_TYPE(message)) { + case GST_MESSAGE_EOS: + /* what to do here ? just returning at the moment */ + return; + default: + break; + } + + GST_BIN_CLASS(fakeeos_bin_parent_class)->handle_message(bin, message); +} + +static void +fakeeos_bin_class_init(FakeEOSBinClass * klass) +{ + GstBinClass *gstbin_class = GST_BIN_CLASS(klass); + + gstbin_class->handle_message = + GST_DEBUG_FUNCPTR (fakeeos_bin_handle_message); +} + +static void +fakeeos_bin_init(FakeEOSBin *src EINA_UNUSED) +{ +} diff --git a/src/modules/emotion/gstreamer1/emotion_gstreamer.c b/src/modules/emotion/gstreamer1/emotion_gstreamer.c new file mode 100644 index 0000000000..c9ed86c403 --- /dev/null +++ b/src/modules/emotion/gstreamer1/emotion_gstreamer.c @@ -0,0 +1,2053 @@ +#ifdef HAVE_CONFIG_H +# include "config.h" +#endif + +#include "emotion_gstreamer.h" + +Eina_Bool window_manager_video = EINA_FALSE; +int _emotion_gstreamer_log_domain = -1; +Eina_Bool debug_fps = EINA_FALSE; +Eina_Bool _ecore_x_available = EINA_FALSE; + +static Ecore_Idler *restart_idler; +static int _emotion_init_count = 0; + +/* Callbacks to get the eos */ +static void _for_each_tag (GstTagList const* list, gchar const* tag, void *data); +static void _free_metadata (Emotion_Gstreamer_Metadata *m); + +static GstBusSyncReply _eos_sync_fct(GstBus *bus, + GstMessage *message, + gpointer data); + +static Eina_Bool _em_restart_stream(void *data); + +/* Module interface */ + + +static int priority_overide = 0; + +static Emotion_Video_Stream * +emotion_video_stream_new(Emotion_Gstreamer_Video *ev) +{ + Emotion_Video_Stream *vstream; + + if (!ev) return NULL; + + vstream = (Emotion_Video_Stream 
*)calloc(1, sizeof(Emotion_Video_Stream)); + if (!vstream) return NULL; + + ev->video_streams = eina_list_append(ev->video_streams, vstream); + return vstream; +} + +static const char * +emotion_visualization_element_name_get(Emotion_Vis visualisation) +{ + switch (visualisation) + { + case EMOTION_VIS_NONE: + return NULL; + case EMOTION_VIS_GOOM: + return "goom"; + case EMOTION_VIS_LIBVISUAL_BUMPSCOPE: + return "libvisual_bumpscope"; + case EMOTION_VIS_LIBVISUAL_CORONA: + return "libvisual_corona"; + case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES: + return "libvisual_dancingparticles"; + case EMOTION_VIS_LIBVISUAL_GDKPIXBUF: + return "libvisual_gdkpixbuf"; + case EMOTION_VIS_LIBVISUAL_G_FORCE: + return "libvisual_G-Force"; + case EMOTION_VIS_LIBVISUAL_GOOM: + return "libvisual_goom"; + case EMOTION_VIS_LIBVISUAL_INFINITE: + return "libvisual_infinite"; + case EMOTION_VIS_LIBVISUAL_JAKDAW: + return "libvisual_jakdaw"; + case EMOTION_VIS_LIBVISUAL_JESS: + return "libvisual_jess"; + case EMOTION_VIS_LIBVISUAL_LV_ANALYSER: + return "libvisual_lv_analyzer"; + case EMOTION_VIS_LIBVISUAL_LV_FLOWER: + return "libvisual_lv_flower"; + case EMOTION_VIS_LIBVISUAL_LV_GLTEST: + return "libvisual_lv_gltest"; + case EMOTION_VIS_LIBVISUAL_LV_SCOPE: + return "libvisual_lv_scope"; + case EMOTION_VIS_LIBVISUAL_MADSPIN: + return "libvisual_madspin"; + case EMOTION_VIS_LIBVISUAL_NEBULUS: + return "libvisual_nebulus"; + case EMOTION_VIS_LIBVISUAL_OINKSIE: + return "libvisual_oinksie"; + case EMOTION_VIS_LIBVISUAL_PLASMA: + return "libvisual_plazma"; + default: + return "goom"; + } +} + +static void +em_cleanup(Emotion_Gstreamer_Video *ev) +{ + Emotion_Audio_Stream *astream; + Emotion_Video_Stream *vstream; + + if (ev->send) + { + emotion_gstreamer_buffer_free(ev->send); + ev->send = NULL; + } + + if (ev->eos_bus) + { + gst_object_unref(GST_OBJECT(ev->eos_bus)); + ev->eos_bus = NULL; + } + + if (ev->metadata) + { + _free_metadata(ev->metadata); + ev->metadata = NULL; + } + + if 
(ev->last_buffer) + { + gst_buffer_unref(ev->last_buffer); + ev->last_buffer = NULL; + } + + if (!ev->stream) + { + evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL); + ev->stream = EINA_TRUE; + } + + if (ev->pipeline) + { + gstreamer_video_sink_new(ev, ev->obj, NULL); + + g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL); + g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL); + gst_element_set_state(ev->pipeline, GST_STATE_NULL); + gst_object_unref(ev->pipeline); + + ev->pipeline = NULL; + ev->sink = NULL; + + if (ev->eteepad) gst_object_unref(ev->eteepad); + ev->eteepad = NULL; + if (ev->xvteepad) gst_object_unref(ev->xvteepad); + ev->xvteepad = NULL; + if (ev->xvpad) gst_object_unref(ev->xvpad); + ev->xvpad = NULL; + + ev->src_width = 0; + ev->src_height = 0; + +#ifdef HAVE_ECORE_X + INF("destroying window: %i", ev->win); + if (ev->win) ecore_x_window_free(ev->win); + ev->win = 0; +#endif + } + + if (restart_idler) + { + ecore_idler_del(restart_idler); + restart_idler = NULL; + } + + EINA_LIST_FREE(ev->audio_streams, astream) + free(astream); + EINA_LIST_FREE(ev->video_streams, vstream) + free(vstream); +} + +static void +em_del(void *video) +{ + Emotion_Gstreamer_Video *ev = video; + + if (ev->threads) + { + Ecore_Thread *t; + + EINA_LIST_FREE(ev->threads, t) + ecore_thread_cancel(t); + + ev->delete_me = EINA_TRUE; + return; + } + + if (ev->in != ev->out) + { + ev->delete_me = EINA_TRUE; + return; + } + + em_cleanup(ev); + + free(ev); +} + +static Eina_Bool +em_file_open(void *video, + const char *file) +{ + Emotion_Gstreamer_Video *ev = video; + Eina_Strbuf *sbuf = NULL; + const char *uri; + + if (!file) return EINA_FALSE; + if (strstr(file, "://") == NULL) + { + sbuf = eina_strbuf_new(); + eina_strbuf_append(sbuf, "file://"); + if (strncmp(file, "./", 2) == 0) + file += 2; + if (strstr(file, ":/") != NULL) + { /* We absolutely need file:///C:/ under Windows, so adding it here */ + eina_strbuf_append(sbuf, "/"); + } + else 
if (*file != '/') + { + char tmp[PATH_MAX]; + + if (getcwd(tmp, PATH_MAX)) + { + eina_strbuf_append(sbuf, tmp); + eina_strbuf_append(sbuf, "/"); + } + } + eina_strbuf_append(sbuf, file); + } + + ev->play_started = 0; + ev->pipeline_parsed = 0; + + uri = sbuf ? eina_strbuf_string_get(sbuf) : file; + DBG("setting file to '%s'", uri); + ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, uri); + if (sbuf) eina_strbuf_free(sbuf); + + if (!ev->pipeline) + return EINA_FALSE; + + ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline)); + if (!ev->eos_bus) + { + ERR("could not get the bus"); + return EINA_FALSE; + } + + gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev, NULL); + + ev->position = 0.0; + + return 1; +} + +static void +em_file_close(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + if (!ev) + return; + + if (ev->threads) + { + Ecore_Thread *t; + + EINA_LIST_FREE(ev->threads, t) + ecore_thread_cancel(t); + } + + em_cleanup(ev); + + ev->pipeline_parsed = EINA_FALSE; + ev->play_started = 0; +} + +static void +em_play(void *video, + double pos EINA_UNUSED) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + if (!ev->pipeline) return; + + if (ev->pipeline_parsed) + gst_element_set_state(ev->pipeline, GST_STATE_PLAYING); + ev->play = 1; + ev->play_started = 1; +} + +static void +em_stop(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!ev->pipeline) return; + + if (ev->pipeline_parsed) + gst_element_set_state(ev->pipeline, GST_STATE_PAUSED); + ev->play = 0; +} + +static void +em_size_get(void *video, + int *width, + int *height) +{ + Emotion_Gstreamer_Video *ev; + Emotion_Video_Stream *vstream; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE)) + goto on_error; + + vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); + if (vstream) + { + if (width) *width = 
vstream->info.width; + if (height) *height = vstream->info.height; + + return; + } + + on_error: + if (width) *width = 0; + if (height) *height = 0; +} + +static void +em_pos_set(void *video, + double pos) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!ev->pipeline) return; + + if (ev->play) + gst_element_set_state(ev->pipeline, GST_STATE_PAUSED); + + gst_element_seek(ev->pipeline, 1.0, + GST_FORMAT_TIME, + GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH, + GST_SEEK_TYPE_SET, + (gint64)(pos * (double)GST_SECOND), + GST_SEEK_TYPE_NONE, -1); + + if (ev->play) + gst_element_set_state(ev->pipeline, GST_STATE_PLAYING); +} + +static double +em_len_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + Emotion_Video_Stream *vstream; + Emotion_Audio_Stream *astream; + Eina_List *l; + gint64 val; + gboolean ret; + + ev = video; + + if (!ev->pipeline) return 0.0; + + ret = gst_element_query_duration(ev->pipeline, GST_FORMAT_TIME, &val); + if (!ret) + goto fallback; + + if (val <= 0.0) + goto fallback; + + return val / 1000000000.0; + + fallback: + if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE)) + return 0.0; + + EINA_LIST_FOREACH(ev->audio_streams, l, astream) + if (astream->length_time >= 0) + return astream->length_time; + + EINA_LIST_FOREACH(ev->video_streams, l, vstream) + if (vstream->length_time >= 0) + return vstream->length_time; + + return 0.0; +} + +static double +em_buffer_size_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + GstQuery *query; + gboolean busy; + gint percent; + + ev = video; + + if (!ev->pipeline) return 0.0; + + query = gst_query_new_buffering(GST_FORMAT_DEFAULT); + if (gst_element_query(ev->pipeline, query)) + gst_query_parse_buffering_percent(query, &busy, &percent); + else + percent = 100; + + gst_query_unref(query); + return ((float)(percent)) / 100.0; +} + +static int +em_fps_num_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + Emotion_Video_Stream *vstream; + + ev = 
(Emotion_Gstreamer_Video *)video; + + if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE)) + return 0; + + vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); + if (vstream) + return vstream->info.fps_n; + + return 0; +} + +static int +em_fps_den_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + Emotion_Video_Stream *vstream; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE)) + return 1; + + vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); + if (vstream) + return vstream->info.fps_d; + + return 1; +} + +static double +em_fps_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + Emotion_Video_Stream *vstream; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE)) + return 0.0; + + vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); + if (vstream) + return (double)vstream->info.fps_n / (double)vstream->info.fps_d; + + return 0.0; +} + +static double +em_pos_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + gint64 val; + gboolean ret; + + ev = video; + + if (!ev->pipeline) return 0.0; + + ret = gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &val); + if (!ret) + return ev->position; + + ev->position = val / 1000000000.0; + return ev->position; +} + +static void +em_vis_set(void *video, + Emotion_Vis vis) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + ev->vis = vis; +} + +static Emotion_Vis +em_vis_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + return ev->vis; +} + +static Eina_Bool +em_vis_supported(void *ef EINA_UNUSED, Emotion_Vis vis) +{ + const char *name; + GstElementFactory *factory; + + if (vis == EMOTION_VIS_NONE) + return EINA_TRUE; + + name = emotion_visualization_element_name_get(vis); + if (!name) + return EINA_FALSE; + + factory = gst_element_factory_find(name); + if (!factory) + 
return EINA_FALSE; + + gst_object_unref(factory); + return EINA_TRUE; +} + +static double +em_ratio_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + return ev->ratio; +} + +static int +em_video_handled(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE); + + if (!eina_list_count(ev->video_streams)) + return 0; + + return 1; +} + +static int +em_audio_handled(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE); + + if (!eina_list_count(ev->audio_streams)) + return 0; + + return 1; +} + +static int +em_seekable(void *video EINA_UNUSED) +{ + return 1; +} + +static void +em_frame_done(void *video EINA_UNUSED) +{ +} + +static Emotion_Format +em_format_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + Emotion_Video_Stream *vstream; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE)) + return EMOTION_FORMAT_NONE; + + vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); + if (vstream) + { + switch (vstream->info.finfo->format) + { + case GST_VIDEO_FORMAT_I420: + return EMOTION_FORMAT_I420; + case GST_VIDEO_FORMAT_YV12: + return EMOTION_FORMAT_YV12; + case GST_VIDEO_FORMAT_YUY2: + return EMOTION_FORMAT_YUY2; + case GST_VIDEO_FORMAT_ARGB: + /* FIXME: This will be wrong for big endian archs */ + return EMOTION_FORMAT_BGRA; + default: + return EMOTION_FORMAT_NONE; + } + } + return EMOTION_FORMAT_NONE; +} + +static void +em_video_data_size_get(void *video, int *w, int *h) +{ + Emotion_Gstreamer_Video *ev; + Emotion_Video_Stream *vstream; + + ev = (Emotion_Gstreamer_Video *)video; + + if (ev->pipeline && (!ev->video_stream_nbr || !ev->video_streams)) + if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE)) + goto on_error; + + vstream = 
eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); + if (vstream) + { + *w = vstream->info.width; + *h = vstream->info.height; + + return; + } + + on_error: + *w = 0; + *h = 0; +} + +static int +em_yuv_rows_get(void *video EINA_UNUSED, + int w EINA_UNUSED, + int h EINA_UNUSED, + unsigned char **yrows EINA_UNUSED, + unsigned char **urows EINA_UNUSED, + unsigned char **vrows EINA_UNUSED) +{ + return 0; +} + +static int +em_bgra_data_get(void *video EINA_UNUSED, unsigned char **bgra_data EINA_UNUSED) +{ + return 0; +} + +static void +em_event_feed(void *video, int event) +{ + Emotion_Gstreamer_Video *ev; + GstNavigationCommand command; + + ev = (Emotion_Gstreamer_Video *)video; + + switch (event) + { + case EMOTION_EVENT_MENU1: + command = GST_NAVIGATION_COMMAND_MENU1; + break; + case EMOTION_EVENT_MENU2: + command = GST_NAVIGATION_COMMAND_MENU2; + break; + case EMOTION_EVENT_MENU3: + command = GST_NAVIGATION_COMMAND_MENU3; + break; + case EMOTION_EVENT_MENU4: + command = GST_NAVIGATION_COMMAND_MENU4; + break; + case EMOTION_EVENT_MENU5: + command = GST_NAVIGATION_COMMAND_MENU5; + break; + case EMOTION_EVENT_MENU6: + command = GST_NAVIGATION_COMMAND_MENU6; + break; + case EMOTION_EVENT_MENU7: + command = GST_NAVIGATION_COMMAND_MENU7; + break; + case EMOTION_EVENT_UP: + command = GST_NAVIGATION_COMMAND_UP; + break; + case EMOTION_EVENT_DOWN: + command = GST_NAVIGATION_COMMAND_DOWN; + break; + case EMOTION_EVENT_LEFT: + command = GST_NAVIGATION_COMMAND_LEFT; + break; + case EMOTION_EVENT_RIGHT: + command = GST_NAVIGATION_COMMAND_RIGHT; + break; + case EMOTION_EVENT_SELECT: + command = GST_NAVIGATION_COMMAND_ACTIVATE; + break; + case EMOTION_EVENT_NEXT: + /* FIXME */ + command = GST_NAVIGATION_COMMAND_RIGHT; + break; + case EMOTION_EVENT_PREV: + /* FIXME */ + command = GST_NAVIGATION_COMMAND_LEFT; + break; + case EMOTION_EVENT_ANGLE_NEXT: + command = GST_NAVIGATION_COMMAND_NEXT_ANGLE; + break; + case EMOTION_EVENT_ANGLE_PREV: + command = 
GST_NAVIGATION_COMMAND_PREV_ANGLE; + break; + case EMOTION_EVENT_FORCE: + /* FIXME */ + command = GST_NAVIGATION_COMMAND_ACTIVATE; + break; + case EMOTION_EVENT_0: + case EMOTION_EVENT_1: + case EMOTION_EVENT_2: + case EMOTION_EVENT_3: + case EMOTION_EVENT_4: + case EMOTION_EVENT_5: + case EMOTION_EVENT_6: + case EMOTION_EVENT_7: + case EMOTION_EVENT_8: + case EMOTION_EVENT_9: + case EMOTION_EVENT_10: + default: + return; + break; + } + + gst_navigation_send_command (GST_NAVIGATION (ev->pipeline), command); +} + +static void +em_event_mouse_button_feed(void *video, int button, int x, int y) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + /* FIXME */ + gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-button-press", button, x, y); + gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-button-release", button, x, y); +} + +static void +em_event_mouse_move_feed(void *video, int x, int y) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + gst_navigation_send_mouse_event (GST_NAVIGATION (ev->pipeline), "mouse-move", 0, x, y); +} + +/* Video channels */ +static int +em_video_channel_count(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE); + + return eina_list_count(ev->video_streams); +} + +static void +em_video_channel_set(void *video, + int channel) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE); + + if (channel < 0) channel = -1; + + if (ev->pipeline) + g_object_set (ev->pipeline, "current-video", channel, NULL); +} + +static int +em_video_channel_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE); + + return ev->video_stream_nbr; +} + +static void +em_video_subtitle_file_set(void 
*video EINA_UNUSED, + const char *filepath EINA_UNUSED) +{ + DBG("video_subtitle_file_set not implemented for gstreamer yet."); +} + +static const char * +em_video_subtitle_file_get(void *video EINA_UNUSED) +{ + DBG("video_subtitle_file_get not implemented for gstreamer yet."); + return NULL; +} + +static const char * +em_video_channel_name_get(void *video EINA_UNUSED, + int channel EINA_UNUSED) +{ + return NULL; +} + +static void +em_video_channel_mute_set(void *video, + int mute) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + ev->video_mute = mute; +} + +static int +em_video_channel_mute_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + return ev->video_mute; +} + +/* Audio channels */ + +static int +em_audio_channel_count(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE); + + return eina_list_count(ev->audio_streams); +} + +static void +em_audio_channel_set(void *video, + int channel) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE); + + if (channel < 0) channel = -1; + + if (ev->pipeline) + g_object_set (ev->pipeline, "current-audio", channel, NULL); +} + +static int +em_audio_channel_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE); + + return ev->audio_stream_nbr; +} + +static const char * +em_audio_channel_name_get(void *video EINA_UNUSED, + int channel EINA_UNUSED) +{ + return NULL; +} + +#define GST_PLAY_FLAG_AUDIO (1 << 1) + +static void +em_audio_channel_mute_set(void *video, + int mute) +{ + /* NOTE: at first I wanted to completly shutdown the audio path on mute, + but that's not possible as the audio sink could be the clock source + for the pipeline (at least that's the case on some 
of the hardware + I have been tested emotion on. + */ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!ev->pipeline) return; + + ev->audio_mute = mute; + + g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL); +} + +static int +em_audio_channel_mute_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + return ev->audio_mute; +} + +static void +em_audio_channel_volume_set(void *video, + double vol) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!ev->pipeline) return; + + if (vol < 0.0) + vol = 0.0; + if (vol > 1.0) + vol = 1.0; + ev->volume = vol; + g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL); +} + +static double +em_audio_channel_volume_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = (Emotion_Gstreamer_Video *)video; + + return ev->volume; +} + +/* spu stuff */ + +static int +em_spu_channel_count(void *video EINA_UNUSED) +{ + return 0; +} + +static void +em_spu_channel_set(void *video EINA_UNUSED, int channel EINA_UNUSED) +{ +} + +static int +em_spu_channel_get(void *video EINA_UNUSED) +{ + return 1; +} + +static const char * +em_spu_channel_name_get(void *video EINA_UNUSED, int channel EINA_UNUSED) +{ + return NULL; +} + +static void +em_spu_channel_mute_set(void *video EINA_UNUSED, int mute EINA_UNUSED) +{ +} + +static int +em_spu_channel_mute_get(void *video EINA_UNUSED) +{ + return 0; +} + +static int +em_chapter_count(void *video EINA_UNUSED) +{ + return 0; +} + +static void +em_chapter_set(void *video EINA_UNUSED, int chapter EINA_UNUSED) +{ +} + +static int +em_chapter_get(void *video EINA_UNUSED) +{ + return 0; +} + +static const char * +em_chapter_name_get(void *video EINA_UNUSED, int chapter EINA_UNUSED) +{ + return NULL; +} + +static void +em_speed_set(void *video EINA_UNUSED, double speed EINA_UNUSED) +{ +} + +static double +em_speed_get(void *video EINA_UNUSED) +{ + return 1.0; +} + +static int 
+em_eject(void *video EINA_UNUSED) +{ + return 1; +} + +static const char * +em_meta_get(void *video, int meta) +{ + Emotion_Gstreamer_Video *ev; + const char *str = NULL; + + ev = (Emotion_Gstreamer_Video *)video; + + if (!ev || !ev->metadata) return NULL; + switch (meta) + { + case META_TRACK_TITLE: + str = ev->metadata->title; + break; + case META_TRACK_ARTIST: + str = ev->metadata->artist; + break; + case META_TRACK_ALBUM: + str = ev->metadata->album; + break; + case META_TRACK_YEAR: + str = ev->metadata->year; + break; + case META_TRACK_GENRE: + str = ev->metadata->genre; + break; + case META_TRACK_COMMENT: + str = ev->metadata->comment; + break; + case META_TRACK_DISCID: + str = ev->metadata->disc_id; + break; + default: + break; + } + + return str; +} + +static void +em_priority_set(void *video, Eina_Bool pri) +{ + Emotion_Gstreamer_Video *ev; + + ev = video; + if (priority_overide > 3) return; /* If we failed to much to create that pipeline, let's don't wast our time anymore */ + + if (ev->priority != pri && ev->pipeline) + { + if (ev->threads) + { + Ecore_Thread *t; + + EINA_LIST_FREE(ev->threads, t) + ecore_thread_cancel(t); + } + em_cleanup(ev); + restart_idler = ecore_idler_add(_em_restart_stream, ev); + } + ev->priority = pri; +} + +static Eina_Bool +em_priority_get(void *video) +{ + Emotion_Gstreamer_Video *ev; + + ev = video; + return !ev->stream; +} + +#ifdef HAVE_ECORE_X +static Eina_Bool +_ecore_event_x_destroy(void *data EINA_UNUSED, int type EINA_UNUSED, void *event EINA_UNUSED) +{ + Ecore_X_Event_Window_Destroy *ev = event; + + INF("killed window: %x (%x).", ev->win, ev->event_win); + + return EINA_TRUE; +} + +static void +gstreamer_ecore_x_check(void) +{ + Ecore_X_Window *roots; + int num; + + ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL); + + /* Check if the window manager is able to handle our special Xv window. 
*/ + roots = ecore_x_window_root_list(&num); + if (roots && num > 0) + { + Ecore_X_Window win, twin; + int nwins; + + nwins = ecore_x_window_prop_window_get(roots[0], + ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK, + &win, 1); + if (nwins > 0) + { + nwins = ecore_x_window_prop_window_get(win, + ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK, + &twin, 1); + if (nwins > 0 && twin == win) + { + Ecore_X_Atom *supported; + int supported_num; + int i; + + if (ecore_x_netwm_supported_get(roots[0], &supported, &supported_num)) + { + Eina_Bool parent = EINA_FALSE; + Eina_Bool video_position = EINA_FALSE; + + for (i = 0; i < supported_num; ++i) + { + if (supported[i] == ECORE_X_ATOM_E_VIDEO_PARENT) + parent = EINA_TRUE; + else if (supported[i] == ECORE_X_ATOM_E_VIDEO_POSITION) + video_position = EINA_TRUE; + if (parent && video_position) + break; + } + + if (parent && video_position) + { + window_manager_video = EINA_TRUE; + } + } + free(supported); + } + } + } + free(roots); +} +#endif + +static void * +em_add(const Emotion_Engine *api, + Evas_Object *obj, + const Emotion_Module_Options *opt EINA_UNUSED) +{ + Emotion_Gstreamer_Video *ev; + + ev = calloc(1, sizeof(Emotion_Gstreamer_Video)); + EINA_SAFETY_ON_NULL_RETURN_VAL(ev, NULL); + + ev->api = api; + ev->obj = obj; + + /* Default values */ + ev->ratio = 1.0; + ev->vis = EMOTION_VIS_NONE; + ev->volume = 0.8; + ev->play_started = 0; + ev->delete_me = EINA_FALSE; + ev->threads = NULL; + + return ev; +} + +static const Emotion_Engine em_engine = +{ + EMOTION_ENGINE_API_VERSION, + EMOTION_ENGINE_PRIORITY_DEFAULT, + "gstreamer1", + em_add, /* add */ + em_del, /* del */ + em_file_open, /* file_open */ + em_file_close, /* file_close */ + em_play, /* play */ + em_stop, /* stop */ + em_size_get, /* size_get */ + em_pos_set, /* pos_set */ + em_len_get, /* len_get */ + em_buffer_size_get, /* buffer_size_get */ + em_fps_num_get, /* fps_num_get */ + em_fps_den_get, /* fps_den_get */ + em_fps_get, /* fps_get */ + em_pos_get, /* pos_get */ + em_vis_set, 
/* vis_set */ + em_vis_get, /* vis_get */ + em_vis_supported, /* vis_supported */ + em_ratio_get, /* ratio_get */ + em_video_handled, /* video_handled */ + em_audio_handled, /* audio_handled */ + em_seekable, /* seekable */ + em_frame_done, /* frame_done */ + em_format_get, /* format_get */ + em_video_data_size_get, /* video_data_size_get */ + em_yuv_rows_get, /* yuv_rows_get */ + em_bgra_data_get, /* bgra_data_get */ + em_event_feed, /* event_feed */ + em_event_mouse_button_feed, /* event_mouse_button_feed */ + em_event_mouse_move_feed, /* event_mouse_move_feed */ + em_video_channel_count, /* video_channel_count */ + em_video_channel_set, /* video_channel_set */ + em_video_channel_get, /* video_channel_get */ + em_video_subtitle_file_set, /* video_subtitle_file_set */ + em_video_subtitle_file_get, /* video_subtitle_file_get */ + em_video_channel_name_get, /* video_channel_name_get */ + em_video_channel_mute_set, /* video_channel_mute_set */ + em_video_channel_mute_get, /* video_channel_mute_get */ + em_audio_channel_count, /* audio_channel_count */ + em_audio_channel_set, /* audio_channel_set */ + em_audio_channel_get, /* audio_channel_get */ + em_audio_channel_name_get, /* audio_channel_name_get */ + em_audio_channel_mute_set, /* audio_channel_mute_set */ + em_audio_channel_mute_get, /* audio_channel_mute_get */ + em_audio_channel_volume_set, /* audio_channel_volume_set */ + em_audio_channel_volume_get, /* audio_channel_volume_get */ + em_spu_channel_count, /* spu_channel_count */ + em_spu_channel_set, /* spu_channel_set */ + em_spu_channel_get, /* spu_channel_get */ + em_spu_channel_name_get, /* spu_channel_name_get */ + em_spu_channel_mute_set, /* spu_channel_mute_set */ + em_spu_channel_mute_get, /* spu_channel_mute_get */ + em_chapter_count, /* chapter_count */ + em_chapter_set, /* chapter_set */ + em_chapter_get, /* chapter_get */ + em_chapter_name_get, /* chapter_name_get */ + em_speed_set, /* speed_set */ + em_speed_get, /* speed_get */ + em_eject, /* 
eject */ + em_meta_get, /* meta_get */ + em_priority_set, /* priority_set */ + em_priority_get /* priority_get */ +}; + +Eina_Bool +gstreamer_module_init(void) +{ + GError *error; + + if (_emotion_init_count > 0) + { + _emotion_pending_ecore_begin(); + return EINA_TRUE; + } + + if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE; + + eina_threads_init(); + eina_log_threads_enable(); + _emotion_gstreamer_log_domain = eina_log_domain_register + ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN); + if (_emotion_gstreamer_log_domain < 0) + { + EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'"); + return EINA_FALSE; + } + + if (!gst_init_check(0, NULL, &error)) + { + EINA_LOG_CRIT("Could not init GStreamer"); + goto error_gst_init; + } + +#ifdef HAVE_ECORE_X + if (ecore_x_init(NULL) > 0) + { + _ecore_x_available = EINA_TRUE; + gstreamer_ecore_x_check(); + } +#endif + + if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR, + "emotion-sink", + "video sink plugin for Emotion", + gstreamer_plugin_init, + VERSION, + "LGPL", + "Enlightenment", + PACKAGE, + "http://www.enlightenment.org/") == FALSE) + { + EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion."); + goto error_gst_plugin; + } + + if (!_emotion_module_register(&em_engine)) + { + ERR("Could not register module %p", &em_engine); + goto error_register; + } + + _emotion_init_count = 1; + return EINA_TRUE; + + error_register: + error_gst_plugin: +#ifdef HAVE_ECORE_X + if (_ecore_x_available) + { + ecore_x_shutdown(); + _ecore_x_available = EINA_FALSE; + window_manager_video = EINA_FALSE; + } +#endif + + gst_deinit(); + + error_gst_init: + eina_log_domain_unregister(_emotion_gstreamer_log_domain); + _emotion_gstreamer_log_domain = -1; + + return EINA_FALSE; +} + +void +gstreamer_module_shutdown(void) +{ + if (_emotion_init_count > 1) + { + _emotion_init_count--; + return; + } + else if (_emotion_init_count == 0) + { + EINA_LOG_ERR("too many gstreamer_module_shutdown()"); + 
return; + } + _emotion_init_count = 0; + + _emotion_module_unregister(&em_engine); + +#ifdef HAVE_ECORE_X + if (_ecore_x_available) + { + ecore_x_shutdown(); + _ecore_x_available = EINA_FALSE; + window_manager_video = EINA_FALSE; + } +#endif + + eina_log_domain_unregister(_emotion_gstreamer_log_domain); + _emotion_gstreamer_log_domain = -1; + + gst_deinit(); +} + +#ifndef EMOTION_STATIC_BUILD_GSTREAMER + +EINA_MODULE_INIT(gstreamer_module_init); +EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown); + +#endif + +static void +_for_each_tag(GstTagList const* list, + gchar const* tag, + void *data) +{ + Emotion_Gstreamer_Video *ev; + int i; + int count; + + + ev = (Emotion_Gstreamer_Video*)data; + + if (!ev || !ev->metadata) return; + + /* FIXME: Should use the GStreamer tag merging functions */ + count = gst_tag_list_get_tag_size(list, tag); + + for (i = 0; i < count; i++) + { + if (!strcmp(tag, GST_TAG_TITLE)) + { + char *str; + g_free(ev->metadata->title); + if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str)) + ev->metadata->title = str; + else + ev->metadata->title = NULL; + break; + } + if (!strcmp(tag, GST_TAG_ALBUM)) + { + gchar *str; + g_free(ev->metadata->album); + if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str)) + ev->metadata->album = str; + else + ev->metadata->album = NULL; + break; + } + if (!strcmp(tag, GST_TAG_ARTIST)) + { + gchar *str; + g_free(ev->metadata->artist); + if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str)) + ev->metadata->artist = str; + else + ev->metadata->artist = NULL; + break; + } + if (!strcmp(tag, GST_TAG_GENRE)) + { + gchar *str; + g_free(ev->metadata->genre); + if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str)) + ev->metadata->genre = str; + else + ev->metadata->genre = NULL; + break; + } + if (!strcmp(tag, GST_TAG_COMMENT)) + { + gchar *str; + g_free(ev->metadata->comment); + if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str)) + ev->metadata->comment = str; + else + ev->metadata->comment = NULL; + break; + 
} + if (!strcmp(tag, GST_TAG_DATE)) + { + gchar *str; + const GValue *date; + g_free(ev->metadata->year); + date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0); + if (date) + str = g_strdup_value_contents(date); + else + str = NULL; + ev->metadata->year = str; + break; + } + + if (!strcmp(tag, GST_TAG_TRACK_NUMBER)) + { + gchar *str; + const GValue *track; + g_free(ev->metadata->count); + track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0); + if (track) + str = g_strdup_value_contents(track); + else + str = NULL; + ev->metadata->count = str; + break; + } + + if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID)) + { + gchar *str; + const GValue *discid; + g_free(ev->metadata->disc_id); + discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0); + if (discid) + str = g_strdup_value_contents(discid); + else + str = NULL; + ev->metadata->disc_id = str; + break; + } + } + +} + +static void +_free_metadata(Emotion_Gstreamer_Metadata *m) +{ + if (!m) return; + + g_free(m->title); + g_free(m->album); + g_free(m->artist); + g_free(m->genre); + g_free(m->comment); + g_free(m->year); + g_free(m->count); + g_free(m->disc_id); + + free(m); +} + +static Eina_Bool +_em_restart_stream(void *data) +{ + Emotion_Gstreamer_Video *ev; + + ev = data; + + ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri); + + if (ev->pipeline) + { + ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline)); + if (!ev->eos_bus) + { + ERR("could not get the bus"); + return EINA_FALSE; + } + + gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev, NULL); + } + + restart_idler = NULL; + + return ECORE_CALLBACK_CANCEL; +} + +static Eina_Bool +_video_size_get(GstElement *elem, int *width, int *height) +{ + GstIterator *itr = NULL; + GstCaps *caps; + GValue v = G_VALUE_INIT; + GstPad *pad; + Eina_Bool ret = EINA_FALSE; + Eina_Bool done = EINA_FALSE; + GstVideoInfo info; + + itr = gst_element_iterate_src_pads(elem); + + while (!done && !ret) + { + 
switch(gst_iterator_next(itr, &v)) + { + case GST_ITERATOR_OK: + pad = GST_PAD(g_value_get_object(&v)); + caps = gst_pad_get_current_caps(pad); + if (gst_video_info_from_caps(&info, caps)) + { + *width = info.width; + *height = info.height; + ret = EINA_TRUE; + done = EINA_TRUE; + } + gst_caps_unref(caps); + g_value_reset(&v); + break; + case GST_ITERATOR_RESYNC: + gst_iterator_resync(itr); + done = ret = EINA_FALSE; + break; + case GST_ITERATOR_ERROR: + case GST_ITERATOR_DONE: + done = TRUE; + break; + } + } + g_value_unset(&v); + gst_iterator_free(itr); + + return ret; +} + +static void +_main_frame_resize(void *data) +{ + Emotion_Gstreamer_Video *ev = data; + double ratio; + + ratio = (double)ev->src_width / (double)ev->src_height; + _emotion_frame_resize(ev->obj, ev->src_width, ev->src_height, ratio); + _emotion_pending_ecore_end(); +} + +static void +_no_more_pads(GstElement *decodebin, gpointer data) +{ + GstIterator *itr = NULL; + Emotion_Gstreamer_Video *ev = data; + GValue v = G_VALUE_INIT; + GstElement *elem; + Eina_Bool done = EINA_FALSE; + + itr = gst_bin_iterate_elements(GST_BIN(decodebin)); + + while (!done) + { + switch(gst_iterator_next(itr, &v)) + { + case GST_ITERATOR_OK: + elem = GST_ELEMENT(g_value_get_object(&v)); + if(_video_size_get(elem, &ev->src_width, &ev->src_height)) + { + _emotion_pending_ecore_begin(); + ecore_main_loop_thread_safe_call_async(_main_frame_resize, ev); + g_value_reset(&v); + done = EINA_TRUE; + break; + } + g_value_reset(&v); + break; + case GST_ITERATOR_RESYNC: + gst_iterator_resync(itr); + done = EINA_FALSE; + break; + case GST_ITERATOR_ERROR: + case GST_ITERATOR_DONE: + done = TRUE; + break; + } + } + g_value_unset(&v); + gst_iterator_free(itr); +} + +static void +_eos_main_fct(void *data) +{ + Emotion_Gstreamer_Message *send; + Emotion_Gstreamer_Video *ev; + GstMessage *msg; + + send = data; + ev = send->ev; + msg = send->msg; + + if (ev->play_started && !ev->delete_me) + { + _emotion_playback_started(ev->obj); + 
ev->play_started = 0; + } + + switch (GST_MESSAGE_TYPE(msg)) + { + case GST_MESSAGE_EOS: + if (!ev->delete_me) + { + ev->play = 0; + _emotion_decode_stop(ev->obj); + _emotion_playback_finished(ev->obj); + } + break; + case GST_MESSAGE_TAG: + if (!ev->delete_me) + { + GstTagList *new_tags; + gst_message_parse_tag(msg, &new_tags); + if (new_tags) + { + gst_tag_list_foreach(new_tags, + (GstTagForeachFunc)_for_each_tag, + ev); + gst_tag_list_free(new_tags); + } + } + break; + case GST_MESSAGE_ASYNC_DONE: + if (!ev->delete_me) _emotion_seek_done(ev->obj); + break; + case GST_MESSAGE_STREAM_STATUS: + break; + case GST_MESSAGE_STATE_CHANGED: + if (!ev->delete_me) + { + /* FIXME: This is conceptionally broken */ + if (!g_signal_handlers_disconnect_by_func(msg->src, _no_more_pads, ev)) + g_signal_connect(msg->src, "no-more-pads", G_CALLBACK(_no_more_pads), ev); + } + break; + case GST_MESSAGE_ERROR: + em_cleanup(ev); + + if (ev->priority) + { + ERR("Switching back to canvas rendering."); + ev->priority = EINA_FALSE; + priority_overide++; + + restart_idler = ecore_idler_add(_em_restart_stream, ev); + } + break; + default: + ERR("bus say: %s [%i - %s]", + GST_MESSAGE_SRC_NAME(msg), + GST_MESSAGE_TYPE(msg), + GST_MESSAGE_TYPE_NAME(msg)); + break; + } + + emotion_gstreamer_message_free(send); + _emotion_pending_ecore_end(); +} + +static GstBusSyncReply +_eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data) +{ + Emotion_Gstreamer_Video *ev = data; + Emotion_Gstreamer_Message *send; + + switch (GST_MESSAGE_TYPE(msg)) + { + case GST_MESSAGE_EOS: + case GST_MESSAGE_TAG: + case GST_MESSAGE_ASYNC_DONE: + case GST_MESSAGE_STREAM_STATUS: + INF("bus say: %s [%i - %s]", + GST_MESSAGE_SRC_NAME(msg), + GST_MESSAGE_TYPE(msg), + GST_MESSAGE_TYPE_NAME(msg)); + send = emotion_gstreamer_message_alloc(ev, msg); + + if (send) + { + _emotion_pending_ecore_begin(); + ecore_main_loop_thread_safe_call_async(_eos_main_fct, send); + } + + break; + + case GST_MESSAGE_STATE_CHANGED: + { 
+ GstState old_state, new_state; + + gst_message_parse_state_changed(msg, &old_state, &new_state, NULL); + INF("Element %s changed state from %s to %s.", + GST_OBJECT_NAME(msg->src), + gst_element_state_get_name(old_state), + gst_element_state_get_name(new_state)); + + /* FIXME: This is broken */ + if (!strncmp(GST_OBJECT_NAME(msg->src), "decodebin", 9) && new_state == GST_STATE_READY) + { + send = emotion_gstreamer_message_alloc(ev, msg); + + if (send) + { + _emotion_pending_ecore_begin(); + ecore_main_loop_thread_safe_call_async(_eos_main_fct, send); + } + } + break; + } + case GST_MESSAGE_ERROR: + { + GError *error; + gchar *debug; + + gst_message_parse_error(msg, &error, &debug); + ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), error->message); + ERR("Debugging info: %s", (debug) ? debug : "none"); + g_error_free(error); + g_free(debug); + + /* FIXME: This is broken */ + if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0) + { + send = emotion_gstreamer_message_alloc(ev, msg); + + if (send) + { + _emotion_pending_ecore_begin(); + ecore_main_loop_thread_safe_call_async(_eos_main_fct, send); + } + } + break; + } + case GST_MESSAGE_WARNING: + { + GError *error; + gchar *debug; + + gst_message_parse_warning(msg, &error, &debug); + WRN("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), error->message); + WRN("Debugging info: %s", (debug) ? 
debug : "none"); + g_error_free(error); + g_free(debug); + break; + } + default: + WRN("bus say: %s [%i - %s]", + GST_MESSAGE_SRC_NAME(msg), + GST_MESSAGE_TYPE(msg), + GST_MESSAGE_TYPE_NAME(msg)); + break; + } + + gst_message_unref(msg); + + return GST_BUS_DROP; +} + +Eina_Bool +_emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev, + Eina_Bool force) +{ + gboolean res; + int i; + + if (ev->pipeline_parsed) + return EINA_TRUE; + + if (force && ev->threads) + { + Ecore_Thread *t; + + EINA_LIST_FREE(ev->threads, t) + ecore_thread_cancel(t); + } + + if (ev->threads) + return EINA_FALSE; + + res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE); + if (res == GST_STATE_CHANGE_NO_PREROLL) + { + gst_element_set_state(ev->pipeline, GST_STATE_PLAYING); + + res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE); + } + + /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */ + /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */ + if (getenv("EMOTION_GSTREAMER_DOT")) + GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), + GST_DEBUG_GRAPH_SHOW_ALL, + getenv("EMOTION_GSTREAMER_DOT")); + + if (!(res == GST_STATE_CHANGE_SUCCESS + || res == GST_STATE_CHANGE_NO_PREROLL)) + { + ERR("Unable to get GST_CLOCK_TIME_NONE."); + return EINA_FALSE; + } + + g_object_get(G_OBJECT(ev->pipeline), + "n-audio", &ev->audio_stream_nbr, + "n-video", &ev->video_stream_nbr, + NULL); + + if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0)) + { + ERR("No audio nor video stream found"); + return EINA_FALSE; + } + + /* video stream */ + for (i = 0; i < ev->video_stream_nbr; i++) + { + Emotion_Video_Stream *vstream; + GstPad *pad = NULL; + GstCaps *caps; + GstQuery *query; + + gdouble length_time = 0.0; + GstVideoInfo info; + + g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad); + if (!pad) + continue; + + 
caps = gst_pad_get_current_caps(pad); + if (!caps) + goto unref_pad_v; + + if (!gst_video_info_from_caps(&info, caps)) + goto unref_caps_v; + + query = gst_query_new_duration(GST_FORMAT_TIME); + if (gst_pad_peer_query(pad, query)) + { + gint64 t; + + gst_query_parse_duration(query, NULL, &t); + length_time = (double)t / (double)GST_SECOND; + } + else + goto unref_query_v; + + vstream = emotion_video_stream_new(ev); + if (!vstream) goto unref_query_v; + + vstream->length_time = length_time; + vstream->info = info; + vstream->index = i; + + unref_query_v: + gst_query_unref(query); + unref_caps_v: + gst_caps_unref(caps); + unref_pad_v: + gst_object_unref(pad); + } + + /* Audio streams */ + for (i = 0; i < ev->audio_stream_nbr; i++) + { + Emotion_Audio_Stream *astream; + GstPad *pad; + GstCaps *caps; + GstAudioInfo info; + GstQuery *query; + + gdouble length_time = 0.0; + + g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad); + if (!pad) + continue; + + caps = gst_pad_get_current_caps(pad); + if (!caps) + goto unref_pad_a; + + if (!gst_audio_info_from_caps(&info, caps)) + goto unref_caps_a; + + query = gst_query_new_duration(GST_FORMAT_TIME); + if (gst_pad_peer_query(pad, query)) + { + gint64 t; + + gst_query_parse_duration(query, NULL, &t); + length_time = (double)t / (double)GST_SECOND; + } + else + goto unref_query_a; + + astream = calloc(1, sizeof(Emotion_Audio_Stream)); + if (!astream) continue; + ev->audio_streams = eina_list_append(ev->audio_streams, astream); + + astream->length_time = length_time; + astream->info = info; + + unref_query_a: + gst_query_unref(query); + unref_caps_a: + gst_caps_unref(caps); + unref_pad_a: + gst_object_unref(pad); + } + + /* Visualization sink */ + if (ev->video_stream_nbr == 0) + { + GstElement *vis = NULL; + Emotion_Video_Stream *vstream; + Emotion_Audio_Stream *astream; + gint flags; + const char *vis_name; + + if (!(vis_name = emotion_visualization_element_name_get(ev->vis))) + { + WRN("pb vis name %d", ev->vis); + 
goto finalize; + } + + astream = eina_list_data_get(ev->audio_streams); + + vis = gst_element_factory_make(vis_name, "vissink"); + vstream = emotion_video_stream_new(ev); + if (!vstream) + goto finalize; + else + DBG("could not create visualization stream"); + + vstream->length_time = astream->length_time; + gst_video_info_init (&vstream->info); + gst_video_info_set_format (&vstream->info, 320, 200, GST_VIDEO_FORMAT_ARGB); + vstream->info.fps_n = 25; + vstream->info.fps_d = 1; + + g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL); + g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); + flags |= 0x00000008; + g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL); + } + + finalize: + + ev->video_stream_nbr = eina_list_count(ev->video_streams); + ev->audio_stream_nbr = eina_list_count(ev->audio_streams); + + if (ev->video_stream_nbr == 1) + { + Emotion_Video_Stream *vstream; + + vstream = eina_list_data_get(ev->video_streams); + ev->ratio = (double)vstream->info.width / (double)vstream->info.height; + ev->ratio *= (double)vstream->info.par_n / (double)vstream->info.par_d; + _emotion_frame_resize(ev->obj, vstream->info.width, vstream->info.height, ev->ratio); + } + + { + /* on recapitule : */ + Emotion_Video_Stream *vstream; + Emotion_Audio_Stream *astream; + + vstream = eina_list_data_get(ev->video_streams); + if (vstream) + { + DBG("video size=%dx%d, fps=%d/%d, " + "format=%s, length=%"GST_TIME_FORMAT, + vstream->info.width, vstream->info.height, vstream->info.fps_n, vstream->info.fps_d, + gst_video_format_to_string(vstream->info.finfo->format), + GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND))); + } + + astream = eina_list_data_get(ev->audio_streams); + if (astream) + { + DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT, + astream->info.channels, astream->info.rate, + GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND))); + } + } + + if (ev->metadata) + _free_metadata(ev->metadata); + ev->metadata = calloc(1, 
sizeof(Emotion_Gstreamer_Metadata)); + + ev->pipeline_parsed = EINA_TRUE; + + em_audio_channel_volume_set(ev, ev->volume); + em_audio_channel_mute_set(ev, ev->audio_mute); + + if (ev->play_started) + { + _emotion_playback_started(ev->obj); + ev->play_started = 0; + } + + _emotion_open_done(ev->obj); + + return EINA_TRUE; +} diff --git a/src/modules/emotion/gstreamer1/emotion_gstreamer.h b/src/modules/emotion/gstreamer1/emotion_gstreamer.h new file mode 100644 index 0000000000..7ba9050cf0 --- /dev/null +++ b/src/modules/emotion/gstreamer1/emotion_gstreamer.h @@ -0,0 +1,295 @@ +#ifndef __EMOTION_GSTREAMER_H__ +#define __EMOTION_GSTREAMER_H__ + +#include "emotion_modules.h" + +#include +#include + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +// forcibly disable x overlay window.. broken badly. +#undef HAVE_ECORE_X + +#ifdef HAVE_ECORE_X +# include +# include +#endif + +typedef void (*Evas_Video_Convert_Cb)(unsigned char *evas_data, + const unsigned char *gst_data, + unsigned int w, + unsigned int h, + unsigned int output_height); + +typedef struct _EvasVideoSinkPrivate EvasVideoSinkPrivate; +typedef struct _EvasVideoSink EvasVideoSink; +typedef struct _EvasVideoSinkClass EvasVideoSinkClass; +typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video; +typedef struct _Emotion_Audio_Stream Emotion_Audio_Stream; +typedef struct _Emotion_Gstreamer_Metadata Emotion_Gstreamer_Metadata; +typedef struct _Emotion_Gstreamer_Buffer Emotion_Gstreamer_Buffer; +typedef struct _Emotion_Gstreamer_Message Emotion_Gstreamer_Message; +typedef struct _Emotion_Video_Stream Emotion_Video_Stream; + +struct _Emotion_Video_Stream +{ + gdouble length_time; + GstVideoInfo info; + int index; +}; + +struct _Emotion_Audio_Stream +{ + gdouble length_time; + GstAudioInfo info; +}; + +struct _Emotion_Gstreamer_Metadata +{ + char *title; + char *album; + char *artist; + char *genre; + char *comment; + char *year; + char *count; + char 
*disc_id; +}; + +struct _Emotion_Gstreamer_Video +{ + const Emotion_Engine *api; + + /* Gstreamer elements */ + GstElement *pipeline; + GstElement *sink; + GstElement *esink; + GstElement *xvsink; + GstElement *tee; + GstElement *convert; + + GstPad *eteepad; + GstPad *xvteepad; + GstPad *xvpad; + Eina_List *threads; + + /* eos */ + GstBus *eos_bus; + + /* Strams */ + Eina_List *video_streams; + Eina_List *audio_streams; + + int video_stream_nbr; + int audio_stream_nbr; + + /* We need to keep a copy of the last inserted buffer as evas doesn't copy YUV data around */ + GstBuffer *last_buffer; + + /* Evas object */ + Evas_Object *obj; + + /* Characteristics of stream */ + double position; + double ratio; + double volume; + + volatile int seek_to; + volatile int get_poslen; + + Emotion_Gstreamer_Metadata *metadata; + +#ifdef HAVE_ECORE_X + Ecore_X_Window win; +#endif + + const char *uri; + + Emotion_Gstreamer_Buffer *send; + + EvasVideoSinkPrivate *sink_data; + + Emotion_Vis vis; + + int in; + int out; + + int frames; + int flapse; + double rtime; + double rlapse; + + struct + { + double width; + double height; + } fill; + + Eina_Bool play : 1; + Eina_Bool play_started : 1; + Eina_Bool video_mute : 1; + Eina_Bool audio_mute : 1; + Eina_Bool pipeline_parsed : 1; + Eina_Bool delete_me : 1; + Eina_Bool kill_buffer : 1; + Eina_Bool stream : 1; + Eina_Bool priority : 1; + + int src_width; + int src_height; +}; + +struct _EvasVideoSink { + /*< private >*/ + GstVideoSink parent; + EvasVideoSinkPrivate *priv; +}; + +struct _EvasVideoSinkClass { + /*< private >*/ + GstVideoSinkClass parent_class; +}; + +struct _EvasVideoSinkPrivate { + EINA_REFCOUNT; + + Evas_Object *o; + + Emotion_Gstreamer_Video *ev; + + Evas_Video_Convert_Cb func; + + GstVideoInfo info; + unsigned int eheight; + Evas_Colorspace eformat; + + Eina_Lock m; + Eina_Condition c; + + // If this is TRUE all processing should finish ASAP + // This is necessary because there could be a race between + // unlock() and 
render(), where unlock() wins, signals the + // GCond, then render() tries to render a frame although + // everything else isn't running anymore. This will lead + // to deadlocks because render() holds the stream lock. + // + // Protected by the buffer mutex + Eina_Bool unlocked : 1; +}; + +struct _Emotion_Gstreamer_Buffer +{ + Emotion_Gstreamer_Video *ev; + EvasVideoSinkPrivate *sink; + + GstBuffer *frame; + + Eina_Bool preroll : 1; + Eina_Bool force : 1; +}; + +struct _Emotion_Gstreamer_Message +{ + Emotion_Gstreamer_Video *ev; + + GstMessage *msg; +}; + +extern Eina_Bool window_manager_video; +extern Eina_Bool debug_fps; +extern int _emotion_gstreamer_log_domain; +extern Eina_Bool _ecore_x_available; + +#ifdef DBG +#undef DBG +#endif +#define DBG(...) EINA_LOG_DOM_DBG(_emotion_gstreamer_log_domain, __VA_ARGS__) + +#ifdef INF +#undef INF +#endif +#define INF(...) EINA_LOG_DOM_INFO(_emotion_gstreamer_log_domain, __VA_ARGS__) + +#ifdef WRN +#undef WRN +#endif +#define WRN(...) EINA_LOG_DOM_WARN(_emotion_gstreamer_log_domain, __VA_ARGS__) + +#ifdef ERR +#undef ERR +#endif +#define ERR(...) EINA_LOG_DOM_ERR(_emotion_gstreamer_log_domain, __VA_ARGS__) + +#ifdef CRITICAL +#undef CRITICAL +#endif +#define CRITICAL(...) 
EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__) + +#define EVAS_TYPE_VIDEO_SINK evas_video_sink_get_type() + +GType fakeeos_bin_get_type(void); + +#define EVAS_VIDEO_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), \ + EVAS_TYPE_VIDEO_SINK, EvasVideoSink)) + +#define EVAS_VIDEO_SINK_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), \ + EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass)) + +#define EVAS_IS_VIDEO_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), \ + EVAS_TYPE_VIDEO_SINK)) + +#define EVAS_IS_VIDEO_SINK_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), \ + EVAS_TYPE_VIDEO_SINK)) + +#define EVAS_VIDEO_SINK_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj), \ + EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass)) + +#define GST_TYPE_FAKEEOS_BIN fakeeos_bin_get_type() + +GstElement *gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev, + Evas_Object *obj, + const char *uri); + +gboolean gstreamer_plugin_init(GstPlugin *plugin); + +Emotion_Gstreamer_Buffer *emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink, + GstBuffer *buffer, + Eina_Bool preroll); +void emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send); + +Emotion_Gstreamer_Message *emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev, + GstMessage *msg); +void emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send); +Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev, + Eina_Bool force); + +typedef struct _ColorSpace_Format_Convertion ColorSpace_Format_Convertion; + +struct _ColorSpace_Format_Convertion +{ + const char *name; + GstVideoFormat format; + Evas_Colorspace eformat; + Evas_Video_Convert_Cb func; + Eina_Bool force_height; +}; + +extern const ColorSpace_Format_Convertion colorspace_format_convertion[]; + +#endif /* __EMOTION_GSTREAMER_H__ */ diff --git a/src/modules/emotion/gstreamer1/emotion_sink.c b/src/modules/emotion/gstreamer1/emotion_sink.c new file mode 100644 index 0000000000..3a061b6dd8 --- /dev/null +++ 
b/src/modules/emotion/gstreamer1/emotion_sink.c @@ -0,0 +1,1006 @@ +#ifdef HAVE_CONFIG_H +# include "config.h" +#endif + +#include "emotion_gstreamer.h" + +static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink", + GST_PAD_SINK, GST_PAD_ALWAYS, + GST_STATIC_CAPS(GST_VIDEO_CAPS_MAKE("{ I420, YV12, YUY2, NV12, BGRx, BGR, BGRA }"))); + +GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug); +#define GST_CAT_DEFAULT evas_video_sink_debug + +enum { + LAST_SIGNAL +}; + +enum { + PROP_0, + PROP_EVAS_OBJECT, + PROP_WIDTH, + PROP_HEIGHT, + PROP_EV, + PROP_LAST +}; + +#define _do_init \ + GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \ + "emotion-sink", \ + 0, \ + "emotion video sink") + +#define parent_class evas_video_sink_parent_class +G_DEFINE_TYPE_WITH_CODE(EvasVideoSink, + evas_video_sink, + GST_TYPE_VIDEO_SINK, + _do_init); + + +static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv); +static void evas_video_sink_main_render(void *data); + +static void +evas_video_sink_init(EvasVideoSink* sink) +{ + EvasVideoSinkPrivate* priv; + + INF("sink init"); + sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate); + priv->o = NULL; + priv->info.width = 0; + priv->info.height = 0; + priv->eheight = 0; + priv->func = NULL; + priv->eformat = EVAS_COLORSPACE_ARGB8888; + eina_lock_new(&priv->m); + eina_condition_new(&priv->c, &priv->m); + priv->unlocked = EINA_FALSE; +} + +/**** Object methods ****/ +static void +_cleanup_priv(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED) +{ + EvasVideoSinkPrivate* priv; + + priv = data; + + eina_lock_take(&priv->m); + if (priv->o == obj) + priv->o = NULL; + eina_lock_release(&priv->m); +} + +static void +evas_video_sink_set_property(GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + EvasVideoSink* sink; + EvasVideoSinkPrivate* priv; + + sink = EVAS_VIDEO_SINK (object); + priv = sink->priv; + + switch (prop_id) { + case 
PROP_EVAS_OBJECT: + eina_lock_take(&priv->m); + if (priv->o) + evas_object_event_callback_del(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv); + priv->o = g_value_get_pointer (value); + INF("sink set Evas_Object %p.", priv->o); + if (priv->o) + evas_object_event_callback_add(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv, priv); + eina_lock_release(&priv->m); + break; + case PROP_EV: + INF("sink set ev."); + eina_lock_take(&priv->m); + priv->ev = g_value_get_pointer (value); + eina_lock_release(&priv->m); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + ERR("invalid property"); + break; + } +} + +static void +evas_video_sink_get_property(GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + EvasVideoSink* sink; + EvasVideoSinkPrivate* priv; + + sink = EVAS_VIDEO_SINK (object); + priv = sink->priv; + + switch (prop_id) { + case PROP_EVAS_OBJECT: + INF("sink get property."); + eina_lock_take(&priv->m); + g_value_set_pointer(value, priv->o); + eina_lock_release(&priv->m); + break; + case PROP_WIDTH: + INF("sink get width."); + eina_lock_take(&priv->m); + g_value_set_int(value, priv->info.width); + eina_lock_release(&priv->m); + break; + case PROP_HEIGHT: + INF("sink get height."); + eina_lock_take(&priv->m); + g_value_set_int (value, priv->eheight); + eina_lock_release(&priv->m); + break; + case PROP_EV: + INF("sink get ev."); + eina_lock_take(&priv->m); + g_value_set_pointer (value, priv->ev); + eina_lock_release(&priv->m); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + ERR("invalid property"); + break; + } +} + +static void +evas_video_sink_dispose(GObject* object) +{ + EvasVideoSink* sink; + EvasVideoSinkPrivate* priv; + + INF("dispose."); + + sink = EVAS_VIDEO_SINK(object); + priv = sink->priv; + + eina_lock_free(&priv->m); + eina_condition_free(&priv->c); + + G_OBJECT_CLASS(parent_class)->dispose(object); +} + + +/**** BaseSink methods ****/ + +gboolean
evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) +{ + EvasVideoSink* sink; + EvasVideoSinkPrivate* priv; + GstVideoInfo info; + unsigned int i; + + sink = EVAS_VIDEO_SINK(bsink); + priv = sink->priv; + + if (!gst_video_info_from_caps(&info, caps)) + { + ERR("Unable to parse caps."); + return FALSE; + } + + priv->info = info; + priv->eheight = info.height; + + for (i = 0; colorspace_format_convertion[i].name != NULL; ++i) + if (info.finfo->format == colorspace_format_convertion[i].format) + { + DBG("Found '%s'", colorspace_format_convertion[i].name); + priv->eformat = colorspace_format_convertion[i].eformat; + priv->func = colorspace_format_convertion[i].func; + if (colorspace_format_convertion[i].force_height) + { + priv->eheight = (priv->eheight >> 1) << 1; + } + if (priv->ev) + priv->ev->kill_buffer = EINA_TRUE; + return TRUE; + } + + ERR("unsupported : %s\n", gst_video_format_to_string(info.finfo->format)); + return FALSE; +} + +static gboolean +evas_video_sink_start(GstBaseSink* base_sink) +{ + EvasVideoSinkPrivate* priv; + gboolean res = TRUE; + + INF("sink start"); + + priv = EVAS_VIDEO_SINK(base_sink)->priv; + eina_lock_take(&priv->m); + if (!priv->o) + res = FALSE; + else + priv->unlocked = EINA_FALSE; + eina_lock_release(&priv->m); + return res; +} + +static gboolean +evas_video_sink_stop(GstBaseSink* base_sink) +{ + EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv; + + INF("sink stop"); + + unlock_buffer_mutex(priv); + return TRUE; +} + +static gboolean +evas_video_sink_unlock(GstBaseSink* object) +{ + EvasVideoSink* sink; + + INF("sink unlock"); + + sink = EVAS_VIDEO_SINK(object); + + unlock_buffer_mutex(sink->priv); + + return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock, + (object), TRUE); +} + +static gboolean +evas_video_sink_unlock_stop(GstBaseSink* object) +{ + EvasVideoSink* sink; + EvasVideoSinkPrivate* priv; + + sink = EVAS_VIDEO_SINK(object); + priv = sink->priv; + + INF("sink unlock stop"); + + 
eina_lock_take(&priv->m); + priv->unlocked = FALSE; + eina_lock_release(&priv->m); + + return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop, + (object), TRUE); +} + +static GstFlowReturn +evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer) +{ + Emotion_Gstreamer_Buffer *send; + EvasVideoSinkPrivate *priv; + EvasVideoSink *sink; + + INF("sink preroll %p [%" G_GSIZE_FORMAT "]", buffer, gst_buffer_get_size(buffer)); + + sink = EVAS_VIDEO_SINK(bsink); + priv = sink->priv; + + if (gst_buffer_get_size(buffer) <= 0) + { + WRN("empty buffer"); + return GST_FLOW_OK; + } + + send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE); + + if (send) + { + _emotion_pending_ecore_begin(); + ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send); + } + + return GST_FLOW_OK; +} + +static GstFlowReturn +evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer) +{ + Emotion_Gstreamer_Buffer *send; + EvasVideoSinkPrivate *priv; + EvasVideoSink *sink; + + INF("sink render %p", buffer); + + sink = EVAS_VIDEO_SINK(bsink); + priv = sink->priv; + + eina_lock_take(&priv->m); + + if (priv->unlocked) { + ERR("LOCKED"); + eina_lock_release(&priv->m); + return GST_FLOW_FLUSHING; + } + + send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE); + if (!send) { + eina_lock_release(&priv->m); + return GST_FLOW_ERROR; + } + + _emotion_pending_ecore_begin(); + ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send); + + eina_condition_wait(&priv->c); + eina_lock_release(&priv->m); + + return GST_FLOW_OK; +} + +static void +_update_emotion_fps(Emotion_Gstreamer_Video *ev) +{ + double tim; + + if (!debug_fps) return; + + tim = ecore_time_get(); + ev->frames++; + + if (ev->rlapse == 0.0) + { + ev->rlapse = tim; + ev->flapse = ev->frames; + } + else if ((tim - ev->rlapse) >= 0.5) + { + printf("FRAME: %i, FPS: %3.1f\n", + ev->frames, + (ev->frames - ev->flapse) / (tim - ev->rlapse)); + ev->rlapse = tim; + ev->flapse = 
ev->frames; + } +} + +static void +evas_video_sink_main_render(void *data) +{ + Emotion_Gstreamer_Buffer *send; + Emotion_Gstreamer_Video *ev = NULL; + Emotion_Video_Stream *vstream; + EvasVideoSinkPrivate *priv = NULL; + GstBuffer *buffer; + GstMapInfo map; + unsigned char *evas_data; + gint64 pos; + Eina_Bool preroll = EINA_FALSE; + + send = data; + + if (!send) goto exit_point; + + priv = send->sink; + buffer = send->frame; + preroll = send->preroll; + ev = send->ev; + + /* frame after cleanup */ + if (!preroll && !ev->last_buffer) + { + priv = NULL; + goto exit_point; + } + + if (!priv || !priv->o || priv->unlocked) + goto exit_point; + + if (ev->send && send != ev->send) + { + emotion_gstreamer_buffer_free(ev->send); + ev->send = NULL; + } + + if (!ev->stream && !send->force) + { + ev->send = send; + _emotion_frame_new(ev->obj); + evas_object_image_data_update_add(priv->o, 0, 0, priv->info.width, priv->eheight); + goto exit_stream; + } + + if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) + goto exit_stream; + + _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE); + + INF("sink main render [%i, %i] (source height: %i)", priv->info.width, priv->eheight, priv->info.height); + + evas_object_image_alpha_set(priv->o, 0); + evas_object_image_colorspace_set(priv->o, priv->eformat); + evas_object_image_size_set(priv->o, priv->info.width, priv->eheight); + + evas_data = evas_object_image_data_get(priv->o, 1); + + if (priv->func) + priv->func(evas_data, map.data, priv->info.width, priv->info.height, priv->eheight); + else + WRN("No way to decode %x colorspace !", priv->eformat); + + gst_buffer_unmap(buffer, &map); + + evas_object_image_data_set(priv->o, evas_data); + evas_object_image_data_update_add(priv->o, 0, 0, priv->info.width, priv->eheight); + evas_object_image_pixels_dirty_set(priv->o, 0); + + _update_emotion_fps(ev); + + if (!preroll && ev->play_started) + { + _emotion_playback_started(ev->obj); + ev->play_started = 0; + } + + if (!send->force) + { + 
_emotion_frame_new(ev->obj); + } + + gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &pos); + ev->position = (double)pos / (double)GST_SECOND; + + vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); + + if (vstream) + { + vstream->info.width = priv->info.width; + vstream->info.height = priv->eheight; + _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time); + } + + ev->ratio = (double) priv->info.width / (double) priv->eheight; + ev->ratio *= (double) priv->info.par_n / (double) priv->info.par_d; + + _emotion_frame_resize(ev->obj, priv->info.width, priv->eheight, ev->ratio); + + buffer = gst_buffer_ref(buffer); + if (ev->last_buffer) gst_buffer_unref(ev->last_buffer); + ev->last_buffer = buffer; + + exit_point: + if (send) emotion_gstreamer_buffer_free(send); + + exit_stream: + if (priv) + { + if (preroll || !priv->o) + { + _emotion_pending_ecore_end(); + return; + } + + if (!priv->unlocked) + eina_condition_signal(&priv->c); + } + _emotion_pending_ecore_end(); +} + +static void +unlock_buffer_mutex(EvasVideoSinkPrivate* priv) +{ + priv->unlocked = EINA_TRUE; + + eina_condition_signal(&priv->c); +} + +static void +evas_video_sink_class_init(EvasVideoSinkClass* klass) +{ + GObjectClass* gobject_class; + GstElementClass* gstelement_class; + GstBaseSinkClass* gstbase_sink_class; + + gobject_class = G_OBJECT_CLASS(klass); + gstelement_class = GST_ELEMENT_CLASS(klass); + gstbase_sink_class = GST_BASE_SINK_CLASS(klass); + + g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate)); + + gobject_class->set_property = evas_video_sink_set_property; + gobject_class->get_property = evas_video_sink_get_property; + + g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT, + g_param_spec_pointer ("evas-object", "Evas Object", + "The Evas object where the display of the video will be done", + G_PARAM_READWRITE)); + + g_object_class_install_property (gobject_class, PROP_WIDTH, + g_param_spec_int ("width", "Width", + "The 
width of the video", + 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_HEIGHT, + g_param_spec_int ("height", "Height", + "The height of the video", + 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_EV, + g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video", + "The internal data of the emotion object", + G_PARAM_READWRITE)); + + gobject_class->dispose = evas_video_sink_dispose; + + gst_element_class_add_pad_template(gstelement_class, gst_static_pad_template_get(&sinktemplate)); + gst_element_class_set_static_metadata(gstelement_class, "Evas video sink", + "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object", + "Vincent Torri "); + + gstbase_sink_class->set_caps = evas_video_sink_set_caps; + gstbase_sink_class->stop = evas_video_sink_stop; + gstbase_sink_class->start = evas_video_sink_start; + gstbase_sink_class->unlock = evas_video_sink_unlock; + gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop; + gstbase_sink_class->render = evas_video_sink_render; + gstbase_sink_class->preroll = evas_video_sink_preroll; +} + +gboolean +gstreamer_plugin_init (GstPlugin * plugin) +{ + return gst_element_register (plugin, + "emotion-sink", + GST_RANK_NONE, + EVAS_TYPE_VIDEO_SINK); +} + +static void +_emotion_gstreamer_pause(void *data, Ecore_Thread *thread) +{ + Emotion_Gstreamer_Video *ev = data; + gboolean res; + + if (ecore_thread_check(thread) || !ev->pipeline) return; + + gst_element_set_state(ev->pipeline, GST_STATE_PAUSED); + res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE); + if (res == GST_STATE_CHANGE_NO_PREROLL) + { + gst_element_set_state(ev->pipeline, GST_STATE_PLAYING); + gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE); + } +} + +static void +_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread) +{ + Emotion_Gstreamer_Video *ev = data; + + ev->threads 
= eina_list_remove(ev->threads, thread); + + if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT")); + + if (ev->in == ev->out && ev->delete_me) + ev->api->del(ev); +} + +static void +_emotion_gstreamer_end(void *data, Ecore_Thread *thread) +{ + Emotion_Gstreamer_Video *ev = data; + + ev->threads = eina_list_remove(ev->threads, thread); + + if (ev->play) + { + gst_element_set_state(ev->pipeline, GST_STATE_PLAYING); + ev->play_started = 1; + } + + if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT")); + + if (ev->in == ev->out && ev->delete_me) + ev->api->del(ev); + else + _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE); +} + +static void +_video_resize(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED, + Evas_Coord w, Evas_Coord h) +{ +#ifdef HAVE_ECORE_X + Emotion_Gstreamer_Video *ev = data; + + ecore_x_window_resize(ev->win, w, h); + DBG("resize: %i, %i", w, h); +#else + if (data) + { + DBG("resize: %i, %i (fake)", w, h); + } +#endif +} + +static void +_video_move(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED, + Evas_Coord x, Evas_Coord y) +{ +#ifdef HAVE_ECORE_X + Emotion_Gstreamer_Video *ev = data; + unsigned int pos[2]; + + DBG("move: %i, %i", x, y); + pos[0] = x; pos[1] = y; + ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2); +#else + if (data) + { + DBG("move: %i, %i (fake)", x, y); + } +#endif +} + +#if 0 +/* Much better idea to always feed the XvImageSink and let him handle optimizing the rendering as we do */ +static void +_block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data) +{ + if (blocked) + { + Emotion_Gstreamer_Video *ev = user_data; + GstEvent *gev; + + gst_pad_unlink(ev->xvteepad, ev->xvpad); + gev = 
gst_event_new_eos(); + gst_pad_send_event(ev->xvpad, gev); + gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL); + } +} + +static void +_block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data) +{ + if (blocked) + { + Emotion_Gstreamer_Video *ev = user_data; + + gst_pad_link(ev->xvteepad, ev->xvpad); + if (ev->play) + gst_element_set_state(ev->xvsink, GST_STATE_PLAYING); + else + gst_element_set_state(ev->xvsink, GST_STATE_PAUSED); + gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL); + } +} +#endif + +static void +_video_show(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED) +{ +#ifdef HAVE_ECORE_X + Emotion_Gstreamer_Video *ev = data; + + DBG("show xv"); + ecore_x_window_show(ev->win); +#else + if (data) + { + DBG("show xv (fake)"); + } +#endif + /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_link_cb, ev); */ +} + +static void +_video_hide(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED) +{ +#ifdef HAVE_ECORE_X + Emotion_Gstreamer_Video *ev = data; + + DBG("hide xv"); + ecore_x_window_hide(ev->win); +#else + if (data) + { + DBG("hide xv (fake)"); + } +#endif + /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_unlink_cb, ev); */ +} + +static void +_video_update_pixels(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED) +{ + Emotion_Gstreamer_Video *ev = data; + Emotion_Gstreamer_Buffer *send; + + if (!ev->send) return; + + send = ev->send; + send->force = EINA_TRUE; + ev->send = NULL; + + _emotion_pending_ecore_begin(); + evas_video_sink_main_render(send); +} + + +static void +_image_resize(void *data EINA_UNUSED, Evas *e EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event_info EINA_UNUSED) +{ +} + +GstElement * +gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev, + Evas_Object *o, + const char *uri) +{ + GstElement *playbin; + GstElement *bin = NULL; + GstElement *esink = NULL; + 
GstElement *xvsink = NULL; + GstElement *tee = NULL; + GstElement *queue = NULL; + Evas_Object *obj; + GstPad *pad; + GstPad *teepad; + int flags; + const char *launch; +#if defined HAVE_ECORE_X + const char *engine = NULL; + Eina_List *engines; +#endif + + obj = emotion_object_image_get(o); + if (!obj) + { +// ERR("Not Evas_Object specified"); + return NULL; + } + + if (!uri) + return NULL; + + launch = emotion_webcam_custom_get(uri); + if (launch) + { + GError *error = NULL; + + playbin = gst_parse_bin_from_description(launch, 1, &error); + if (!playbin) + { + ERR("Unable to setup command : '%s' got error '%s'.", launch, error->message); + g_error_free(error); + return NULL; + } + if (error) + { + WRN("got recoverable error '%s' for command : '%s'.", error->message, launch); + g_error_free(error); + } + } + else + { + playbin = gst_element_factory_make("playbin", "playbin"); + if (!playbin) + { + ERR("Unable to create 'playbin' GstElement."); + return NULL; + } + } + + bin = gst_bin_new(NULL); + if (!bin) + { + ERR("Unable to create GstBin !"); + goto unref_pipeline; + } + + tee = gst_element_factory_make("tee", NULL); + if (!tee) + { + ERR("Unable to create 'tee' GstElement."); + goto unref_pipeline; + } + +#if defined HAVE_ECORE_X + if (window_manager_video) + { + Eina_List *l; + const char *ename; + + engines = evas_render_method_list(); + + EINA_LIST_FOREACH(engines, l, ename) + { + if (evas_render_method_lookup(ename) == + evas_output_method_get(evas_object_evas_get(obj))) + { + engine = ename; + break; + } + } + + if (ev->priority && engine && strstr(engine, "_x11") != NULL) + { + Ecore_Evas *ee; + Evas_Coord x, y, w, h; + Ecore_X_Window win; + Ecore_X_Window parent; + + evas_object_geometry_get(obj, &x, &y, &w, &h); + + ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj)); + + if (w < 4) w = 4; + if (h < 2) h = 2; + + /* Here we really need to have the help of the window manager, this code will change when we update E17. 
*/ + parent = (Ecore_X_Window) ecore_evas_window_get(ee); + DBG("parent: %x", parent); + + win = ecore_x_window_new(0, x, y, w, h); + DBG("creating window: %x [%i, %i, %i, %i]", win, x, y, w, h); + if (win) + { + Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER }; + + ecore_x_netwm_window_state_set(win, state, 2); + ecore_x_window_hide(win); + xvsink = gst_element_factory_make("xvimagesink", NULL); + if (xvsink) + { + unsigned int pos[2]; + + gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(xvsink), win); + ev->win = win; + + ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1); + + pos[0] = x; pos[1] = y; + ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2); + } + else + { + DBG("destroying win: %x", win); + ecore_x_window_free(win); + } + } + } + evas_render_method_list_free(engines); + } +#else +//# warning "missing: ecore_x" +#endif + + esink = gst_element_factory_make("emotion-sink", "sink"); + if (!esink) + { + ERR("Unable to create 'emotion-sink' GstElement."); + goto unref_pipeline; + } + + g_object_set(G_OBJECT(esink), "evas-object", obj, NULL); + g_object_set(G_OBJECT(esink), "ev", ev, NULL); + + evas_object_image_pixels_get_callback_set(obj, NULL, NULL); + evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _image_resize, ev); + + /* We need queue to force each video sink to be in its own thread */ + queue = gst_element_factory_make("queue", "equeue"); + if (!queue) + { + ERR("Unable to create 'queue' GstElement."); + goto unref_pipeline; + } + + gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL); + gst_element_link_many(queue, esink, NULL); + + /* link both sink to GstTee */ + pad = gst_element_get_static_pad(queue, "sink"); + teepad = gst_element_get_request_pad(tee, "src_%u"); + gst_pad_link(teepad, pad); + gst_object_unref(pad); + + ev->eteepad = teepad; + + /* FIXME: Why a bin that drops the EOS message?! 
*/ + if (xvsink) + { + GstElement *fakeeos; + + queue = gst_element_factory_make("queue", "xvqueue"); + fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL))); + if (queue && fakeeos) + { + GstPad *queue_pad; + + gst_bin_add_many(GST_BIN(bin), fakeeos, NULL); + + gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL); + gst_element_link_many(queue, xvsink, NULL); + queue_pad = gst_element_get_static_pad(queue, "sink"); + gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad)); + + pad = gst_element_get_static_pad(fakeeos, "sink"); + teepad = gst_element_get_request_pad(tee, "src_%u"); + gst_pad_link(teepad, pad); + + xvsink = fakeeos; + + ev->xvteepad = teepad; + ev->xvpad = pad; + } + else + { + if (fakeeos) gst_object_unref(fakeeos); + if (queue) gst_object_unref(queue); + gst_object_unref(xvsink); + xvsink = NULL; + } + } + + teepad = gst_element_get_static_pad(tee, "sink"); + gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad)); + gst_object_unref(teepad); + +#define GST_PLAY_FLAG_NATIVE_VIDEO (1 << 6) +#define GST_PLAY_FLAG_DOWNLOAD (1 << 7) +#define GST_PLAY_FLAG_AUDIO (1 << 1) +#define GST_PLAY_FLAG_NATIVE_AUDIO (1 << 5) + + if (launch) + { + g_object_set(G_OBJECT(playbin), "sink", bin, NULL); + } + else + { + g_object_get(G_OBJECT(playbin), "flags", &flags, NULL); + g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL); + g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL); + g_object_set(G_OBJECT(playbin), "uri", uri, NULL); + } + + evas_object_image_pixels_get_callback_set(obj, NULL, NULL); + + ev->stream = EINA_TRUE; + + if (xvsink) + { + Evas_Video_Surface video; + + video.version = EVAS_VIDEO_SURFACE_VERSION; + video.data = ev; + video.parent = NULL; + video.move = _video_move; + video.resize = _video_resize; + video.show = _video_show; + video.hide = _video_hide; + video.update_pixels = _video_update_pixels; + 
+ evas_object_image_video_surface_set(obj, &video); + ev->stream = EINA_FALSE; + } + + eina_stringshare_replace(&ev->uri, uri); + ev->pipeline = playbin; + ev->sink = bin; + ev->esink = esink; + ev->xvsink = xvsink; + ev->tee = tee; + ev->threads = eina_list_append(ev->threads, + ecore_thread_run(_emotion_gstreamer_pause, + _emotion_gstreamer_end, + _emotion_gstreamer_cancel, + ev)); + + /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */ + /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */ + if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT")); + + return playbin; + + unref_pipeline: + gst_object_unref(xvsink); + gst_object_unref(esink); + gst_object_unref(tee); + gst_object_unref(bin); + gst_object_unref(playbin); + return NULL; +}